gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lenya.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.StringTokenizer;
import org.apache.log4j.Logger;
/**
* @version $Id$
*/
public final class FileUtil {

    private static Logger log = Logger.getLogger(FileUtil.class);

    /**
     * Command-line entry point.
     * <p>
     * Supported invocations:
     * <ul>
     * <li><code>--copy source destination</code> — copy a single file</li>
     * <li><code>--concatPath</code> — demonstrate relative path resolution
     * (hard-coded sample paths)</li>
     * </ul>
     *
     * @param args command line arguments as described above
     */
    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("Usage: java " + FileUtil.class.getName());
            return;
        }
        if (args[0].equals("--copy")) {
            if (args.length != 3) {
                System.err.println("Usage: --copy source destination");
                return;
            }
            try {
                System.err.println("cp " + args[1] + " " + args[2]);
                copy(args[1], args[2]);
            } catch (FileNotFoundException e) {
                System.err.println(e);
            } catch (IOException e) {
                System.err.println(e);
            }
            return;
        }
        if (args[0].equals("--concatPath")) {
            // FIXME: hard-coded sample paths, kept only as a demonstration
            // of file(String, String).
            File file = org.apache.lenya.util.FileUtil.file(
                    "/root/temp/jpf-1.9/java/lenya/x/xps/samples/invoices/invoices",
                    "../addresses/lenya.xml");
            System.out.println(file.getAbsolutePath());
        }
    }

    /**
     * Copies a file, creating the destination's parent directories if
     * necessary. Both streams are reliably closed, even if the copy fails
     * part-way through (the previous implementation leaked both).
     *
     * @param source_name path of the file to read
     * @param destination_name path of the file to write
     *
     * @throws FileNotFoundException if the source file does not exist
     * @throws IOException if reading, writing or closing fails
     */
    public static void copy(String source_name, String destination_name)
            throws FileNotFoundException, IOException {
        InputStream source = null;
        OutputStream destination = null;
        try {
            source = new FileInputStream(source_name);
            File destination_file = new File(destination_name);
            // getParent() is null when the destination is a bare file name;
            // in that case there is no directory to create (the original
            // code passed null to the File constructor and threw NPE).
            String parent_path = destination_file.getParent();
            if (parent_path != null) {
                File parent = new File(parent_path);
                if (!parent.exists()) {
                    parent.mkdirs();
                    log.debug("Directory has been created: " + parent.getAbsolutePath());
                }
            }
            destination = new FileOutputStream(destination_name);
            byte[] bytes_buffer = new byte[1024];
            int bytes_read;
            // read() returns -1 at end of stream, so >= 0 copies every chunk,
            // including legal zero-length reads.
            while ((bytes_read = source.read(bytes_buffer)) >= 0) {
                destination.write(bytes_buffer, 0, bytes_read);
            }
        } finally {
            if (source != null) {
                try {
                    source.close();
                } catch (IOException e) {
                    // Do not mask a write-side failure with a read-side
                    // close failure; just record it.
                    log.warn("Could not close input stream for " + source_name, e);
                }
            }
            if (destination != null) {
                // Propagate close failures: an unflushed output stream means
                // the copy may be incomplete.
                destination.close();
            }
        }
    }

    /**
     * Copy a single File or a complete Directory including its Contents.
     *
     * @param src the source File.
     * @param dest the destination File.
     *
     * @throws FileNotFoundException if the source File does not exist.
     * @throws IOException if an error occurs in the io system.
     */
    public static void copy(File src, File dest) throws FileNotFoundException, IOException {
        if (src.isFile()) {
            copySingleFile(src, dest);
        } else {
            // listFiles() returns null when src does not exist or is not
            // readable; treat that as "nothing to copy".
            File[] contents = src.listFiles();
            if (contents == null)
                return;
            dest.mkdirs();
            for (int i = 0; i < contents.length; i++) {
                String destPath = dest.getAbsolutePath() + File.separator + contents[i].getName();
                copy(contents[i], new File(destPath));
            }
        }
    }

    /**
     * Copy a single File.
     *
     * @param src the source File.
     * @param dest the destination File.
     *
     * @throws FileNotFoundException if the source File does not exist.
     * @throws IOException if an error occurs in the io system.
     */
    protected static void copySingleFile(File src, File dest) throws FileNotFoundException,
            IOException {
        dest.getParentFile().mkdirs();
        dest.createNewFile();
        // Delegate the byte transfer to commons-io, which handles buffering
        // and stream closing.
        org.apache.commons.io.FileUtils.copyFile(src, dest);
    }

    /**
     * Returns a file by specifying an absolute directory name and a relative
     * file name.
     *
     * @param absoluteDir absolute directory path (forward-slash separated)
     * @param relativeFile file name relative to <code>absoluteDir</code>;
     *            may start with <code>../</code> or <code>./</code>
     *
     * @return the resolved File
     */
    public static File file(String absoluteDir, String relativeFile) {
        File file = new File(fileName(absoluteDir, relativeFile));
        return file;
    }

    /**
     * Returns an absolute file name by specifying an absolute directory name
     * and a relative file name. Leading <code>../</code> segments walk up the
     * directory hierarchy; a leading <code>./</code> is stripped.
     *
     * @param absoluteDir absolute directory path (forward-slash separated)
     * @param relativeFile relative file name
     *
     * @return the concatenated absolute file name
     */
    public static String fileName(String absoluteDir, String relativeFile) {
        String fileName = null;
        StringBuffer newAbsoluteDir = new StringBuffer(absoluteDir);
        // Ensure the directory ends with a separator. Guard against an empty
        // directory string (the original threw StringIndexOutOfBoundsException).
        if (absoluteDir.length() == 0 || absoluteDir.charAt(absoluteDir.length() - 1) != '/') {
            newAbsoluteDir.append("/");
        }
        if (relativeFile.indexOf("../") == 0) {
            // "../": drop the last path segment of the directory, then recurse
            // on the remainder of the relative name.
            StringTokenizer token = new StringTokenizer(newAbsoluteDir.toString(), "/");
            newAbsoluteDir = new StringBuffer("/");
            int numberOfTokens = token.countTokens();
            for (int i = 0; i < (numberOfTokens - 1); i++) {
                newAbsoluteDir.append(token.nextToken()).append("/");
            }
            String newRelativeFile = relativeFile.substring(3, relativeFile.length());
            fileName = fileName(newAbsoluteDir.toString(), newRelativeFile);
        } else if (relativeFile.indexOf("./") == 0) {
            // "./" refers to the current directory; strip it.
            fileName = newAbsoluteDir + relativeFile.substring(2, relativeFile.length());
        } else {
            fileName = newAbsoluteDir + relativeFile;
        }
        return fileName;
    }

    /**
     * Returns an absolute file name by specifying an absolute file or
     * directory name and a relative file name. When <code>absoluteFile</code>
     * denotes an existing regular file, resolution starts from its parent
     * directory.
     *
     * @param absoluteFile absolute file or directory path
     * @param relativeFile relative file name
     *
     * @return the concatenated absolute file name
     */
    public static String concat(String absoluteFile, String relativeFile) {
        File file = new File(absoluteFile);
        if (file.isFile()) {
            return fileName(file.getParent(), relativeFile);
        }
        return fileName(absoluteFile, relativeFile);
    }

    /**
     * Deletes all dirs up to stop dir or if dirs in hierarchy are not empty.
     *
     * @param start File to delete the parents of. The File itself is not deleted.
     * @param stop Stop deleting at this dir. This dir is not deleted.
     * @throws IllegalArgumentException If stop is not a dir or start is not a
     *             descendant of the stop dir.
     */
    public static void deleteParentDirs(File start, File stop) throws IllegalArgumentException {
        if (!stop.isDirectory())
            throw new IllegalArgumentException("Stop dir '" + stop.getAbsolutePath()
                    + "' is not a directory");
        if (!start.getAbsolutePath().startsWith(stop.getAbsolutePath()))
            throw new IllegalArgumentException("Start dir '" + start.getAbsolutePath()
                    + "' is not a descending sibling of stop directory '" + stop.getAbsolutePath()
                    + "'.");
        File parent = start.getParentFile();
        // Walk upwards while directories are empty (delete() fails on
        // non-empty dirs, ending the loop). Guard against reaching the
        // filesystem root, where getParentFile() returns null.
        while (parent != null && !parent.equals(stop) && parent.delete())
            parent = parent.getParentFile();
    }
}
| |
/**
* Copyright 2016 Crawler-Commons
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package crawlercommons.sitemaps.sax;
import static crawlercommons.sitemaps.SiteMapParser.LOG;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
import crawlercommons.sitemaps.AbstractSiteMap;
import crawlercommons.sitemaps.Namespace;
import crawlercommons.sitemaps.UnknownFormatException;
import crawlercommons.sitemaps.extension.Extension;
/**
* Provides a base SAX handler for parsing of XML documents representing
* sub-classes of AbstractSiteMap.
*/
public class DelegatorHandler extends DefaultHandler {

    // Stack of open element local names; the head is the element currently
    // being parsed. Shared with the delegate handler so both see the same
    // nesting state.
    private LinkedList<String> elementStack;
    // Format-specific handler (Atom/RSS/XML sitemap/XML index) chosen once
    // the root element is known; null until then, or when the root element
    // was rejected.
    private DelegatorHandler delegate;
    // URL the sitemap was fetched from; used to resolve relative namespace
    // URIs and for log messages.
    private URL url;
    private boolean strict;
    private boolean strictNamespace;
    // First fatal format problem encountered; exposed via getException().
    private UnknownFormatException exception;
    private Set<String> acceptedNamespaces;
    protected Map<String, Extension> extensionNamespaces;
    // Accumulates character data between elements for subclasses, via
    // appendCharacterBuffer()/getAndResetCharacterBuffer().
    private StringBuilder characterBuffer = new StringBuilder();
    // Hook applied to every extracted URL; identity by default.
    protected Function<String, String> urlFilter = (String url) -> url;

    /**
     * Constructor used by delegate subclasses: shares the parent's element
     * stack so nesting depth stays consistent across the delegation boundary.
     */
    protected DelegatorHandler(LinkedList<String> elementStack, boolean strict) {
        this.elementStack = elementStack;
        this.strict = strict;
    }

    /**
     * Creates the top-level handler for a sitemap fetched from the given URL.
     *
     * @param url location of the document being parsed
     * @param strict whether strict sitemap validation rules apply
     */
    public DelegatorHandler(URL url, boolean strict) {
        this.elementStack = new LinkedList<String>();
        this.url = url;
        this.strict = strict;
    }

    protected URL getUrl() {
        return url;
    }

    protected boolean isStrict() {
        return strict;
    }

    protected boolean isStrictNamespace() {
        return strictNamespace;
    }

    public void setStrictNamespace(boolean s) {
        strictNamespace = s;
    }

    public void setAcceptedNamespaces(Set<String> acceptedSet) {
        acceptedNamespaces = acceptedSet;
    }

    // NOTE(review): assumes setAcceptedNamespaces() was called first;
    // throws NPE otherwise. Only invoked under isStrictNamespace().
    protected boolean isAcceptedNamespace(String uri) {
        return acceptedNamespaces.contains(uri);
    }

    public void setExtensionNamespaces(Map<String, Extension> extensionMap) {
        extensionNamespaces = extensionMap;
    }

    protected boolean isExtensionNamespace(String uri) {
        if (extensionNamespaces == null) {
            return false;
        }
        return extensionNamespaces.containsKey(uri);
    }

    /** Installs a filter applied to every URL found while parsing. */
    public void setURLFilter(Function<String, String> urlFilter) {
        this.urlFilter = urlFilter;
    }

    protected void setException(UnknownFormatException exception) {
        this.exception = exception;
    }

    public UnknownFormatException getException() {
        return exception;
    }

    /**
     * Routes the element either to root-element detection (first element, or
     * after a rejected root) or onto the shared element stack, then forwards
     * it to the delegate if one exists.
     */
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
        if (elementStack.isEmpty() || delegate == null) {
            // Root element (or no delegate chosen yet): decide which
            // format-specific handler to use. startRootElement also pushes
            // the element onto the stack.
            startRootElement(uri, localName, qName, attributes);
        } else {
            elementStack.push(localName);
        }
        if (delegate != null) {
            delegate.startElement(uri, localName, qName, attributes);
        }
    }

    /**
     * Detects the document format from the root element name, instantiates
     * the matching delegate handler and configures it. In strict-namespace
     * mode, rejects the document (setting an UnknownFormatException) when the
     * root namespace is not accepted.
     */
    private void startRootElement(String uri, String localName, String qName, Attributes attributes) {
        elementStack.push(localName);
        if ("feed".equals(localName)) {
            delegate = new AtomHandler(url, elementStack, strict);
        }
        // See if it is a RSS feed by looking for the localName "channel"
        // element. This avoids the issue of having the outer tag named
        // <rdf:RDF> that was causing this code to fail. Inside of
        // the <rss> or <rdf> tag is a <channel> tag, so we can use that.
        // See https://github.com/crawler-commons/crawler-commons/issues/87
        // and also RSS 1.0 specification http://web.resource.org/rss/1.0/spec
        else if ("channel".equals(localName)) {
            delegate = new RSSHandler(url, elementStack, strict);
        } else if ("sitemapindex".equals(localName)) {
            delegate = new XMLIndexHandler(url, elementStack, strict);
        } else if ("urlset".equals(localName)) {
            delegate = new XMLHandler(url, elementStack, strict);
        } else {
            LOG.debug("Skipped unknown root element <{}> in {}", localName, url);
            return;
        }
        // configure delegate
        delegate.setStrictNamespace(isStrictNamespace());
        delegate.setAcceptedNamespaces(acceptedNamespaces);
        // validate XML namespace
        if (isStrictNamespace()) {
            if (delegate instanceof AtomHandler || delegate instanceof RSSHandler) {
                // no namespace checking for feeds
                return;
            }
            if (!isAcceptedNamespace(uri) && uri.startsWith("/")) {
                // first, try to resolve relative namespace URI (deprecated but
                // not forbidden), e.g., //www.sitemaps.org/schemas/sitemap/0.9
                try {
                    URL u = new URL(url, uri);
                    uri = u.toString();
                } catch (MalformedURLException e) {
                    LOG.warn("Failed to resolve relative namespace URI {} in sitemap {}", uri, url);
                }
            }
            if (!isAcceptedNamespace(uri)) {
                String msg;
                if (!Namespace.isSupported(uri)) {
                    msg = "Unsupported namespace <" + uri + ">";
                } else {
                    msg = "Namespace <" + uri + "> not accepted";
                }
                setException(new UnknownFormatException(msg));
                // Dropping the delegate makes subsequent events no-ops.
                delegate = null;
                return;
            }
        }
        delegate.setExtensionNamespaces(extensionNamespaces);
        delegate.setURLFilter(urlFilter);
    }

    /** Forwards the event to the delegate, then unwinds the element stack. */
    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if (delegate != null) {
            delegate.endElement(uri, localName, qName);
        }
        elementStack.pop();
    }

    @Override
    public void characters(char ch[], int start, int length) throws SAXException {
        if (delegate != null) {
            delegate.characters(ch, start, length);
        }
    }

    /** Appends a slice of a SAX character buffer to the internal buffer. */
    protected void appendCharacterBuffer(char ch[], int start, int length) {
        for (int i = start; i < start + length; i++) {
            characterBuffer.append(ch[i]);
        }
    }

    protected void appendCharacterBuffer(String str) {
        characterBuffer.append(str);
    }

    /** Returns the buffered character data and clears the buffer. */
    protected String getAndResetCharacterBuffer() {
        String value = characterBuffer.toString();
        resetCharacterBuffer();
        return value;
    }

    protected void resetCharacterBuffer() {
        characterBuffer = new StringBuilder();
    }

    /** Local name of the element currently being parsed, or null at depth 0. */
    protected String currentElement() {
        return elementStack.peek();
    }

    /** Local name of the current element's parent, or null above depth 2. */
    protected String currentElementParent() {
        return (elementStack.size() < 2) ? null : elementStack.get(1);
    }

    /**
     * Returns the parsed sitemap from the delegate, or null when no
     * recognized root element was seen (or the namespace was rejected).
     */
    public AbstractSiteMap getSiteMap() {
        if (delegate == null)
            return null;
        return delegate.getSiteMap();
    }

    // Recoverable and fatal parse errors are forwarded so format-specific
    // handlers can decide how tolerant to be; without a delegate they are
    // silently ignored.
    @Override
    public void error(SAXParseException e) throws SAXException {
        if (delegate != null) {
            delegate.error(e);
        }
    }

    @Override
    public void fatalError(SAXParseException e) throws SAXException {
        if (delegate != null) {
            delegate.fatalError(e);
        }
    }

    /**
     * Return true if character sequence contains only white space including
     * Unicode whitespace, cf. {@link #isWhitespace(char)}
     */
    public static boolean isAllBlank(CharSequence charSeq) {
        for (int i = 0; i < charSeq.length(); i++) {
            if (!isWhitespace(charSeq.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Check whether character is any Unicode whitespace, including the space
     * characters not covered by {@link Character#isWhitespace(char)}
     */
    public static boolean isWhitespace(char c) {
        // u00a0 = no-break space, u2007 = figure space, u202f = narrow
        // no-break space; all three are excluded from Character.isWhitespace.
        return Character.isWhitespace(c) || c == '\u00a0' || c == '\u2007' || c == '\u202f';
    }

    /** Trim all whitespace including Unicode whitespace */
    public static String stripAllBlank(CharSequence charSeq) {
        if (charSeq.length() == 0) {
            return charSeq.toString();
        }
        int start = 0;
        int end = charSeq.length() - 1;
        // NOTE(review): for an all-whitespace input this returns a single
        // whitespace character rather than the empty string (start stops at
        // end); preserved as existing behavior.
        while (isWhitespace(charSeq.charAt(start)) && start < end) {
            start++;
        }
        if (start < end) {
            while (isWhitespace(charSeq.charAt(end))) {
                end--;
            }
        }
        return charSeq.subSequence(start, end + 1).toString();
    }
}
| |
/**
*/
package org.xtext.mgpl.mgplDSL.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.xtext.mgpl.mgplDSL.AttrList;
import org.xtext.mgpl.mgplDSL.Block;
import org.xtext.mgpl.mgplDSL.Declaration;
import org.xtext.mgpl.mgplDSL.MgplDSLPackage;
import org.xtext.mgpl.mgplDSL.Model;
import org.xtext.mgpl.mgplDSL.StatementBlock;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Model</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.xtext.mgpl.mgplDSL.impl.ModelImpl#getName <em>Name</em>}</li>
* <li>{@link org.xtext.mgpl.mgplDSL.impl.ModelImpl#getAttr <em>Attr</em>}</li>
* <li>{@link org.xtext.mgpl.mgplDSL.impl.ModelImpl#getDecl <em>Decl</em>}</li>
* <li>{@link org.xtext.mgpl.mgplDSL.impl.ModelImpl#getStmt <em>Stmt</em>}</li>
* <li>{@link org.xtext.mgpl.mgplDSL.impl.ModelImpl#getBlocks <em>Blocks</em>}</li>
* </ul>
* </p>
*
* @generated
*/
// NOTE: EMF-generated class (@generated). Do not hand-edit the method
// bodies: they will be overwritten the next time the model is regenerated,
// unless a method is explicitly marked "@generated NOT".
public class ModelImpl extends MinimalEObjectImpl.Container implements Model
{
  /**
   * The default value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected static final String NAME_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected String name = NAME_EDEFAULT;

  /**
   * The cached value of the '{@link #getAttr() <em>Attr</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getAttr()
   * @generated
   * @ordered
   */
  protected AttrList attr;

  /**
   * The cached value of the '{@link #getDecl() <em>Decl</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getDecl()
   * @generated
   * @ordered
   */
  protected EList<Declaration> decl;

  /**
   * The cached value of the '{@link #getStmt() <em>Stmt</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getStmt()
   * @generated
   * @ordered
   */
  protected StatementBlock stmt;

  /**
   * The cached value of the '{@link #getBlocks() <em>Blocks</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getBlocks()
   * @generated
   * @ordered
   */
  protected EList<Block> blocks;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected ModelImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return MgplDSLPackage.Literals.MODEL;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName()
  {
    return name;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName(String newName)
  {
    String oldName = name;
    name = newName;
    // Notify adapters (e.g. editors) only when someone is listening.
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, MgplDSLPackage.MODEL__NAME, oldName, name));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public AttrList getAttr()
  {
    return attr;
  }

  /**
   * Low-level setter for the 'attr' containment reference: updates the field
   * and chains a SET notification without touching inverse references.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetAttr(AttrList newAttr, NotificationChain msgs)
  {
    AttrList oldAttr = attr;
    attr = newAttr;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MgplDSLPackage.MODEL__ATTR, oldAttr, newAttr);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setAttr(AttrList newAttr)
  {
    if (newAttr != attr)
    {
      NotificationChain msgs = null;
      // Detach the old contained object and attach the new one before
      // dispatching the accumulated notifications in one batch.
      if (attr != null)
        msgs = ((InternalEObject)attr).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - MgplDSLPackage.MODEL__ATTR, null, msgs);
      if (newAttr != null)
        msgs = ((InternalEObject)newAttr).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - MgplDSLPackage.MODEL__ATTR, null, msgs);
      msgs = basicSetAttr(newAttr, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, MgplDSLPackage.MODEL__ATTR, newAttr, newAttr));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Declaration> getDecl()
  {
    // Lazily created containment list; EMF relies on this exact list type
    // for inverse-reference bookkeeping.
    if (decl == null)
    {
      decl = new EObjectContainmentEList<Declaration>(Declaration.class, this, MgplDSLPackage.MODEL__DECL);
    }
    return decl;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public StatementBlock getStmt()
  {
    return stmt;
  }

  /**
   * Low-level setter for the 'stmt' containment reference; see
   * {@link #basicSetAttr(AttrList, NotificationChain)} for the pattern.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetStmt(StatementBlock newStmt, NotificationChain msgs)
  {
    StatementBlock oldStmt = stmt;
    stmt = newStmt;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MgplDSLPackage.MODEL__STMT, oldStmt, newStmt);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setStmt(StatementBlock newStmt)
  {
    if (newStmt != stmt)
    {
      NotificationChain msgs = null;
      if (stmt != null)
        msgs = ((InternalEObject)stmt).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - MgplDSLPackage.MODEL__STMT, null, msgs);
      if (newStmt != null)
        msgs = ((InternalEObject)newStmt).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - MgplDSLPackage.MODEL__STMT, null, msgs);
      msgs = basicSetStmt(newStmt, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, MgplDSLPackage.MODEL__STMT, newStmt, newStmt));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Block> getBlocks()
  {
    if (blocks == null)
    {
      blocks = new EObjectContainmentEList<Block>(Block.class, this, MgplDSLPackage.MODEL__BLOCKS);
    }
    return blocks;
  }

  /**
   * Reflective removal of contained objects, dispatched by feature ID.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case MgplDSLPackage.MODEL__ATTR:
        return basicSetAttr(null, msgs);
      case MgplDSLPackage.MODEL__DECL:
        return ((InternalEList<?>)getDecl()).basicRemove(otherEnd, msgs);
      case MgplDSLPackage.MODEL__STMT:
        return basicSetStmt(null, msgs);
      case MgplDSLPackage.MODEL__BLOCKS:
        return ((InternalEList<?>)getBlocks()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case MgplDSLPackage.MODEL__NAME:
        return getName();
      case MgplDSLPackage.MODEL__ATTR:
        return getAttr();
      case MgplDSLPackage.MODEL__DECL:
        return getDecl();
      case MgplDSLPackage.MODEL__STMT:
        return getStmt();
      case MgplDSLPackage.MODEL__BLOCKS:
        return getBlocks();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case MgplDSLPackage.MODEL__NAME:
        setName((String)newValue);
        return;
      case MgplDSLPackage.MODEL__ATTR:
        setAttr((AttrList)newValue);
        return;
      case MgplDSLPackage.MODEL__DECL:
        getDecl().clear();
        getDecl().addAll((Collection<? extends Declaration>)newValue);
        return;
      case MgplDSLPackage.MODEL__STMT:
        setStmt((StatementBlock)newValue);
        return;
      case MgplDSLPackage.MODEL__BLOCKS:
        getBlocks().clear();
        getBlocks().addAll((Collection<? extends Block>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case MgplDSLPackage.MODEL__NAME:
        setName(NAME_EDEFAULT);
        return;
      case MgplDSLPackage.MODEL__ATTR:
        setAttr((AttrList)null);
        return;
      case MgplDSLPackage.MODEL__DECL:
        getDecl().clear();
        return;
      case MgplDSLPackage.MODEL__STMT:
        setStmt((StatementBlock)null);
        return;
      case MgplDSLPackage.MODEL__BLOCKS:
        getBlocks().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case MgplDSLPackage.MODEL__NAME:
        return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
      case MgplDSLPackage.MODEL__ATTR:
        return attr != null;
      case MgplDSLPackage.MODEL__DECL:
        return decl != null && !decl.isEmpty();
      case MgplDSLPackage.MODEL__STMT:
        return stmt != null;
      case MgplDSLPackage.MODEL__BLOCKS:
        return blocks != null && !blocks.isEmpty();
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (name: ");
    result.append(name);
    result.append(')');
    return result.toString();
  }
} //ModelImpl
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.maddyhome.idea.copyright.actions;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ContentIterator;
import com.intellij.openapi.roots.ModuleFileIndex;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.IncorrectOperationException;
import com.maddyhome.idea.copyright.CopyrightManager;
import com.maddyhome.idea.copyright.CopyrightProfile;
import com.maddyhome.idea.copyright.CopyrightUpdaters;
import consulo.logging.Logger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public abstract class AbstractFileProcessor {
private final Project myProject;
private final Module myModule;
private PsiDirectory directory = null;
private PsiFile file = null;
private PsiFile[] files = null;
private boolean subdirs = false;
private final String message;
private final String title;
protected abstract Runnable preprocessFile(PsiFile psifile) throws IncorrectOperationException;
protected AbstractFileProcessor(Project project, String title, String message) {
myProject = project;
myModule = null;
directory = null;
subdirs = true;
this.title = title;
this.message = message;
}
protected AbstractFileProcessor(Project project, Module module, String title, String message) {
myProject = project;
myModule = module;
directory = null;
subdirs = true;
this.title = title;
this.message = message;
}
protected AbstractFileProcessor(Project project, PsiDirectory dir, boolean subdirs, String title, String message) {
myProject = project;
myModule = null;
directory = dir;
this.subdirs = subdirs;
this.message = message;
this.title = title;
}
protected AbstractFileProcessor(Project project, PsiFile file, String title, String message) {
myProject = project;
myModule = null;
this.file = file;
this.message = message;
this.title = title;
}
protected AbstractFileProcessor(Project project, PsiFile[] files, String title, String message, Runnable runnable) {
myProject = project;
myModule = null;
this.files = files;
this.message = message;
this.title = title;
}
public void run() {
if (directory != null) {
process(directory, subdirs);
}
else if (files != null) {
process(files);
}
else if (file != null) {
process(file);
}
else if (myModule != null) {
process(myModule);
}
else if (myProject != null) {
process(myProject);
}
}
private void process(final PsiFile file) {
if (!FileModificationService.getInstance().preparePsiElementForWrite(file)) return;
final Runnable[] resultRunnable = new Runnable[1];
execute(new Runnable() {
public void run() {
try {
resultRunnable[0] = preprocessFile(file);
}
catch (IncorrectOperationException incorrectoperationexception) {
logger.error(incorrectoperationexception);
}
}
}, new Runnable() {
public void run() {
if (resultRunnable[0] != null) {
resultRunnable[0].run();
}
}
}
);
}
private Runnable prepareFiles(List<PsiFile> files) {
ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
String msg = null;
double fraction = 0.0D;
if (indicator != null) {
msg = indicator.getText();
fraction = indicator.getFraction();
indicator.setText(message);
}
final Runnable[] runnables = new Runnable[files.size()];
for (int i = 0; i < files.size(); i++) {
PsiFile pfile = files.get(i);
if (pfile == null) {
logger.debug("Unexpected null file at " + i);
continue;
}
if (indicator != null) {
if (indicator.isCanceled()) {
return null;
}
indicator.setFraction((double)i / (double)files.size());
}
if (pfile.isWritable()) {
try {
runnables[i] = preprocessFile(pfile);
}
catch (IncorrectOperationException incorrectoperationexception) {
logger.error(incorrectoperationexception);
}
}
files.set(i, null);
}
if (indicator != null) {
indicator.setText(msg);
indicator.setFraction(fraction);
}
return new Runnable() {
public void run() {
ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
String msg = null;
double fraction = 0.0D;
if (indicator != null) {
msg = indicator.getText();
fraction = indicator.getFraction();
indicator.setText(message);
}
for (int j = 0; j < runnables.length; j++) {
if (indicator != null) {
if (indicator.isCanceled()) {
return;
}
indicator.setFraction((double)j / (double)runnables.length);
}
Runnable runnable = runnables[j];
if (runnable != null) {
runnable.run();
}
runnables[j] = null;
}
if (indicator != null) {
indicator.setText(msg);
indicator.setFraction(fraction);
}
}
};
}
private void process(final PsiFile[] files) {
final Runnable[] resultRunnable = new Runnable[1];
execute(new Runnable() {
public void run() {
resultRunnable[0] = prepareFiles(new ArrayList<PsiFile>(Arrays.asList(files)));
}
}, new Runnable() {
public void run() {
if (resultRunnable[0] != null) {
resultRunnable[0].run();
}
}
}
);
}
private void process(final PsiDirectory dir, final boolean subdirs) {
final List<PsiFile> pfiles = new ArrayList<PsiFile>();
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
findFiles(pfiles, dir, subdirs);
}
}, title, true, myProject);
handleFiles(pfiles);
}
private void process(final Project project) {
final List<PsiFile> pfiles = new ArrayList<PsiFile>();
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
findFiles(project, pfiles);
}
}, title, true, project);
handleFiles(pfiles);
}
private void process(final Module module) {
final List<PsiFile> pfiles = new ArrayList<PsiFile>();
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
findFiles(module, pfiles);
}
}, title, true, myProject);
handleFiles(pfiles);
}
private static void findFiles(Project project, List<PsiFile> files) {
Module[] modules = ModuleManager.getInstance(project).getModules();
for (Module module : modules) {
findFiles(module, files);
}
}
protected static void findFiles(final Module module, final List<PsiFile> files) {
final ModuleFileIndex idx = ModuleRootManager.getInstance(module).getFileIndex();
final VirtualFile[] roots = ModuleRootManager.getInstance(module).getContentRoots();
for (VirtualFile root : roots) {
idx.iterateContentUnderDirectory(root, new ContentIterator() {
public boolean processFile(final VirtualFile dir) {
if (dir.isDirectory()) {
final PsiDirectory psiDir = PsiManager.getInstance(module.getProject()).findDirectory(dir);
if (psiDir != null) {
findFiles(files, psiDir, false);
}
}
return true;
}
});
}
}
private void handleFiles(final List<PsiFile> files) {
final List<VirtualFile> vFiles = new ArrayList<VirtualFile>();
for (PsiFile psiFile : files) {
vFiles.add(psiFile.getVirtualFile());
}
if (!ReadonlyStatusHandler.getInstance(myProject).ensureFilesWritable(VfsUtil.toVirtualFileArray(vFiles)).hasReadonlyFiles()) {
if (!files.isEmpty()) {
final Runnable[] resultRunnable = new Runnable[1];
execute(new Runnable() {
public void run() {
resultRunnable[0] = prepareFiles(files);
}
}, new Runnable() {
public void run() {
if (resultRunnable[0] != null) {
resultRunnable[0].run();
}
}
}
);
}
}
}
/**
 * Adds every file of {@code directory} that has a copyright profile and a
 * registered updater to {@code files}; recurses when {@code subdirs} is set.
 */
private static void findFiles(List<PsiFile> files, PsiDirectory directory, boolean subdirs) {
    Project project = directory.getProject();
    for (PsiFile file : directory.getFiles()) {
        CopyrightProfile profile = CopyrightManager.getInstance(project).getCopyrightOptions(file);
        if (profile != null && CopyrightUpdaters.hasExtension(file)) {
            files.add(file);
        }
    }
    if (!subdirs) {
        return;
    }
    for (PsiDirectory subdirectory : directory.getSubdirectories()) {
        findFiles(files, subdirectory, true);
    }
}
/**
 * Runs {@code readAction} under a cancellable modal progress dialog, then
 * runs {@code writeAction} inside a {@code WriteCommandAction} so the change
 * is undoable and performed with the write lock held.
 */
private void execute(final Runnable readAction, final Runnable writeAction) {
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
readAction.run();
}
}, title, true, myProject);
// NOTE(review): the write action runs even if the progress dialog was
// cancelled — callers are expected to guard against a missing result.
new WriteCommandAction(myProject, title) {
protected void run(Result result) throws Throwable {
writeAction.run();
}
}.execute();
}
// Class-wide logger. NOTE(review): not referenced in this visible portion of
// the class — confirm usage elsewhere before removing.
private static final Logger logger = Logger.getInstance(AbstractFileProcessor.class);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.sls.appmaster;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.sls.SLSRunner;
import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* AMSimulator that simulates DAG - it requests for containers
* based on the delay specified. It finishes when all the tasks
* are completed.
* Vocabulary Used:
* <dl>
* <dt>Pending</dt><dd>requests which are NOT yet sent to RM.</dd>
* <dt>Scheduled</dt>
* <dd>requests which are sent to RM but not yet assigned.</dd>
* <dt>Assigned</dt><dd>requests which are assigned to a container.</dd>
* <dt>Completed</dt>
* <dd>request corresponding to which container has completed.</dd>
* </dl>
* Containers are requested based on the request delay.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class DAGAMSimulator extends AMSimulator {
  private static final int PRIORITY = 20;

  // Request lifecycle (see class javadoc):
  //   pending -> scheduled -> assigned -> completed
  private List<ContainerSimulator> pendingContainers =
      new LinkedList<>();

  private List<ContainerSimulator> scheduledContainers =
      new LinkedList<>();

  private Map<ContainerId, ContainerSimulator> assignedContainers =
      new HashMap<>();

  private List<ContainerSimulator> completedContainers =
      new LinkedList<>();

  // Full task list, retained so a killed AM can be restarted (see restart()).
  private List<ContainerSimulator> allContainers =
      new LinkedList<>();

  private boolean isFinished = false;
  // Wall-clock launch time of the AM; container request delays are measured
  // relative to this timestamp (see getToBeScheduledContainers()).
  private long amStartTime;

  private static final Logger LOG =
      LoggerFactory.getLogger(DAGAMSimulator.class);

  /**
   * Initializes the simulator and registers every container of the job as a
   * pending request.
   */
  @SuppressWarnings("checkstyle:parameternumber")
  @Override
  public void init(int heartbeatInterval,
      List<ContainerSimulator> containerList, ResourceManager resourceManager,
      SLSRunner slsRunnner, long startTime, long finishTime, String simUser,
      String simQueue, boolean tracked, String oldApp, long baseTimeMS,
      Resource amResource, String nodeLabelExpr, Map<String, String> params,
      Map<ApplicationId, AMSimulator> appIdAMSim) {
    super.init(heartbeatInterval, containerList, resourceManager, slsRunnner,
        startTime, finishTime, simUser, simQueue, tracked, oldApp, baseTimeMS,
        amResource, nodeLabelExpr, params, appIdAMSim);
    super.amtype = "dag";

    allContainers.addAll(containerList);
    pendingContainers.addAll(containerList);
    totalContainers = allContainers.size();

    LOG.info("Added new job with {} containers", allContainers.size());
  }

  @Override
  public void firstStep() throws Exception {
    super.firstStep();
    // Record when the AM started; request delays count from here.
    amStartTime = System.currentTimeMillis();
  }

  @Override
  public void initReservation(ReservationId reservationId,
      long deadline, long now) {
    // DAG AM doesn't support reservation
    setReservationRequest(null);
  }

  @Override
  public synchronized void notifyAMContainerLaunched(Container masterContainer)
      throws Exception {
    if (null != masterContainer) {
      // A (re)launched AM container means any previous run was lost; reset
      // the pending queue before resuming.
      restart();
      super.notifyAMContainerLaunched(masterContainer);
    }
  }

  /**
   * Drains the RM response queue: accounts for finished and killed
   * containers, detects job completion, and launches newly allocated
   * containers on their assigned node managers.
   */
  @Override
  protected void processResponseQueue() throws Exception {
    while (!responseQueue.isEmpty()) {
      AllocateResponse response = responseQueue.take();

      // check completed containers
      if (!response.getCompletedContainersStatuses().isEmpty()) {
        for (ContainerStatus cs : response.getCompletedContainersStatuses()) {
          ContainerId containerId = cs.getContainerId();
          if (cs.getExitStatus() == ContainerExitStatus.SUCCESS) {
            if (assignedContainers.containsKey(containerId)) {
              LOG.debug("Application {} has one container finished ({}).",
                  appId, containerId);
              ContainerSimulator containerSimulator =
                  assignedContainers.remove(containerId);
              finishedContainers++;
              completedContainers.add(containerSimulator);
            } else if (amContainer.getId().equals(containerId)) {
              // am container released event
              isFinished = true;
              LOG.info("Application {} goes to finish.", appId);
            }

            if (finishedContainers >= totalContainers) {
              lastStep();
            }
          } else {
            // container to be killed
            if (assignedContainers.containsKey(containerId)) {
              LOG.error("Application {} has one container killed ({}).", appId,
                  containerId);
              // Killed task containers are retried: move back to pending.
              pendingContainers.add(assignedContainers.remove(containerId));
            } else if (amContainer.getId().equals(containerId)) {
              LOG.error("Application {}'s AM is "
                  + "going to be killed. Waiting for rescheduling...", appId);
            }
          }
        }
      }

      // check finished
      if (isAMContainerRunning &&
          (finishedContainers >= totalContainers)) {
        isAMContainerRunning = false;
        LOG.info("Application {} sends out event to clean up"
            + " its AM container.", appId);
        isFinished = true;
        break;
      }

      // check allocated containers
      for (Container container : response.getAllocatedContainers()) {
        if (!scheduledContainers.isEmpty()) {
          // Pair the allocation with the oldest scheduled request and start
          // the simulated container on its node manager.
          ContainerSimulator cs = scheduledContainers.remove(0);
          LOG.debug("Application {} starts to launch a container ({}).",
              appId, container.getId());
          assignedContainers.put(container.getId(), cs);
          se.getNmMap().get(container.getNodeId())
              .addNewContainer(container, cs.getLifeTime());
        }
      }
    }
  }

  /**
   * Sends an allocate (heartbeat) request to the RM, asking for any pending
   * containers whose request delay has elapsed.
   */
  @Override
  protected void sendContainerRequest() throws Exception {
    if (isFinished) {
      return;
    }

    // send out request
    List<ResourceRequest> ask = null;
    if (finishedContainers != totalContainers) {
      if (!pendingContainers.isEmpty()) {
        List<ContainerSimulator> toBeScheduled =
            getToBeScheduledContainers(pendingContainers, amStartTime);
        if (toBeScheduled.size() > 0) {
          ask = packageRequests(toBeScheduled, PRIORITY);
          LOG.info("Application {} sends out request for {} containers.",
              appId, toBeScheduled.size());
          scheduledContainers.addAll(toBeScheduled);
          pendingContainers.removeAll(toBeScheduled);
          toBeScheduled.clear();
        }
      }
    }

    if (ask == null) {
      ask = new ArrayList<>();
    }

    final AllocateRequest request = createAllocateRequest(ask);
    if (totalContainers == 0) {
      request.setProgress(1.0f);
    } else {
      request.setProgress((float) finishedContainers / totalContainers);
    }

    // Impersonate the app attempt so the allocate call carries its AMRM token.
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser(appAttemptId.toString());
    Token<AMRMTokenIdentifier> token = rm.getRMContext().getRMApps()
        .get(appAttemptId.getApplicationId())
        .getRMAppAttempt(appAttemptId).getAMRMToken();
    ugi.addTokenIdentifier(token.decodeIdentifier());
    AllocateResponse response = ugi.doAs(
        (PrivilegedExceptionAction<AllocateResponse>) () -> rm
            .getApplicationMasterService().allocate(request));
    if (response != null) {
      responseQueue.put(response);
    }
  }

  /**
   * Returns the containers whose request delay, measured from the given AM
   * start time, has already elapsed.
   */
  @VisibleForTesting
  public List<ContainerSimulator> getToBeScheduledContainers(
      List<ContainerSimulator> containers, long startTime) {
    List<ContainerSimulator> toBeScheduled = new LinkedList<>();
    for (ContainerSimulator cs : containers) {
      // only request for the container if it is time to request
      if (cs.getRequestDelay() + startTime <=
          System.currentTimeMillis()) {
        toBeScheduled.add(cs);
      }
    }
    return toBeScheduled;
  }

  @Override
  protected void checkStop() {
    if (isFinished) {
      super.setEndTime(System.currentTimeMillis());
    }
  }

  @Override
  public void lastStep() throws Exception {
    super.lastStep();

    //clear data structures.
    allContainers.clear();
    pendingContainers.clear();
    scheduledContainers.clear();
    assignedContainers.clear();
    completedContainers.clear();
  }

  /**
   * restart running because of the am container killed.
   */
  private void restart() {
    isFinished = false;
    // Re-request every container that has not completed yet.
    pendingContainers.clear();
    pendingContainers.addAll(allContainers);
    pendingContainers.removeAll(completedContainers);
    amContainer = null;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.serviceusage.v1.model;
/**
* `QuotaLimit` defines a specific limit that applies over a specified duration for a limit type.
* There can be at most one limit for a duration and limit type combination defined within a
* `QuotaGroup`.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Service Usage API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class QuotaLimit extends com.google.api.client.json.GenericJson {

  /**
   * Default number of tokens consumable during the duration; 0 blocks all
   * requests and -1 means unlimited. Group-based quotas only. May be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long defaultLimit;

  /** Optional extended, user-visible description of this limit. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Optional user-visible display name; when unset the UI derives a default
   * from the quota configuration. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String displayName;

  /**
   * Limit duration in textual notation ("100s", "1d"); "0" means indefinite.
   * Group-based quotas only. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String duration;

  /**
   * Free tier shown in the Developers Console; only valid on a "1d" limit in
   * a billable group, defaulting to 0. Group-based quotas only. May be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long freeTier;

  /**
   * Maximum tokens per duration that developers may raise the default limit
   * to; -1 means unlimited. Group-based quotas only. May be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long maxLimit;

  /**
   * Metric this limit applies to; must be defined within the service config.
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String metric;

  /**
   * Unique limit name (alphanumeric plus '-', at most 64 characters).
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Unit of the limit using Metric.unit syntax, e.g. "1/min/{project}".
   * May be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String unit;

  /**
   * Tiered limit values keyed by tier name; currently only STANDARD is
   * supported. May be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.util.Map<String, java.lang.Long> values;

  /** @return the default limit, or {@code null} for none */
  public java.lang.Long getDefaultLimit() {
    return this.defaultLimit;
  }

  /** @param defaultLimit the default limit, or {@code null} for none */
  public QuotaLimit setDefaultLimit(java.lang.Long defaultLimit) {
    this.defaultLimit = defaultLimit;
    return this;
  }

  /** @return the description, or {@code null} for none */
  public java.lang.String getDescription() {
    return this.description;
  }

  /** @param description the description, or {@code null} for none */
  public QuotaLimit setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /** @return the display name, or {@code null} for none */
  public java.lang.String getDisplayName() {
    return this.displayName;
  }

  /** @param displayName the display name, or {@code null} for none */
  public QuotaLimit setDisplayName(java.lang.String displayName) {
    this.displayName = displayName;
    return this;
  }

  /** @return the duration, or {@code null} for none */
  public java.lang.String getDuration() {
    return this.duration;
  }

  /** @param duration the duration, or {@code null} for none */
  public QuotaLimit setDuration(java.lang.String duration) {
    this.duration = duration;
    return this;
  }

  /** @return the free tier, or {@code null} for none */
  public java.lang.Long getFreeTier() {
    return this.freeTier;
  }

  /** @param freeTier the free tier, or {@code null} for none */
  public QuotaLimit setFreeTier(java.lang.Long freeTier) {
    this.freeTier = freeTier;
    return this;
  }

  /** @return the maximum limit, or {@code null} for none */
  public java.lang.Long getMaxLimit() {
    return this.maxLimit;
  }

  /** @param maxLimit the maximum limit, or {@code null} for none */
  public QuotaLimit setMaxLimit(java.lang.Long maxLimit) {
    this.maxLimit = maxLimit;
    return this;
  }

  /** @return the metric name, or {@code null} for none */
  public java.lang.String getMetric() {
    return this.metric;
  }

  /** @param metric the metric name, or {@code null} for none */
  public QuotaLimit setMetric(java.lang.String metric) {
    this.metric = metric;
    return this;
  }

  /** @return the limit name, or {@code null} for none */
  public java.lang.String getName() {
    return this.name;
  }

  /** @param name the limit name, or {@code null} for none */
  public QuotaLimit setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /** @return the unit, or {@code null} for none */
  public java.lang.String getUnit() {
    return this.unit;
  }

  /** @param unit the unit, or {@code null} for none */
  public QuotaLimit setUnit(java.lang.String unit) {
    this.unit = unit;
    return this;
  }

  /** @return the tiered limit values, or {@code null} for none */
  public java.util.Map<String, java.lang.Long> getValues() {
    return this.values;
  }

  /** @param values the tiered limit values, or {@code null} for none */
  public QuotaLimit setValues(java.util.Map<String, java.lang.Long> values) {
    this.values = values;
    return this;
  }

  @Override
  public QuotaLimit set(String fieldName, Object value) {
    return (QuotaLimit) super.set(fieldName, value);
  }

  @Override
  public QuotaLimit clone() {
    return (QuotaLimit) super.clone();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.file.rfile;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.rfile.RelativeKey.SkippR;
import org.apache.accumulo.core.util.MutableByteSequence;
import org.apache.accumulo.core.util.UnsynchronizedBuffer;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class RelativeKeyTest {
@Test
public void testBasicRelativeKey() {
    // Exercises UnsynchronizedBuffer.nextArraySize: capacities grow in powers
    // of two and are clamped at Integer.MAX_VALUE. Each pair below is
    // {requested size, expected capacity}.
    int[][] cases = {
        {0, 1}, {1, 1}, {2, 2}, {3, 4}, {4, 4}, {5, 8}, {8, 8}, {9, 16},
        {(1 << 16) - 1, 1 << 16}, {1 << 16, 1 << 16}, {(1 << 16) + 1, 1 << 17},
        {(1 << 30) - 1, 1 << 30}, {1 << 30, 1 << 30},
        {Integer.MAX_VALUE - 1, Integer.MAX_VALUE},
        {Integer.MAX_VALUE, Integer.MAX_VALUE}};
    for (int[] c : cases) {
        assertEquals(c[1], UnsynchronizedBuffer.nextArraySize(c[0]));
    }
}
@Test
public void testCommonPrefix() {
    // Same object passed twice: getCommonPrefix reports -1 (exact match).
    ArrayByteSequence exact = new ArrayByteSequence("abc");
    assertEquals(-1, RelativeKey.getCommonPrefix(exact, exact));
    // Equal content in two distinct sequences must also be an exact match.
    // (commonPrefixHelper wraps each argument in its own ArrayByteSequence,
    // so `new String(...)` was unnecessary to defeat identity comparison.)
    assertEquals(-1, commonPrefixHelper("", ""));
    assertEquals(-1, commonPrefixHelper("a", "a"));
    assertEquals(-1, commonPrefixHelper("aa", "aa"));
    assertEquals(-1, commonPrefixHelper("aaa", "aaa"));
    assertEquals(-1, commonPrefixHelper("abab", "abab"));
    assertEquals(-1, commonPrefixHelper(new ArrayByteSequence("aaa").toString(), "aaa"));
    assertEquals(-1, commonPrefixHelper("abababababab".substring(3, 6), "ccababababcc".substring(3, 6)));
    // no common prefix
    assertEquals(0, commonPrefixHelper("", "a"));
    assertEquals(0, commonPrefixHelper("a", ""));
    assertEquals(0, commonPrefixHelper("a", "b"));
    assertEquals(0, commonPrefixHelper("aaaa", "bbbb"));
    // some common prefix (duplicate "ab"/"ac" assertion removed)
    assertEquals(1, commonPrefixHelper("a", "ab"));
    assertEquals(1, commonPrefixHelper("ab", "ac"));
    assertEquals(2, commonPrefixHelper("aa", "aaaa"));
    assertEquals(4, commonPrefixHelper("aaaaa", "aaaab"));
}
/** Wraps both strings as byte sequences and delegates to RelativeKey. */
private int commonPrefixHelper(String a, String b) {
    ArrayByteSequence left = new ArrayByteSequence(a);
    ArrayByteSequence right = new ArrayByteSequence(b);
    return RelativeKey.getCommonPrefix(left, right);
}
@Test
public void testReadWritePrefix() throws IOException {
    Key previous = new Key("row1", "columnfamily1", "columnqualifier1", "columnvisibility1", 1000);
    Key current = new Key("row2", "columnfamily2", "columnqualifier2", "columnvisibility2", 3000);
    RelativeKey expected = new RelativeKey(previous, current);

    // Round-trip: serialize the relative key, read it back against the same
    // previous key, and compare the reconstructed key.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    expected.write(new DataOutputStream(buffer));

    RelativeKey actual = new RelativeKey();
    actual.setPrevKey(previous);
    actual.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

    assertEquals(expected.getKey(), actual.getKey());
}
// Shared fixture built once for all seek tests: every key/value pair written
// to baos, plus the stream offset at which each relative key begins.
private static ArrayList<Key> expectedKeys;
private static ArrayList<Value> expectedValues;
private static ArrayList<Integer> expectedPositions;
private static ByteArrayOutputStream baos;
/**
 * Serializes a deterministic grid of keys (4 rows x 4 families x 4
 * qualifiers x 4 visibilities x 4 timestamps, each written twice: once with
 * the delete flag set, once without) as RelativeKeys into {@code baos},
 * recording every key, value, and stream position. 2 * 4^5 = 2048 entries.
 */
@BeforeClass
public static void initSource() throws IOException {
int initialListSize = 10000;
baos = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(baos);
expectedKeys = new ArrayList<>(initialListSize);
expectedValues = new ArrayList<>(initialListSize);
expectedPositions = new ArrayList<>(initialListSize);
Key prev = null;
int val = 0;
for (int row = 0; row < 4; row++) {
String rowS = RFileTest.formatString("r_", row);
for (int cf = 0; cf < 4; cf++) {
String cfS = RFileTest.formatString("cf_", cf);
for (int cq = 0; cq < 4; cq++) {
String cqS = RFileTest.formatString("cq_", cq);
for (int cv = 'A'; cv < 'A' + 4; cv++) {
String cvS = "" + (char) cv;
for (int ts = 4; ts > 0; ts--) {
// First entry: the deleted version of the key.
Key k = RFileTest.newKey(rowS, cfS, cqS, cvS, ts);
k.setDeleted(true);
Value v = RFileTest.newValue("" + val);
expectedPositions.add(out.size());
new RelativeKey(prev, k).write(out);
prev = k;
v.write(out);
expectedKeys.add(k);
expectedValues.add(v);
// Second entry: the same key without the delete flag (sorts after
// the deleted one), sharing the same value text.
k = RFileTest.newKey(rowS, cfS, cqS, cvS, ts);
v = RFileTest.newValue("" + val);
expectedPositions.add(out.size());
new RelativeKey(prev, k).write(out);
prev = k;
v.write(out);
expectedKeys.add(k);
expectedValues.add(v);
val++;
}
}
}
}
}
}
// Fresh stream over the shared serialized fixture, rebuilt for every test.
private DataInputStream in;
/** Opens {@code in} over the fixture bytes and marks position 0 so tests can reset. */
@Before
public void setupDataInputStream() {
in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
// mark(0): ByteArrayInputStream ignores the read limit, so tests may call
// in.reset() at any point to rewind to the beginning.
in.mark(0);
}
@Test
public void testSeekBeforeEverything() throws IOException {
    Key previous = new Key();
    Key current = null;
    MutableByteSequence value = new MutableByteSequence(new byte[64], 0, 0);

    // An empty seek key sorts before every entry: only the first one is read.
    RelativeKey.SkippR result =
        RelativeKey.fastSkip(in, new Key(), value, previous, current, expectedKeys.size());
    assertEquals(1, result.skipped);
    assertEquals(new Key(), result.prevKey);
    assertEquals(expectedKeys.get(0), result.rk.getKey());
    assertEquals(expectedValues.get(0).toString(), value.toString());

    // Ensure the stream can still be advanced after the fast skip.
    result.rk.readFields(in);
    assertEquals(expectedKeys.get(1), result.rk.getKey());

    // A small non-empty key (delete flag set) also sorts before all data,
    // so the outcome is identical.
    in.reset();
    Key smallKey = new Key("a", "b", "c", "d", 1);
    smallKey.setDeleted(true);
    result = RelativeKey.fastSkip(in, smallKey, value, previous, current, expectedKeys.size());
    assertEquals(1, result.skipped);
    assertEquals(new Key(), result.prevKey);
    assertEquals(expectedKeys.get(0), result.rk.getKey());
    assertEquals(expectedValues.get(0).toString(), value.toString());

    result.rk.readFields(in);
    assertEquals(expectedKeys.get(1), result.rk.getKey());
}
@Test(expected = EOFException.class)
public void testSeekAfterEverythingWrongCount() throws IOException {
    // "s..." sorts after every generated key ("r_..." rows). Claiming one more
    // entry than was actually written forces fastSkip off the end of the
    // stream, which must surface as an EOFException.
    Key afterAll = new Key("s", "t", "u", "v", 1);
    MutableByteSequence value = new MutableByteSequence(new byte[64], 0, 0);
    RelativeKey.fastSkip(in, afterAll, value, new Key(), (Key) null, expectedKeys.size() + 1);
}
/**
 * Seeking past the last written key must skip every entry without error.
 *
 * <p>Bug fix: this method was missing the {@code @Test} annotation, so JUnit 4
 * silently never executed it (unlike its sibling test methods, which are all
 * annotated).
 */
@Test
public void testSeekAfterEverything() throws IOException {
    Key seekKey = new Key("s", "t", "u", "v", 1);
    Key prevKey = new Key();
    Key currKey = null;
    MutableByteSequence value = new MutableByteSequence(new byte[64], 0, 0);
    SkippR skippr = RelativeKey.fastSkip(in, seekKey, value, prevKey, currKey, expectedKeys.size());
    // With the correct entry count, fastSkip consumes the whole stream.
    assertEquals(expectedKeys.size(), skippr.skipped);
}
@Test
public void testSeekMiddle() throws IOException {
    // Seek to an existing key in the middle of the serialized data.
    int seekIndex = expectedKeys.size() / 2;
    Key seekKey = expectedKeys.get(seekIndex);
    Key prevKey = new Key();
    Key currKey = null;
    MutableByteSequence value = new MutableByteSequence(new byte[64], 0, 0);
    RelativeKey.SkippR skippr = RelativeKey.fastSkip(in, seekKey, value, prevKey, currKey, expectedKeys.size());
    // fastSkip consumes the matching entry itself, hence seekIndex + 1.
    assertEquals(seekIndex + 1, skippr.skipped);
    assertEquals(expectedKeys.get(seekIndex - 1), skippr.prevKey);
    assertEquals(expectedKeys.get(seekIndex), skippr.rk.getKey());
    assertEquals(expectedValues.get(seekIndex).toString(), value.toString());
    skippr.rk.readFields(in);
    // NOTE(review): this asserts the NEXT value after a readFields() on the key —
    // presumably readFields also advances the shared value buffer; confirm
    // against RelativeKey.readFields.
    assertEquals(expectedValues.get(seekIndex + 1).toString(), value.toString());
    // try fast skipping to a key that does not exist
    in.reset();
    Key fKey = expectedKeys.get(seekIndex).followingKey(PartialKey.ROW_COLFAM_COLQUAL);
    int i;
    // advance i to the first expected key at or beyond fKey
    for (i = seekIndex; expectedKeys.get(i).compareTo(fKey) < 0; i++) {}
    int left = expectedKeys.size();
    skippr = RelativeKey.fastSkip(in, expectedKeys.get(i), value, prevKey, currKey, expectedKeys.size());
    assertEquals(i + 1, skippr.skipped);
    left -= skippr.skipped;
    assertEquals(expectedKeys.get(i - 1), skippr.prevKey);
    assertEquals(expectedKeys.get(i), skippr.rk.getKey());
    assertEquals(expectedValues.get(i).toString(), value.toString());
    // try fast skipping to our current location
    skippr = RelativeKey.fastSkip(in, expectedKeys.get(i), value, expectedKeys.get(i - 1), expectedKeys.get(i), left);
    assertEquals(0, skippr.skipped);
    assertEquals(expectedKeys.get(i - 1), skippr.prevKey);
    assertEquals(expectedKeys.get(i), skippr.rk.getKey());
    assertEquals(expectedValues.get(i).toString(), value.toString());
    // try fast skipping 1 column family ahead from our current location, testing
    // fastskip from middle of block as opposed to starting at beginning of block
    fKey = expectedKeys.get(i).followingKey(PartialKey.ROW_COLFAM);
    int j;
    for (j = i; expectedKeys.get(j).compareTo(fKey) < 0; j++) {}
    skippr = RelativeKey.fastSkip(in, fKey, value, expectedKeys.get(i - 1), expectedKeys.get(i), left);
    assertEquals(j - i, skippr.skipped);
    assertEquals(expectedKeys.get(j - 1), skippr.prevKey);
    assertEquals(expectedKeys.get(j), skippr.rk.getKey());
    assertEquals(expectedValues.get(j).toString(), value.toString());
}
}
| |
/*
* Copyright 2009-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.metaeffekt.core.inventory.processor.reader;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.metaeffekt.core.inventory.processor.model.*;
import org.springframework.util.StringUtils;

import java.util.*;
import java.util.function.BiConsumer;
/**
 * Reads an inventory XLS workbook into the object model. For each sheet the
 * header row is first parsed into an index-to-column-name map; data rows are
 * then materialized into {@link Artifact}, {@link LicenseMetaData},
 * {@link LicenseData}, {@link ComponentPatternData} and
 * {@link VulnerabilityMetaData} instances. Unknown columns are preserved in
 * the respective object's key/value store.
 */
public class InventoryReader extends AbstractXlsInventoryReader {

    // Column index to (untrimmed) header name, one map per sheet type.
    private Map<Integer, String> artifactColumnMap = new HashMap<>();
    private Map<Integer, String> licenseMetaDataColumnMap = new HashMap<>();
    private Map<Integer, String> licenseDataColumnMap = new HashMap<>();
    private Map<Integer, String> componentPatternDataColumnMap = new HashMap<>();
    private Map<Integer, String> vulnerabilityMetaDataColumnMap = new HashMap<>();

    @Override
    protected List<String> readArtifactHeader(HSSFRow row) {
        return parseColumns(row, artifactColumnMap);
    }

    @Override
    protected void readLicenseMetaDataHeader(HSSFRow row) {
        parseColumns(row, licenseMetaDataColumnMap);
    }

    @Override
    protected void readLicenseDataHeader(HSSFRow row) {
        parseColumns(row, licenseDataColumnMap);
    }

    @Override
    protected void readComponentPatternDataHeader(HSSFRow row) {
        parseColumns(row, componentPatternDataColumnMap);
    }

    @Override
    protected void readVulnerabilityMetaDataHeader(HSSFRow row) {
        parseColumns(row, vulnerabilityMetaDataColumnMap);
    }

    /**
     * Parses a header row into the given index-to-name map.
     *
     * <p>Null header cells are skipped, so the map may contain gaps; the data
     * readers below therefore tolerate missing indices.
     *
     * @param row the header row
     * @param map receives column index mapped to header name
     * @return the header names in cell order
     */
    protected List<String> parseColumns(HSSFRow row, Map<Integer, String> map) {
        List<String> columnList = new ArrayList<>();
        for (int i = 0; i < row.getPhysicalNumberOfCells(); i++) {
            HSSFCell cell = row.getCell(i);
            if (cell != null) {
                String value = cell.getStringCellValue();
                map.put(i, value);
                columnList.add(value);
            }
        }
        return columnList;
    }

    /**
     * Reads one artifact row.
     *
     * @param row the data row
     * @return the artifact, or {@code null} if the row does not yield a valid one
     */
    @Override
    protected Artifact readArtifactMetaData(HSSFRow row) {
        Artifact artifact = new Artifact();
        Set<String> projects = new LinkedHashSet<>();
        artifact.setProjects(projects);
        for (int i = 0; i < artifactColumnMap.size(); i++) {
            final String header = artifactColumnMap.get(i);
            if (header == null) {
                // header row had an empty cell at this index; previously this
                // caused an NPE on trim()
                continue;
            }
            final String columnName = header.trim();
            final HSSFCell myCell = row.getCell(i);
            final String value = myCell != null ? myCell.toString() : null;
            // compatibility: legacy combined column maps onto component
            if (columnName.equalsIgnoreCase("component / group")) {
                if (StringUtils.isEmpty(artifact.getComponent())) {
                    artifact.setComponent(value);
                }
                continue;
            }
            // if the column is not known we store the content in the key/value store
            if (StringUtils.hasText(value)) {
                artifact.set(columnName, value.trim());
            }
        }
        if (artifact.isValid()) {
            return artifact;
        }
        return null;
    }

    /**
     * Reads one license meta data row. Known columns are mapped onto dedicated
     * fields; any other column is preserved in the key/value store.
     *
     * @param row the data row
     * @return the license meta data, or {@code null} if invalid
     */
    @Override
    protected LicenseMetaData readLicenseMetaData(HSSFRow row) {
        final LicenseMetaData licenseMetaData = new LicenseMetaData();
        for (int i = 0; i < licenseMetaDataColumnMap.size(); i++) {
            final String header = licenseMetaDataColumnMap.get(i);
            if (header == null) {
                // gap in the header row; skip this index
                continue;
            }
            final String columnName = header.trim();
            final HSSFCell myCell = row.getCell(i);
            final String value = myCell != null ? myCell.toString() : null;
            if (columnName.equalsIgnoreCase("component")) {
                licenseMetaData.setComponent(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("version")) {
                licenseMetaData.setVersion(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("license")) {
                licenseMetaData.setLicense(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("license in effect")) {
                licenseMetaData.setLicenseInEffect(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("license notice") || columnName.equalsIgnoreCase("text")) {
                licenseMetaData.setNotice(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("comment")) {
                licenseMetaData.setComment(value);
                continue;
            }
            if (columnName.equalsIgnoreCase("source category")) {
                licenseMetaData.setSourceCategory(value);
                continue;
            }
            // if the column is not known we store the content in the key/value store
            if (value != null) {
                licenseMetaData.set(columnName, value.trim());
            }
        }
        if (licenseMetaData.isValid()) {
            return licenseMetaData;
        }
        return null;
    }

    /**
     * Copies every non-null cell of the row into the target via {@code setter},
     * keyed by the trimmed header name with the trimmed cell text as value.
     * Shared by the pure key/value sheet types below.
     */
    private void readKeyValuePairs(HSSFRow row, Map<Integer, String> columnMap,
            BiConsumer<String, String> setter) {
        for (int i = 0; i < columnMap.size(); i++) {
            final String header = columnMap.get(i);
            if (header == null) {
                // gap in the header row; skip this index
                continue;
            }
            final HSSFCell cell = row.getCell(i);
            final String value = cell != null ? cell.toString() : null;
            if (value != null) {
                setter.accept(header.trim(), value.trim());
            }
        }
    }

    /**
     * Reads one component pattern row as plain key/value pairs.
     *
     * @param row the data row
     * @return the component pattern data, or {@code null} if invalid
     */
    @Override
    protected ComponentPatternData readComponentPatternData(HSSFRow row) {
        final ComponentPatternData componentPatternData = new ComponentPatternData();
        readKeyValuePairs(row, componentPatternDataColumnMap, componentPatternData::set);
        if (componentPatternData.isValid()) {
            return componentPatternData;
        }
        return null;
    }

    /**
     * Reads one license data row as plain key/value pairs.
     *
     * @param row the data row
     * @return the license data, or {@code null} if invalid
     */
    @Override
    protected LicenseData readLicenseData(HSSFRow row) {
        final LicenseData licenseData = new LicenseData();
        readKeyValuePairs(row, licenseDataColumnMap, licenseData::set);
        if (licenseData.isValid()) {
            return licenseData;
        }
        return null;
    }

    /**
     * Reads one vulnerability meta data row as plain key/value pairs.
     *
     * @param row the data row
     * @return the vulnerability meta data, or {@code null} if invalid
     */
    @Override
    protected VulnerabilityMetaData readVulnerabilityMetaData(HSSFRow row) {
        final VulnerabilityMetaData vulnerabilityMetaData = new VulnerabilityMetaData();
        readKeyValuePairs(row, vulnerabilityMetaDataColumnMap, vulnerabilityMetaData::set);
        if (vulnerabilityMetaData.isValid()) {
            return vulnerabilityMetaData;
        }
        return null;
    }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.cf.code;
import com.android.dx.rop.code.LocalItem;
import com.android.dx.rop.code.RegisterSpec;
import com.android.dx.rop.cst.Constant;
import com.android.dx.rop.type.Prototype;
import com.android.dx.rop.type.StdTypeList;
import com.android.dx.rop.type.Type;
import com.android.dx.rop.type.TypeBearer;
import java.util.ArrayList;
/**
* Base implementation of {@link Machine}.
*
* <p><b>Note:</b> For the most part, the documentation for this class
* ignores the distinction between {@link Type} and {@link
* TypeBearer}.</p>
*/
public abstract class BaseMachine implements Machine {
    /** {@code non-null;} the prototype for the associated method */
    private final Prototype prototype;

    /** {@code non-null;} primary arguments */
    private TypeBearer[] args;

    /** {@code >= 0;} number of primary arguments */
    private int argCount;

    /** {@code null-ok;} type of the operation, if salient */
    private Type auxType;

    /** auxiliary {@code int} argument */
    private int auxInt;

    /** {@code null-ok;} auxiliary constant argument */
    private Constant auxCst;

    /** auxiliary branch target argument */
    private int auxTarget;

    /** {@code null-ok;} auxiliary switch cases argument */
    private SwitchList auxCases;

    /** {@code null-ok;} auxiliary initial value list for newarray */
    private ArrayList<Constant> auxInitValues;

    /** {@code >= -1;} last local accessed */
    private int localIndex;

    /** specifies if local has info in the local variable table */
    private boolean localInfo;

    /** {@code null-ok;} local target spec, if salient and calculated */
    private RegisterSpec localTarget;

    /** {@code non-null;} results */
    private TypeBearer[] results;

    /**
     * {@code >= -1;} count of the results, or {@code -1} if no results
     * have been set
     */
    private int resultCount;

    /**
     * Constructs an instance.
     *
     * @param prototype {@code non-null;} the prototype for the
     * associated method
     */
    public BaseMachine(Prototype prototype) {
        if (prototype == null) {
            throw new NullPointerException("prototype == null");
        }
        this.prototype = prototype;
        // Initial capacities; popArgs() grows args on demand. results is
        // fixed-size — addResult() assumes the result count stays small.
        args = new TypeBearer[10];
        results = new TypeBearer[6];
        clearArgs();
    }

    /** {@inheritDoc} */
    @Override
    public Prototype getPrototype() {
        return prototype;
    }

    /** {@inheritDoc} */
    @Override
    public final void clearArgs() {
        // Resets all per-instruction state, including the auxiliary values
        // and the result list (resultCount == -1 means "never set").
        argCount = 0;
        auxType = null;
        auxInt = 0;
        auxCst = null;
        auxTarget = 0;
        auxCases = null;
        auxInitValues = null;
        localIndex = -1;
        localInfo = false;
        localTarget = null;
        resultCount = -1;
    }

    /** {@inheritDoc} */
    @Override
    public final void popArgs(Frame frame, int count) {
        ExecutionStack stack = frame.getStack();
        clearArgs();
        if (count > args.length) {
            // Grow args, and add a little extra room to grow even more.
            args = new TypeBearer[count + 10];
        }
        // Pop in reverse so args ends up in declaration (push) order.
        for (int i = count - 1; i >= 0; i--) {
            args[i] = stack.pop();
        }
        argCount = count;
    }

    /** {@inheritDoc} */
    @Override
    public void popArgs(Frame frame, Prototype prototype) {
        StdTypeList types = prototype.getParameterTypes();
        int size = types.size();
        // Use the above method to do the actual popping...
        popArgs(frame, size);
        // ...and then verify the popped types.
        for (int i = 0; i < size; i++) {
            if (! Merger.isPossiblyAssignableFrom(types.getType(i), args[i])) {
                throw new SimException("at stack depth " + (size - 1 - i) +
                        ", expected type " + types.getType(i).toHuman() +
                        " but found " + args[i].getType().toHuman());
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public final void popArgs(Frame frame, Type type) {
        // Use the above method to do the actual popping...
        popArgs(frame, 1);
        // ...and then verify the popped type.
        if (! Merger.isPossiblyAssignableFrom(type, args[0])) {
            throw new SimException("expected type " + type.toHuman() +
                    " but found " + args[0].getType().toHuman());
        }
    }

    /** {@inheritDoc} */
    @Override
    public final void popArgs(Frame frame, Type type1, Type type2) {
        // Use the above method to do the actual popping...
        popArgs(frame, 2);
        // ...and then verify the popped types.
        if (! Merger.isPossiblyAssignableFrom(type1, args[0])) {
            throw new SimException("expected type " + type1.toHuman() +
                    " but found " + args[0].getType().toHuman());
        }
        if (! Merger.isPossiblyAssignableFrom(type2, args[1])) {
            throw new SimException("expected type " + type2.toHuman() +
                    " but found " + args[1].getType().toHuman());
        }
    }

    /** {@inheritDoc} */
    @Override
    public final void popArgs(Frame frame, Type type1, Type type2,
            Type type3) {
        // Use the above method to do the actual popping...
        popArgs(frame, 3);
        // ...and then verify the popped types.
        if (! Merger.isPossiblyAssignableFrom(type1, args[0])) {
            throw new SimException("expected type " + type1.toHuman() +
                    " but found " + args[0].getType().toHuman());
        }
        if (! Merger.isPossiblyAssignableFrom(type2, args[1])) {
            throw new SimException("expected type " + type2.toHuman() +
                    " but found " + args[1].getType().toHuman());
        }
        if (! Merger.isPossiblyAssignableFrom(type3, args[2])) {
            throw new SimException("expected type " + type3.toHuman() +
                    " but found " + args[2].getType().toHuman());
        }
    }

    /** {@inheritDoc} */
    @Override
    public final void localArg(Frame frame, int idx) {
        clearArgs();
        args[0] = frame.getLocals().get(idx);
        argCount = 1;
        localIndex = idx;
    }

    /** {@inheritDoc} */
    @Override
    public final void localInfo(boolean local) {
        localInfo = local;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxType(Type type) {
        auxType = type;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxIntArg(int value) {
        auxInt = value;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxCstArg(Constant cst) {
        if (cst == null) {
            throw new NullPointerException("cst == null");
        }
        auxCst = cst;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxTargetArg(int target) {
        auxTarget = target;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxSwitchArg(SwitchList cases) {
        if (cases == null) {
            throw new NullPointerException("cases == null");
        }
        auxCases = cases;
    }

    /** {@inheritDoc} */
    @Override
    public final void auxInitValues(ArrayList<Constant> initValues) {
        auxInitValues = initValues;
    }

    /** {@inheritDoc} */
    @Override
    public final void localTarget(int idx, Type type, LocalItem local) {
        localTarget = RegisterSpec.makeLocalOptional(idx, type, local);
    }

    /**
     * Gets the number of primary arguments.
     *
     * @return {@code >= 0;} the number of primary arguments
     */
    protected final int argCount() {
        return argCount;
    }

    /**
     * Gets the {@code n}th primary argument.
     *
     * @param n {@code >= 0, < argCount();} which argument
     * @return {@code non-null;} the indicated argument
     * @throws IllegalArgumentException if {@code n} is out of range
     */
    protected final TypeBearer arg(int n) {
        if (n >= argCount) {
            throw new IllegalArgumentException("n >= argCount");
        }
        try {
            return args[n];
        } catch (ArrayIndexOutOfBoundsException ex) {
            // Translate the exception.
            throw new IllegalArgumentException("n < 0");
        }
    }

    /**
     * Gets the type auxiliary argument.
     *
     * @return {@code null-ok;} the salient type
     */
    protected final Type getAuxType() {
        return auxType;
    }

    /**
     * Gets the {@code int} auxiliary argument.
     *
     * @return the argument value
     */
    protected final int getAuxInt() {
        return auxInt;
    }

    /**
     * Gets the constant auxiliary argument.
     *
     * @return {@code null-ok;} the argument value
     */
    protected final Constant getAuxCst() {
        return auxCst;
    }

    /**
     * Gets the branch target auxiliary argument.
     *
     * @return the argument value
     */
    protected final int getAuxTarget() {
        return auxTarget;
    }

    /**
     * Gets the switch cases auxiliary argument.
     *
     * @return {@code null-ok;} the argument value
     */
    protected final SwitchList getAuxCases() {
        return auxCases;
    }

    /**
     * Gets the init values auxiliary argument.
     *
     * @return {@code null-ok;} the argument value
     */
    protected final ArrayList<Constant> getInitValues() {
        return auxInitValues;
    }

    /**
     * Gets the last local index accessed.
     *
     * @return {@code >= -1;} the salient local index or {@code -1} if none
     * was set since the last time {@link #clearArgs} was called
     */
    protected final int getLocalIndex() {
        return localIndex;
    }

    /**
     * Gets the target local register spec of the current operation, if any.
     * The local target spec is the combination of the values indicated
     * by a previous call to {@link #localTarget} with the type of what
     * should be the sole result set by a call to {@link #setResult} (or
     * the combination {@link #clearResult} then {@link #addResult}).
     *
     * @param isMove {@code true} if the operation being performed on the
     * local is a move. This will cause constant values to be propagated
     * to the returned local
     * @return {@code null-ok;} the salient register spec or {@code null} if no
     * local target was set since the last time {@link #clearArgs} was
     * called
     */
    protected final RegisterSpec getLocalTarget(boolean isMove) {
        if (localTarget == null) {
            return null;
        }
        if (resultCount != 1) {
            throw new SimException("local target with " +
                    ((resultCount == 0) ? "no" : "multiple") + " results");
        }
        TypeBearer result = results[0];
        Type resultType = result.getType();
        Type localType = localTarget.getType();
        if (resultType == localType) {
            /*
             * If this is to be a move operation and the result is a
             * known value, make the returned localTarget embody that
             * value.
             */
            if (isMove) {
                return localTarget.withType(result);
            } else {
                return localTarget;
            }
        }
        if (! Merger.isPossiblyAssignableFrom(localType, resultType)) {
            // The result and local types are inconsistent. Complain!
            throwLocalMismatch(resultType, localType);
            return null;
        }
        if (localType == Type.OBJECT) {
            /*
             * The result type is more specific than the local type,
             * so use that instead. Note: this mutates the cached
             * localTarget field, not just the return value.
             */
            localTarget = localTarget.withType(result);
        }
        return localTarget;
    }

    /**
     * Clears the results.
     */
    protected final void clearResult() {
        resultCount = 0;
    }

    /**
     * Sets the results list to be the given single value.
     *
     * <p><b>Note:</b> If there is more than one result value, the
     * others may be added by using {@link #addResult}.</p>
     *
     * @param result {@code non-null;} result value
     */
    protected final void setResult(TypeBearer result) {
        if (result == null) {
            throw new NullPointerException("result == null");
        }
        results[0] = result;
        resultCount = 1;
    }

    /**
     * Adds an additional element to the list of results.
     *
     * <p>Note: the results array is fixed-size; callers are expected to
     * add at most {@code results.length} results per operation.</p>
     *
     * @see #setResult
     *
     * @param result {@code non-null;} result value
     */
    protected final void addResult(TypeBearer result) {
        if (result == null) {
            throw new NullPointerException("result == null");
        }
        results[resultCount] = result;
        resultCount++;
    }

    /**
     * Gets the count of results. This throws an exception if results were
     * never set. (Explicitly clearing the results counts as setting them.)
     *
     * @return {@code >= 0;} the count
     */
    protected final int resultCount() {
        if (resultCount < 0) {
            throw new SimException("results never set");
        }
        return resultCount;
    }

    /**
     * Gets the {@code n}th result value.
     *
     * @param n {@code >= 0, < resultCount();} which result
     * @return {@code non-null;} the indicated result value
     * @throws IllegalArgumentException if {@code n} is out of range
     */
    protected final TypeBearer result(int n) {
        if (n >= resultCount) {
            throw new IllegalArgumentException("n >= resultCount");
        }
        try {
            return results[n];
        } catch (ArrayIndexOutOfBoundsException ex) {
            // Translate the exception.
            throw new IllegalArgumentException("n < 0");
        }
    }

    /**
     * Stores the results of the latest operation into the given frame. If
     * there is a local target (see {@link #localTarget}), then the sole
     * result is stored to that target; otherwise any results are pushed
     * onto the stack.
     *
     * @param frame {@code non-null;} frame to operate on
     */
    protected final void storeResults(Frame frame) {
        if (resultCount < 0) {
            throw new SimException("results never set");
        }
        if (resultCount == 0) {
            // Nothing to do.
            return;
        }
        if (localTarget != null) {
            /*
             * Note: getLocalTarget() doesn't necessarily return
             * localTarget directly.
             */
            frame.getLocals().set(getLocalTarget(false));
        } else {
            ExecutionStack stack = frame.getStack();
            for (int i = 0; i < resultCount; i++) {
                if (localInfo) {
                    stack.setLocal();
                }
                stack.push(results[i]);
            }
        }
    }

    /**
     * Throws an exception that indicates a mismatch in local variable
     * types.
     *
     * @param found {@code non-null;} the encountered type
     * @param local {@code non-null;} the local variable's claimed type
     */
    public static void throwLocalMismatch(TypeBearer found,
            TypeBearer local) {
        throw new SimException("local variable type mismatch: " +
                "attempt to set or access a value of type " +
                found.toHuman() +
                " using a local variable of type " +
                local.toHuman() +
                ". This is symptomatic of .class transformation tools " +
                "that ignore local variable information.");
    }
}
| |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.mledger.impl;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.mledger.Entry;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.ManagedLedgerFactoryConfig;
import org.apache.bookkeeper.test.MockedBookKeeperTestCase;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test
public class EntryCacheManagerTest extends MockedBookKeeperTestCase {

    // Mocked managed ledgers acting as cache owners; configured once per class.
    ManagedLedgerImpl ml1;
    ManagedLedgerImpl ml2;

    @BeforeClass
    void setup() throws Exception {
        // Both mocks share one scheduler; only name and executor are stubbed.
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        ml1 = mock(ManagedLedgerImpl.class);
        when(ml1.getScheduledExecutor()).thenReturn(executor);
        when(ml1.getName()).thenReturn("cache1");
        ml2 = mock(ManagedLedgerImpl.class);
        when(ml2.getScheduledExecutor()).thenReturn(executor);
        when(ml2.getName()).thenReturn("cache2");
    }

    /**
     * Exercises insertion, size accounting, eviction and invalidation across
     * two caches sharing a 10-byte budget (0.8 eviction watermark).
     */
    @Test
    void simple() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(10);
        config.setCacheEvictionWatermark(0.8);
        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);
        EntryCache cache2 = cacheManager.getEntryCache(ml2);
        // 4 + 3 payload bytes in cache1
        cache1.insert(new EntryImpl(1, 1, new byte[4]));
        cache1.insert(new EntryImpl(1, 0, new byte[3]));
        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 7);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        cache2.insert(new EntryImpl(2, 0, new byte[1]));
        cache2.insert(new EntryImpl(2, 1, new byte[1]));
        cache2.insert(new EntryImpl(2, 2, new byte[1]));
        assertEquals(cache2.getSize(), 3);
        assertEquals(cacheManager.getSize(), 10);
        // Next insert should trigger a cache eviction to force the size to 8
        // The algorithm should evict entries from cache1
        cache2.insert(new EntryImpl(2, 3, new byte[1]));
        // Wait for eviction to be completed in background
        // NOTE(review): fixed sleep makes this timing-sensitive; consider
        // polling/awaiting the eviction instead.
        Thread.sleep(100);
        assertEquals(cacheManager.getSize(), 7);
        assertEquals(cache1.getSize(), 4);
        assertEquals(cache2.getSize(), 3);
        cacheManager.removeEntryCache("cache1");
        assertEquals(cacheManager.getSize(), 3);
        assertEquals(cache2.getSize(), 3);
        // Should remove 2 entries
        cache2.invalidateEntries(new PositionImpl(2, 1));
        assertEquals(cacheManager.getSize(), 1);
        assertEquals(cache2.getSize(), 1);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 1);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 1);
    }

    /**
     * Inserting an entry with an already-cached position must be rejected and
     * leave sizes unchanged.
     */
    @Test
    void doubleInsert() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(10);
        config.setCacheEvictionWatermark(0.8);
        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);
        assertEquals(cache1.insert(new EntryImpl(1, 1, new byte[4])), true);
        assertEquals(cache1.insert(new EntryImpl(1, 0, new byte[3])), true);
        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);
        // Same position (1, 0) again — insert must fail, sizes untouched.
        assertEquals(cache1.insert(new EntryImpl(1, 0, new byte[5])), false);
        assertEquals(cache1.getSize(), 7);
        assertEquals(cacheManager.getSize(), 7);
    }

    /**
     * With a zero cache budget the manager hands out no-op (disabled) caches
     * and every insert is discarded.
     */
    @Test
    void cacheDisabled() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(0);
        config.setCacheEvictionWatermark(0.8);
        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        EntryCache cache1 = cacheManager.getEntryCache(ml1);
        EntryCache cache2 = cacheManager.getEntryCache(ml2);
        assertTrue(cache1 instanceof EntryCacheManager.EntryCacheDisabled);
        assertTrue(cache2 instanceof EntryCacheManager.EntryCacheDisabled);
        cache1.insert(new EntryImpl(1, 1, new byte[4]));
        cache1.insert(new EntryImpl(1, 0, new byte[3]));
        assertEquals(cache1.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        cache2.insert(new EntryImpl(2, 0, new byte[1]));
        cache2.insert(new EntryImpl(2, 1, new byte[1]));
        cache2.insert(new EntryImpl(2, 2, new byte[1]));
        assertEquals(cache2.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);
    }

    /**
     * A ledger without any active consumer must not cache appended entries.
     */
    @Test
    void verifyNoCacheIfNoConsumer() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(7 * 10);
        config.setCacheEvictionWatermark(0.8);
        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("ledger");
        EntryCache cache1 = ledger.entryCache;
        for (int i = 0; i < 10; i++) {
            ledger.addEntry(("entry-" + i).getBytes());
        }
        assertEquals(cache1.getSize(), 0);
        assertEquals(cacheManager.getSize(), 0);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMaxSize(), 7 * 10);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
    }

    /**
     * Verifies hit/miss/throughput stats over reads from two cursors, and that
     * deactivating a cursor and discarding entries resets usage.
     * Each of the 10 entries is 7 bytes ("entry-N"), hence the 70-byte figures.
     */
    @Test
    void verifyHitsMisses() throws Exception {
        ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig();
        config.setMaxCacheSize(7 * 10);
        config.setCacheEvictionWatermark(0.8);
        factory = new ManagedLedgerFactoryImpl(bkc, bkc.getZkHandle(), config);
        EntryCacheManager cacheManager = factory.getEntryCacheManager();
        ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("ledger");
        ManagedCursorImpl c1 = (ManagedCursorImpl) ledger.openCursor("c1");
        ManagedCursorImpl c2 = (ManagedCursorImpl) ledger.openCursor("c2");
        for (int i = 0; i < 10; i++) {
            ledger.addEntry(("entry-" + i).getBytes());
        }
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        // First cursor read: 10 hits, 70 bytes served from cache.
        List<Entry> entries = c1.readEntries(10);
        assertEquals(entries.size(), 10);
        entries.forEach(e -> e.release());
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 10.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 70.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        // Deactivating a cursor does not affect usage; rates reset per interval.
        ledger.deactivateCursor(c1);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        entries = c2.readEntries(10);
        assertEquals(entries.size(), 10);
        entries.forEach(e -> e.release());
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 70);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 10.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 70.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
        // Advancing the last cursor past everything allows discarding the cache.
        PositionImpl pos = (PositionImpl) entries.get(entries.size() - 1).getPosition();
        c2.setReadPosition(pos);
        ledger.discardEntriesFromCache(c2, pos);
        cacheManager.mlFactoryMBean.refreshStats(1, TimeUnit.SECONDS);
        assertEquals(cacheManager.mlFactoryMBean.getCacheUsedSize(), 0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheMissesRate(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getCacheHitsThroughput(), 0.0);
        assertEquals(cacheManager.mlFactoryMBean.getNumberOfCacheEvictions(), 0);
    }
}
| |
/*
* Copyright 1997-2017 Optimatika (www.optimatika.se)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.algo.optimisation.integer;
import static org.algo.constant.PrimitiveMath.*;
import static org.algo.function.PrimitiveFunction.*;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import org.algo.access.Access1D;
import org.algo.function.aggregator.Aggregator;
import org.algo.matrix.store.MatrixStore;
import org.algo.matrix.store.PrimitiveDenseStore;
import org.algo.netio.BasicLogger;
import org.algo.netio.CharacterRing;
import org.algo.netio.CharacterRing.PrinterBuffer;
import org.algo.optimisation.ExpressionsBasedModel;
import org.algo.optimisation.Optimisation;
import org.algo.optimisation.Variable;
import org.algo.type.TypeUtils;
/**
 * Branch-and-bound integer (MIP) solver. The search tree is explored with
 * {@link BranchAndBoundNodeTask} instances submitted to the {@link ForkJoinPool}
 * common pool, so sibling branches may be solved in parallel.
 *
 * @author apete
 */
public final class OldIntegerSolver extends IntegerSolver {

    /**
     * One node of the branch-and-bound search tree. Implemented as a
     * {@link RecursiveTask} so that the two child branches of a node can be
     * computed concurrently. The task result is {@code true} for a "normal"
     * exit and {@code false} when iteration/time limits were reached or an
     * inconsistency was detected.
     */
    final class BranchAndBoundNodeTask extends RecursiveTask<Boolean> {

        private final NodeKey myKey;
        // Per-node trace buffer; only allocated when debugging is enabled, otherwise null.
        private final PrinterBuffer myPrinter = OldIntegerSolver.this.isDebug() ? new CharacterRing().asPrinter() : null;

        private BranchAndBoundNodeTask(final NodeKey key) {
            super();
            myKey = key;
        }

        /** Creates the root node covering the unrestricted model. */
        BranchAndBoundNodeTask() {
            super();
            myKey = new NodeKey(OldIntegerSolver.this.getModel());
        }

        @Override
        public String toString() {
            return myKey.toString();
        }

        private boolean isNodeDebug() {
            return (myPrinter != null) && OldIntegerSolver.this.isDebug();
        }

        /**
         * Solves this node's relaxed subproblem. If the relaxed optimum is not
         * integer feasible and the node is still promising, a lower and an upper
         * branch are created and computed (upper branch forked to another thread).
         *
         * @return {@code false} if limits were hit or an "optimal" solution failed
         *         validation; {@code true} otherwise.
         */
        @Override
        protected Boolean compute() {

            if (this.isNodeDebug()) {
                myPrinter.println("\nBranch&Bound Node");
                myPrinter.println(myKey.toString());
                myPrinter.println(OldIntegerSolver.this.toString());
            }

            if (!OldIntegerSolver.this.isIterationAllowed() || !OldIntegerSolver.this.isIterationNecessary()) {
                if (this.isNodeDebug()) {
                    myPrinter.println("Reached iterations or time limit - stop!");
                    this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                }
                return false;
            }

            if (OldIntegerSolver.this.isExplored(this)) {
                if (this.isNodeDebug()) {
                    myPrinter.println("Node previously explored!");
                    this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                }
                return true;
            } else {
                OldIntegerSolver.this.markAsExplored(this);
            }

            if (!OldIntegerSolver.this.isGoodEnoughToContinueBranching(myKey.objective)) {
                if (this.isNodeDebug()) {
                    myPrinter.println("No longer a relevant node!");
                    this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                }
                return true;
            }

            ExpressionsBasedModel tmpNodeModel = this.getModel();
            final Result tmpBestResultSoFar = OldIntegerSolver.this.getBestResultSoFar();
            final Optimisation.Result tmpNodeResult = tmpNodeModel.solve(tmpBestResultSoFar);

            if (this.isNodeDebug()) {
                myPrinter.println("Node Result: {}", tmpNodeResult);
            }

            OldIntegerSolver.this.incrementIterationsCount();

            if (tmpNodeResult.getState().isOptimal()) {
                if (this.isNodeDebug()) {
                    myPrinter.println("Node solved to optimality!");
                }

                if (OldIntegerSolver.this.options.validate && !tmpNodeModel.validate(tmpNodeResult)) {
                    // This should not be possible. There is a bug somewhere.
                    // FIX: myPrinter is only allocated when debugging is enabled; the
                    // trace output must be guarded, otherwise enabling options.validate
                    // without debug would throw a NullPointerException here.
                    if (this.isNodeDebug()) {
                        myPrinter.println("Node solution marked as OPTIMAL, but is actually INVALID/INFEASIBLE/FAILED. Stop this branch!");
                        myPrinter.println("Lower bounds: {}", Arrays.toString(myKey.getLowerBounds()));
                        myPrinter.println("Upper bounds: {}", Arrays.toString(myKey.getUpperBounds()));
                        tmpNodeModel.validate(tmpNodeResult, myPrinter);
                        this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                    }
                    return false;
                }

                final int tmpBranchIndex = OldIntegerSolver.this.identifyNonIntegerVariable(tmpNodeResult, myKey);
                final double tmpSolutionValue = OldIntegerSolver.this.evaluateFunction(tmpNodeResult);

                if (tmpBranchIndex == -1) {
                    // Integer feasible - record it as a candidate (possibly incumbent) solution.
                    if (this.isNodeDebug()) {
                        myPrinter.println("Integer solution! Store it among the others, and stop this branch!");
                    }

                    final Optimisation.Result tmpIntegerSolutionResult = new Optimisation.Result(Optimisation.State.FEASIBLE, tmpSolutionValue, tmpNodeResult);

                    OldIntegerSolver.this.markInteger(myKey, tmpIntegerSolutionResult);

                    if (this.isNodeDebug()) {
                        myPrinter.println(OldIntegerSolver.this.getBestResultSoFar().toString());
                        BasicLogger.debug();
                        BasicLogger.debug(OldIntegerSolver.this.toString());
                        this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                    }

                } else {
                    if (this.isNodeDebug()) {
                        myPrinter.println("Not an Integer Solution: " + tmpSolutionValue);
                    }

                    final double tmpVariableValue = tmpNodeResult.doubleValue(OldIntegerSolver.this.getGlobalIndex(tmpBranchIndex));

                    if (OldIntegerSolver.this.isGoodEnoughToContinueBranching(tmpSolutionValue)) {
                        if (this.isNodeDebug()) {
                            myPrinter.println("Still hope, branching on {} @ {} >>> {}", tmpBranchIndex, tmpVariableValue,
                                    tmpNodeModel.getVariable(OldIntegerSolver.this.getGlobalIndex(tmpBranchIndex)));
                            this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                        }

                        // Release this node's model before forking - each branch builds its own.
                        tmpNodeModel.dispose();
                        tmpNodeModel = null;

                        final BranchAndBoundNodeTask tmpLowerBranchTask = this.createLowerBranch(tmpBranchIndex, tmpVariableValue, tmpSolutionValue);
                        final BranchAndBoundNodeTask tmpUpperBranchTask = this.createUpperBranch(tmpBranchIndex, tmpVariableValue, tmpSolutionValue);

                        // Fork the upper branch to another worker; compute the lower branch here.
                        tmpUpperBranchTask.fork();
                        final boolean tmpLowerBranchValue = tmpLowerBranchTask.compute();
                        final boolean tmpUpperBranchValue = tmpUpperBranchTask.join();

                        // Non-short-circuit '&' is deliberate: both values are already computed.
                        return tmpLowerBranchValue & tmpUpperBranchValue;

                    } else {
                        if (this.isNodeDebug()) {
                            myPrinter.println("Can't find better integer solutions - stop this branch!");
                            this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                        }
                    }
                }

            } else {
                if (this.isNodeDebug()) {
                    myPrinter.println("Failed to solve node problem - stop this branch!");
                    this.flush(OldIntegerSolver.this.getModel().options.debug_appender);
                }
            }

            return true;
        }

        BranchAndBoundNodeTask createLowerBranch(final int branchIndex, final double nonIntegerValue, final double parentObjectiveValue) {
            final NodeKey tmpKey = myKey.createLowerBranch(branchIndex, nonIntegerValue, parentObjectiveValue);
            return new BranchAndBoundNodeTask(tmpKey);
        }

        BranchAndBoundNodeTask createUpperBranch(final int branchIndex, final double nonIntegerValue, final double parentObjectiveValue) {
            final NodeKey tmpKey = myKey.createUpperBranch(branchIndex, nonIntegerValue, parentObjectiveValue);
            return new BranchAndBoundNodeTask(tmpKey);
        }

        /** Copies the buffered node trace to the given receiver, if both exist. */
        void flush(final BasicLogger.Printer receiver) {
            if ((myPrinter != null) && (receiver != null)) {
                myPrinter.flush(receiver);
            }
        }

        NodeKey getKey() {
            return myKey;
        }

        /**
         * Builds the relaxed model for this node: integer requirements dropped,
         * variable bounds tightened to this node's branching bounds, and - when an
         * integer incumbent already exists - the objective limited by the MIP gap.
         */
        ExpressionsBasedModel getModel() {

            final ExpressionsBasedModel retVal = OldIntegerSolver.this.getModel().relax(false);

            final int[] tmpIntegerIndeces = OldIntegerSolver.this.getIntegerIndeces();
            for (int i = 0; i < tmpIntegerIndeces.length; i++) {

                final BigDecimal tmpLowerBound = myKey.getLowerBound(i);
                final BigDecimal tmpUpperBound = myKey.getUpperBound(i);

                final Variable tmpVariable = retVal.getVariable(tmpIntegerIndeces[i]);
                tmpVariable.lower(tmpLowerBound);
                tmpVariable.upper(tmpUpperBound);

                // Clamp any preset variable value into the node's bounds.
                BigDecimal tmpValue = tmpVariable.getValue();
                if (tmpValue != null) {
                    if (tmpLowerBound != null) {
                        tmpValue = tmpValue.max(tmpLowerBound);
                    }
                    if (tmpUpperBound != null) {
                        tmpValue = tmpValue.min(tmpUpperBound);
                    }
                    tmpVariable.setValue(tmpValue);
                }
            }

            if (OldIntegerSolver.this.isIntegerSolutionFound()) {
                // Prune: require any new solution to beat the incumbent by the MIP gap.
                final double tmpBestValue = OldIntegerSolver.this.getBestResultSoFar().getValue();
                final double tmpGap = ABS.invoke(tmpBestValue * OldIntegerSolver.this.options.mip_gap);
                if (retVal.isMinimisation()) {
                    retVal.limitObjective(null, TypeUtils.toBigDecimal(tmpBestValue - tmpGap, OldIntegerSolver.this.options.problem));
                } else {
                    retVal.limitObjective(TypeUtils.toBigDecimal(tmpBestValue + tmpGap, OldIntegerSolver.this.options.problem), null);
                }
            }

            return retVal;
        }

    }

    // Nodes already processed; synchronized since branches run on multiple threads.
    private final Set<NodeKey> myExploredNodes = Collections.synchronizedSet(new HashSet<NodeKey>());
    // Model indices of the integer variables, in the order returned by getIntegerVariables().
    private final int[] myIntegerIndeces;

    OldIntegerSolver(final ExpressionsBasedModel model, final Options solverOptions) {

        super(model, solverOptions);

        final List<Variable> tmpIntegerVariables = model.getIntegerVariables();
        myIntegerIndeces = new int[tmpIntegerVariables.size()];
        for (int i = 0; i < myIntegerIndeces.length; i++) {
            final Variable tmpVariable = tmpIntegerVariables.get(i);
            myIntegerIndeces[i] = model.indexOf(tmpVariable);
        }
    }

    /**
     * Runs the branch-and-bound search on the common fork/join pool.
     *
     * @param kickStarter an optional (possibly user-supplied) starting solution;
     *        used as the initial incumbent only if it validates against the model.
     * @return the best result found, with state OPTIMAL/FEASIBLE/INFEASIBLE/FAILED
     *         depending on feasibility and whether the search exited normally.
     */
    public Result solve(final Result kickStarter) {

        // Must verify that it actually is an integer solution
        // The kickStarter may be user-supplied
        if ((kickStarter != null) && kickStarter.getState().isFeasible() && this.getModel().validate(kickStarter)) {
            this.markInteger(null, kickStarter);
        }

        this.resetIterationsCount();

        final BranchAndBoundNodeTask tmpNodeTask = new BranchAndBoundNodeTask();
        final boolean tmpNormalExit = ForkJoinPool.commonPool().invoke(tmpNodeTask);

        Optimisation.Result retVal = this.getBestResultSoFar();

        // Normal exit + feasible => proven OPTIMAL; aborted + feasible => only FEASIBLE.
        if (retVal.getState().isFeasible()) {
            if (tmpNormalExit) {
                retVal = new Optimisation.Result(State.OPTIMAL, retVal);
            } else {
                retVal = new Optimisation.Result(State.FEASIBLE, retVal);
            }
        } else {
            if (tmpNormalExit) {
                retVal = new Optimisation.Result(State.INFEASIBLE, retVal);
            } else {
                retVal = new Optimisation.Result(State.FAILED, retVal);
            }
        }

        return retVal;
    }

    @Override
    public String toString() {
        return TypeUtils.format("Solutions={} Nodes/Iterations={} {}", this.countIntegerSolutions(), this.countExploredNodes(), this.getBestResultSoFar());
    }

    @Override
    protected MatrixStore<Double> extractSolution() {
        return PrimitiveDenseStore.FACTORY.columns(this.getBestResultSoFar());
    }

    @Override
    protected boolean initialise(final Result kickStarter) {
        return true;
    }

    @Override
    protected boolean needsAnotherIteration() {
        return !this.getState().isOptimal();
    }

    @Override
    protected boolean validate() {

        boolean retVal = true;
        this.setState(State.VALID);

        try {
            if (!(retVal = this.getModel().validate())) {
                retVal = false;
                this.setState(State.INVALID);
            }
        } catch (final Exception ex) {
            // Any failure while validating the model counts as FAILED (not merely INVALID).
            retVal = false;
            this.setState(State.FAILED);
        }

        return retVal;
    }

    int countExploredNodes() {
        return myExploredNodes.size();
    }

    int getGlobalIndex(final int integerIndex) {
        return myIntegerIndeces[integerIndex];
    }

    final int[] getIntegerIndeces() {
        return myIntegerIndeces;
    }

    /**
     * Should return the index of the (best) variable to branch on. Returning a negative index means an
     * integer solution has been found (no further branching).
     */
    int identifyNonIntegerVariable(final Optimisation.Result nodeResult, final NodeKey nodeKey) {

        int retVal = -1;

        double tmpFraction;
        double tmpScale;
        double tmpMaxFraction = ZERO;

        for (int i = 0; i < myIntegerIndeces.length; i++) {
            tmpFraction = nodeKey.getFraction(i, nodeResult.doubleValue(myIntegerIndeces[i]));
            if (this.isIntegerSolutionFound()) {
                // Once an incumbent exists, weight fractions by the scaled gradient magnitude.
                final MatrixStore<Double> tmpGradient = this.getGradient(Access1D.asPrimitive1D(nodeResult));
                if ((tmpScale = tmpGradient.aggregateAll(Aggregator.LARGEST)) > ZERO) {
                    tmpFraction *= (ONE + (ABS.invoke(tmpGradient.doubleValue(myIntegerIndeces[i])) / tmpScale));
                }
            }
            if ((tmpFraction > tmpMaxFraction) && !options.integer.isZero(tmpFraction)) {
                retVal = i;
                tmpMaxFraction = tmpFraction;
            }
        }

        return retVal;
    }

    boolean isExplored(final BranchAndBoundNodeTask aNodeTask) {
        return myExploredNodes.contains(aNodeTask.getKey());
    }

    void markAsExplored(final BranchAndBoundNodeTask aNodeTask) {
        myExploredNodes.add(aNodeTask.getKey());
    }

}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.recoveryservicesbackup.implementation;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.recoveryservicesbackup.fluent.ProtectableContainersClient;
import com.azure.resourcemanager.recoveryservicesbackup.fluent.models.ProtectableContainerResourceInner;
import com.azure.resourcemanager.recoveryservicesbackup.models.ProtectableContainerResourceList;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in ProtectableContainersClient. */
public final class ProtectableContainersClientImpl implements ProtectableContainersClient {
    // NOTE(review): not referenced in this class; emitted and retained by the AutoRest code generator.
    private final ClientLogger logger = new ClientLogger(ProtectableContainersClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final ProtectableContainersService service;

    /** The service client containing this operation class. */
    private final RecoveryServicesBackupClientImpl client;

    /**
     * Initializes an instance of ProtectableContainersClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    ProtectableContainersClientImpl(RecoveryServicesBackupClientImpl client) {
        // RestProxy builds a runtime implementation of the annotated service interface
        // on top of the client's HTTP pipeline and serializer.
        this.service =
            RestProxy
                .create(ProtectableContainersService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for RecoveryServicesBackupClientProtectableContainers to be used by the
     * proxy service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "RecoveryServicesBack")
    private interface ProtectableContainersService {
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices"
                + "/vaults/{vaultName}/backupFabrics/{fabricName}/protectableContainers")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ProtectableContainerResourceList>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("vaultName") String vaultName,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("fabricName") String fabricName,
            @QueryParam("$filter") String filter,
            @HeaderParam("Accept") String accept,
            Context context);

        // nextLink is the full URL returned by the previous page, so it must not be re-encoded.
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<ProtectableContainerResourceList>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @param filter OData filter options.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources along with {@link PagedResponse} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ProtectableContainerResourceInner>> listSinglePageAsync(
        String vaultName, String resourceGroupName, String fabricName, String filter) {
        // Fail fast (as a Mono error) on missing required parameters before issuing the request.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (vaultName == null) {
            return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (fabricName == null) {
            return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Execute within the subscriber's Reactor context, then map the raw response into a page.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            vaultName,
                            resourceGroupName,
                            this.client.getSubscriptionId(),
                            fabricName,
                            filter,
                            accept,
                            context))
            .<PagedResponse<ProtectableContainerResourceInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate any context configured on the client into the reactive chain.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @param filter OData filter options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources along with {@link PagedResponse} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ProtectableContainerResourceInner>> listSinglePageAsync(
        String vaultName, String resourceGroupName, String fabricName, String filter, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (vaultName == null) {
            return Mono.error(new IllegalArgumentException("Parameter vaultName is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (fabricName == null) {
            return Mono.error(new IllegalArgumentException("Parameter fabricName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge client-level defaults into the caller-supplied context.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                vaultName,
                resourceGroupName,
                this.client.getSubscriptionId(),
                fabricName,
                filter,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @param filter OData filter options.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ProtectableContainerResourceInner> listAsync(
        String vaultName, String resourceGroupName, String fabricName, String filter) {
        // First supplier fetches page one; the lambda fetches subsequent pages via nextLink.
        return new PagedFlux<>(
            () -> listSinglePageAsync(vaultName, resourceGroupName, fabricName, filter),
            nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ProtectableContainerResourceInner> listAsync(
        String vaultName, String resourceGroupName, String fabricName) {
        final String filter = null;
        return new PagedFlux<>(
            () -> listSinglePageAsync(vaultName, resourceGroupName, fabricName, filter),
            nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @param filter OData filter options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<ProtectableContainerResourceInner> listAsync(
        String vaultName, String resourceGroupName, String fabricName, String filter, Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(vaultName, resourceGroupName, fabricName, filter, context),
            nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ProtectableContainerResourceInner> list(
        String vaultName, String resourceGroupName, String fabricName) {
        final String filter = null;
        // Synchronous facade: blocks on the async paged flux page by page.
        return new PagedIterable<>(listAsync(vaultName, resourceGroupName, fabricName, filter));
    }

    /**
     * Lists the containers that can be registered to Recovery Services Vault.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param fabricName The fabricName parameter.
     * @param filter OData filter options.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<ProtectableContainerResourceInner> list(
        String vaultName, String resourceGroupName, String fabricName, String filter, Context context) {
        return new PagedIterable<>(listAsync(vaultName, resourceGroupName, fabricName, filter, context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources along with {@link PagedResponse} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ProtectableContainerResourceInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<ProtectableContainerResourceInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            // Propagate any context configured on the client into the reactive chain.
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list of ProtectableContainer resources along with {@link PagedResponse} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<ProtectableContainerResourceInner>> listNextSinglePageAsync(
        String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge client-level defaults into the caller-supplied context.
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
| |
/*
Derby - Class org.apache.derby.impl.jdbc.authentication.AuthenticationServiceBase
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.jdbc.authentication;
import org.apache.derby.authentication.UserAuthenticator;
import org.apache.derby.shared.common.reference.Property;
import org.apache.derby.iapi.jdbc.AuthenticationService;
import org.apache.derby.shared.common.reference.Limits;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.services.context.Context;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.daemon.Serviceable;
import org.apache.derby.iapi.services.monitor.ModuleSupportable;
import org.apache.derby.iapi.services.monitor.ModuleControl;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.store.access.AccessFactory;
import org.apache.derby.iapi.services.property.PropertyFactory;
import org.apache.derby.iapi.store.access.TransactionController;
import org.apache.derby.iapi.services.property.PropertySetCallback;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.shared.common.reference.Attribute;
import org.apache.derby.iapi.services.property.PropertyUtil;
import org.apache.derby.iapi.util.StringUtil;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.PasswordHasher;
import org.apache.derby.iapi.sql.dictionary.UserDescriptor;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedAction;
import java.security.AccessController;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.Dictionary;
import java.util.Properties;
import org.apache.derby.shared.common.reference.SQLState;
/**
* <p>
* This is the authentication service base class.
* </p>
* <p>
* There can be 1 Authentication Service for the whole Derby
* system and/or 1 authentication per database.
* In a near future, we intend to allow multiple authentication services
* per system and/or per database.
* </p>
*
* <p>
* It should be extended by the specialized authentication services.
* </p>
*
* <p><strong>IMPORTANT NOTE:</strong></p>
*
* <p>
* User passwords are hashed using a message digest algorithm
* if they're stored in the database. They are not hashed
* if they were defined at the system level.
* </p>
*
* <p>
* The passwords can be hashed using two different schemes:
* </p>
*
* <ul>
* <li>The SHA-1 authentication scheme, which was the only available scheme
* in Derby 10.5 and earlier. This scheme uses the SHA-1 message digest
* algorithm.</li>
* <li>The configurable hash authentication scheme, which allows the users to
* specify which message digest algorithm to use.</li>
* </ul>
*
* <p>
* In order to use the configurable hash authentication scheme, the users have
* to set the {@code derby.authentication.builtin.algorithm} property (on
* system level or database level) to the name of an algorithm that's available
* in one of the security providers registered on the system. If this property
* is not set, or if it's set to NULL or an empty string, the SHA-1
* authentication scheme is used.
* </p>
*
* <p>
* Which scheme to use is decided when a password is about to be stored in the
* database. One database may therefore contain passwords stored using
* different schemes. In order to determine which scheme to use when comparing
* a user's credentials with those stored in the database, the stored password
* is prefixed with an identifier that tells which scheme is being used.
* Passwords stored using the SHA-1 authentication scheme are prefixed with
* {@link PasswordHasher#ID_PATTERN_SHA1_SCHEME}. Passwords that are stored using the
* configurable hash authentication scheme are prefixed with
* {@link PasswordHasher#ID_PATTERN_CONFIGURABLE_HASH_SCHEME} and suffixed with the name of
* the message digest algorithm.
* </p>
*/
public abstract class AuthenticationServiceBase
implements AuthenticationService, ModuleControl, ModuleSupportable, PropertySetCallback {
// The concrete scheme this service delegates credential checks to;
// installed by the subclass via setAuthenticationService().
protected UserAuthenticator authenticationScheme;
// required to retrieve service properties
// (null until boot() runs, or when there is no database store at all)
private AccessFactory store;
/**
Trace flag to trace authentication operations
(only meaningful in SanityManager.DEBUG builds; null otherwise)
*/
public static final String AuthenticationTrace =
SanityManager.DEBUG ? "AuthenticationTrace" : null;
/**
Userid with Strong password substitute DRDA security mechanism
(the SECMEC_USRSSBPWD code point, see DRDA Vol 3)
*/
protected static final int SECMEC_USRSSBPWD = 8;
//
// constructor
//
/** Default constructor; all real setup happens in {@link #boot}. */
public AuthenticationServiceBase() {
}
/**
 * Install the concrete authentication scheme this service delegates to.
 * Intended to be called by the specialized subclass during its boot.
 *
 * @param aScheme the user authenticator to delegate to; must not be null
 */
protected void setAuthenticationService(UserAuthenticator aScheme) {
// specialized class is the principal caller.
this.authenticationScheme = aScheme;
if (SanityManager.DEBUG)
{
SanityManager.ASSERT(this.authenticationScheme != null,
"There is no authentication scheme for that service!");
// With authentication tracing enabled, record which service and
// scheme are in effect (debug builds only).
if (SanityManager.DEBUG_ON(AuthenticationTrace)) {
java.io.PrintWriter iDbgStream =
SanityManager.GET_DEBUG_STREAM();
iDbgStream.println("Authentication Service: [" +
this.toString() + "]");
iDbgStream.println("Authentication Scheme : [" +
this.authenticationScheme.toString() + "]");
}
}
}
/*
** Methods of module control - To be overridden
*/
/**
Start this module. In this case, nothing needs to be done.
@see org.apache.derby.iapi.services.monitor.ModuleControl#boot
@exception StandardException upon failure to load/boot
the expected authentication service.
*/
public void boot(boolean create, Properties properties)
throws StandardException
{
//
// we expect the Access factory to be available since we're
// at boot stage.
//
store = (AccessFactory)
getServiceModule(this, AccessFactory.MODULE);
// register to be notified upon db properties changes
// _only_ if we're on a database context of course :)
// (pf is null when there is no property factory for this service)
PropertyFactory pf = (PropertyFactory)
getServiceModule(this, org.apache.derby.shared.common.reference.Module.PropertyFactory);
if (pf != null)
pf.addPropertySetNotification(this);
}
/**
 * Stop this module.
 *
 * @see org.apache.derby.iapi.services.monitor.ModuleControl#stop
 */
public void stop() {
// nothing special to be done yet.
}
/*
** Methods of AuthenticationService
*/
/**
 * Authenticate a user inside JBMS. This is an overload method.
 *
 * The caller hands us a Properties object with the user's credentials
 * (and, when access to a particular database is being validated, the
 * name of that database).
 *
 * @param databaseName the database being accessed, may be null
 * @param userInfo     connection properties carrying the credentials
 * @return true when the configured scheme accepts the credentials
 *
 * @see org.apache.derby.iapi.jdbc.AuthenticationService#authenticate
 */
public boolean authenticate(String databaseName, Properties userInfo) throws java.sql.SQLException
{
    if (userInfo == null) {
        return false;
    }

    final String user = userInfo.getProperty(Attribute.USERNAME_ATTR);

    // A user name longer than the SQL identifier limit can never match.
    if (user != null && user.length() > Limits.MAX_IDENTIFIER_LENGTH) {
        return false;
    }

    if (SanityManager.DEBUG) {
        if (SanityManager.DEBUG_ON(AuthenticationTrace)) {
            java.io.PrintWriter dbgStream = SanityManager.GET_DEBUG_STREAM();

            dbgStream.println(
                " - Authentication request: user [" + user + "]" +
                ", database [" + databaseName + "]");

            // The following will print the stack trace of the
            // authentication request to the log.
            //Throwable t = new Throwable();
            //istream.println("Authentication Request Stack trace:");
            //t.printStackTrace(istream.getPrintWriter());
        }
    }

    // Delegate the actual credential check to the installed scheme.
    return this.authenticationScheme.authenticateUser(
        user,
        userInfo.getProperty(Attribute.PASSWORD_ATTR),
        databaseName,
        userInfo);
}
/** Default implementation: there is no system credentials database. */
public String getSystemCredentialsDatabaseName() { return null; }
/**
 * Returns a property if it was set at the database or
 * system level. Treated as SERVICE property by default.
 *
 * Lookup is best-effort: a StandardException during the lookup is
 * swallowed and null is returned.
 *
 * @param key the property name to look up
 * @return a property string value, or null if unset or on error.
 **/
public String getProperty(String key) {
String propertyValue = null;
TransactionController tc = null;
try {
tc = getTransaction();
propertyValue =
PropertyUtil.getServiceProperty(tc,
key,
(String) null);
// Release the transaction once the lookup is done.
if (tc != null) {
tc.commit();
tc = null;
}
} catch (StandardException se) {
// Do nothing and just return
}
return propertyValue;
}
/**
 * <p>
 * Obtain a transaction for performing authentication at the database
 * level. Returns null when no access factory (store) is available.
 * </p>
 */
protected TransactionController getTransaction()
    throws StandardException
{
    if (store != null) {
        return store.getTransaction(getContextService().getCurrentContextManager());
    }

    return null;
}
/**
 * Get all the database properties.
 * @return the database properties, or {@code null} if there is no
 * access factory
 */
Properties getDatabaseProperties() throws StandardException {
Properties props = null;
TransactionController tc = getTransaction();
if (tc != null) {
try {
props = tc.getProperties();
} finally {
// Always release the transaction, even if getProperties() throws.
tc.commit();
}
}
return props;
}
/**
 * <p>
 * Name of the database when we are performing authentication at the
 * database level; null when there is no store.
 * </p>
 */
protected String getServiceName()
{
    return (store == null) ? null : getServiceName(store);
}
/**
 * Returns a property set at the database level only.
 * Lookup is best-effort: a StandardException is swallowed and
 * null is returned.
 *
 * @param key the property name to look up
 * @return the database property value, or null if unset or on error
 */
public String getDatabaseProperty(String key) {
String propertyValue = null;
TransactionController tc = null;
try {
// Only attempt a transactional lookup when a store exists.
if (store != null)
tc = store.getTransaction(
getContextService().getCurrentContextManager());
propertyValue =
PropertyUtil.getDatabaseProperty(tc, key);
if (tc != null) {
tc.commit();
tc = null;
}
} catch (StandardException se) {
// Do nothing and just return
}
return propertyValue;
}
/**
 * Returns a property set at the system level, unless the database
 * restricts lookups to database-level properties only (see
 * {@code Property.DATABASE_PROPERTIES_ONLY}).
 *
 * @param key the property name to look up
 * @return the system property value, or null
 */
public String getSystemProperty(String key) {
    boolean databasePropertiesOnly = Boolean.parseBoolean(
        this.getDatabaseProperty(Property.DATABASE_PROPERTIES_ONLY));

    return databasePropertiesOnly ? null : PropertyUtil.getSystemProperty(key);
}
/*
** Methods of PropertySetCallback
*/
/**
 * @see PropertySetCallback#init
 */
public void init(boolean dbOnly, Dictionary p) {
// not called yet ...
}
/**
 * Validate a proposed property change before it is persisted.
 *
 * @param key   the property being set
 * @param value the proposed new value
 * @param p     the current property set
 * @return true if this callback fully handled (remapped) the property
 * @exception StandardException if the new value is not permitted
 *
 * @see PropertySetCallback#validate
 */
public boolean validate(String key, Serializable value, Dictionary p)
throws StandardException
{
// user password properties need to be remapped. nothing else needs remapping.
if ( key.startsWith(org.apache.derby.shared.common.reference.Property.USER_PROPERTY_PREFIX) ) { return true; }
String stringValue = (String) value;
boolean settingToNativeLocal = Property.AUTHENTICATION_PROVIDER_NATIVE_LOCAL.equals( stringValue );
if ( Property.AUTHENTICATION_PROVIDER_PARAMETER.equals( key ) )
{
// NATIVE + LOCAL is the only value of this property which can be persisted
if (
( stringValue != null ) &&
( stringValue.startsWith( Property.AUTHENTICATION_PROVIDER_NATIVE ) )&&
!settingToNativeLocal
)
{
throw StandardException.newException( SQLState.PROPERTY_DBO_LACKS_CREDENTIALS );
}
// once set to NATIVE authentication, you can't change it
String oldValue = (String) p.get( Property.AUTHENTICATION_PROVIDER_PARAMETER );
if ( (oldValue != null) && oldValue.startsWith( Property.AUTHENTICATION_PROVIDER_NATIVE ) )
{
throw StandardException.newException( SQLState.PROPERTY_CANT_UNDO_NATIVE );
}
// can't turn on NATIVE + LOCAL authentication unless the DBO's credentials are already stored.
// this should prevent setting NATIVE + LOCAL authentication in pre-10.9 databases too
// because you can't store credentials in a pre-10.9 database.
if ( settingToNativeLocal )
{
DataDictionary dd = getDataDictionary();
String dbo = dd.getAuthorizationDatabaseOwner();
UserDescriptor userCredentials = dd.getUser( dbo );
if ( userCredentials == null )
{
throw StandardException.newException( SQLState.PROPERTY_DBO_LACKS_CREDENTIALS );
}
}
}
// reject unparsable password lifetime values
if ( Property.AUTHENTICATION_NATIVE_PASSWORD_LIFETIME.equals( key ) )
{
if ( parsePasswordLifetime( stringValue ) == null )
{
throw StandardException.newException
( SQLState.BAD_PASSWORD_LIFETIME, Property.AUTHENTICATION_NATIVE_PASSWORD_LIFETIME );
}
}
// reject non-positive or unparsable expiration thresholds
if ( Property.AUTHENTICATION_PASSWORD_EXPIRATION_THRESHOLD.equals( key ) )
{
if ( parsePasswordThreshold( stringValue ) == null )
{
throw StandardException.newException
( SQLState.BAD_PASSWORD_LIFETIME, Property.AUTHENTICATION_PASSWORD_EXPIRATION_THRESHOLD );
}
}
return false;
}
/**
 * Parse the value of the password lifetime property.
 *
 * @param passwordLifetimeString the raw property value
 * @return the parsed lifetime (negative values are clamped to 0),
 *         or null if the value cannot be parsed
 */
protected Long parsePasswordLifetime( String passwordLifetimeString )
{
    try {
        return Math.max( 0L, Long.parseLong( passwordLifetimeString ) );
    } catch (Exception e) {
        return null;
    }
}
/**
 * Parse the value of the password expiration threshold property.
 *
 * @param expirationThresholdString the raw property value
 * @return the parsed threshold, or null if the value cannot be parsed
 *         or is not strictly positive
 */
protected Double parsePasswordThreshold( String expirationThresholdString )
{
    try {
        double parsed = Double.parseDouble( expirationThresholdString );
        // Keep the <= comparison so NaN falls through and is returned,
        // exactly as the comparison has always behaved.
        return ( parsed <= 0.0d ) ? null : Double.valueOf( parsed );
    } catch (Exception e) {
        return null;
    }
}
/**
@see PropertySetCallback#apply
*/
public Serviceable apply(String key,Serializable value,Dictionary p)
{
// nothing to do here
return null;
}
/**
Remap a property value before it is stored; used here to hash
"derby.user.*" passwords.

@see PropertySetCallback#map
@exception StandardException Thrown on error.
*/
public Serializable map(String key, Serializable value, Dictionary p)
throws StandardException
{
// We only care for "derby.user." property changes
// at the moment.
if (!key.startsWith(org.apache.derby.shared.common.reference.Property.USER_PROPERTY_PREFIX)) return null;
// We do not hash 'derby.user.<userName>' password if
// the configured authentication service is LDAP as the
// same property could be used to store LDAP user full DN (X500).
// In performing this check we only consider database properties
// not system, service or application properties.
String authService =
(String)p.get(org.apache.derby.shared.common.reference.Property.AUTHENTICATION_PROVIDER_PARAMETER);
if ((authService != null) &&
(StringUtil.SQLEqualsIgnoreCase(authService, org.apache.derby.shared.common.reference.Property.AUTHENTICATION_PROVIDER_LDAP)))
return null;
// Ok, we can hash this password in the db
String userPassword = (String) value;
if (userPassword != null) {
// hash (digest) the password
// the caller will retrieve the new value
String userName =
key.substring(Property.USER_PROPERTY_PREFIX.length());
userPassword =
hashUsingDefaultAlgorithm(userName, userPassword, p);
}
return userPassword;
}
// Class implementation

/**
 * Decide whether authentication must be performed: either the
 * derby.connection.requireAuthentication property is set to true in the
 * given property set, or NATIVE authentication is enabled.
 *
 * @param properties the property set to consult
 * @return true when this service must authenticate users
 */
protected final boolean requireAuthentication(Properties properties) {

    String required = PropertyUtil.getPropertyFromSet(
        properties,
        org.apache.derby.shared.common.reference.Property.REQUIRE_AUTHENTICATION_PARAMETER);

    if (Boolean.parseBoolean(required)) {
        return true;
    }

    //
    // NATIVE authentication does not require that you set REQUIRE_AUTHENTICATION_PARAMETER.
    //
    return PropertyUtil.nativeAuthenticationEnabled(properties);
}
/**
 * <p>
 * This method hashes a clear user password using a
 * Single Hash algorithm such as SHA-1 (SHA equivalent)
 * (it is a 160 bits digest)
 * </p>
 *
 * <p>
 * The digest is returned as an object string.
 * </p>
 *
 * <p>
 * This method is only used by the SHA-1 authentication scheme.
 * </p>
 *
 * @param plainTxtUserPassword Plain text user password
 *
 * @return hashed user password (digest) as a String object
 * or {@code null} if the plaintext password is {@code null}
 */
protected String hashPasswordSHA1Scheme(String plainTxtUserPassword)
{
if (plainTxtUserPassword == null)
return null;
MessageDigest algorithm = null;
try
{
algorithm = MessageDigest.getInstance("SHA-1");
} catch (NoSuchAlgorithmException nsae)
{
// Ignore as we checked already during service boot-up
// NOTE(review): if SHA-1 were ever unavailable here, the next line
// would throw NullPointerException — relies on the boot-time check.
}
algorithm.reset();
byte[] bytePasswd = null;
// Encode the password with the legacy nibble encoding (see toHexByte).
bytePasswd = toHexByte(plainTxtUserPassword);
algorithm.update(bytePasswd);
byte[] hashedVal = algorithm.digest();
// Prefix the hex digest with the SHA-1 scheme identifier so the
// stored token records which scheme produced it.
String hexString = PasswordHasher.ID_PATTERN_SHA1_SCHEME +
StringUtil.toHexString(hashedVal, 0, hashedVal.length);
return (hexString);
}
/**
 * <p>
 * Encode a string into a byte array of nibble values, as consumed by the
 * SHA-1 authentication scheme.
 * </p>
 *
 * <p>
 * The array is allocated with two bytes per character. For character
 * {@code i}, the high nibble ({@code (ch & 0xf0) >>> 4}) is written at
 * index {@code i} and the low nibble ({@code ch & 0x0f}) at index
 * {@code i + 1}. NOTE(review): consecutive iterations overlap — index
 * {@code i + 1} is overwritten by the next character's high nibble — so
 * for multi-character input only the last character's low nibble
 * survives and the tail of the array remains zero. The indexing looks
 * as if {@code 2 * i} / {@code 2 * i + 1} was intended, but this
 * method's output feeds the SHA-1 digests of stored passwords, so the
 * behavior must be preserved exactly or existing stored hashes would no
 * longer match.
 * </p>
 *
 * <p>
 * New code is encouraged to use {@code String.getBytes(String)} or similar
 * methods instead, since this method does not preserve all bits for
 * characters whose codepoint exceeds 8 bits. This method is preserved for
 * compatibility with the SHA-1 authentication scheme.
 * </p>
 *
 * @param str string
 * @return the nibble-encoded byte array described above
 */
private static byte[] toHexByte(String str)
{
byte[] data = new byte[str.length() * 2];
for (int i = 0; i < str.length(); i++)
{
char ch = str.charAt(i);
int high_nibble = (ch & 0xf0) >>> 4;
int low_nibble = (ch & 0x0f);
data[i] = (byte)high_nibble;
data[i+1] = (byte)low_nibble;
}
return data;
}
/**
 * <p>
 * Hash a password using this system's default message digest algorithm
 * before it is stored in the database.
 * </p>
 *
 * <p>
 * When the data dictionary can supply a password hasher (the configurable
 * hash authentication scheme, driven by the
 * {@code derby.authentication.builtin.algorithm} property), that hasher
 * is used and the algorithm is encoded into the returned token. Otherwise
 * we fall back to the SHA-1 based scheme.
 * </p>
 *
 * @param user the user whose password to hash
 * @param password the plain text password
 * @param props database properties
 * @return a digest of the user name and password formatted as a string,
 *         or {@code null} if {@code password} is {@code null}
 * @throws StandardException if the specified algorithm is not supported
 */
String hashUsingDefaultAlgorithm(String user,
                                 String password,
                                 Dictionary props)
    throws StandardException
{
    if (password == null) {
        return null;
    }

    PasswordHasher hasher = getDataDictionary().makePasswordHasher(props);

    return (hasher == null)
        ? hashPasswordSHA1Scheme(password)
        : hasher.hashAndEncode(user, password);
}
/**
 * Find the data dictionary for the current connection.
 *
 * NOTE(review): assumes a LanguageConnectionContext is present on the
 * current context stack — confirm all callers run on a connection thread.
 *
 * @return the {@code DataDictionary} for the current connection
 */
private static DataDictionary getDataDictionary() {
LanguageConnectionContext lcc = (LanguageConnectionContext)
getContext(LanguageConnectionContext.CONTEXT_ID);
return lcc.getDataDictionary();
}
/**
 * Strong Password Substitution (USRSSBPWD).
 *
 * This method generates a password substitute to authenticate a client
 * which is using a DRDA security mechanism such as SECMEC_USRSSBPWD.
 *
 * Depending how the user is defined in Derby and if BUILTIN
 * is used, the stored password can be in clear-text (system level)
 * or encrypted (hashed - *not decryptable*)) (database level) - If the
 * user has authenticated at the network level via SECMEC_USRSSBPWD, it
 * means we're presented with a password substitute and we need to
 * generate a substitute password coming from the store to compare with
 * the one passed-in.
 *
 * The substitution algorithm used is the same as the one used in the
 * SHA-1 authentication scheme ({@link PasswordHasher#ID_PATTERN_SHA1_SCHEME}), so in
 * the case of database passwords stored using that scheme, we can simply
 * compare the received hash with the stored hash. If the configurable
 * hash authentication scheme {@link PasswordHasher#ID_PATTERN_CONFIGURABLE_HASH_SCHEME}
 * is used, we have no way to find out if the received hash matches the
 * stored password, since we cannot decrypt the hashed passwords and
 * re-apply another hash algorithm. Therefore, strong password substitution
 * only works if the database-level passwords are stored with the SHA-1
 * scheme.
 *
 * NOTE: A lot of this logic could be shared with the DRDA decryption
 * and client encryption managers - This will be done _once_
 * code sharing along with its rules are defined between the
 * Derby engine, client and network code (PENDING).
 *
 * Substitution algorithm works as follow:
 *
 * PW_TOKEN = SHA-1(PW, ID)
 * The password (PW) and user name (ID) can be of any length greater
 * than or equal to 1 byte.
 * The client generates a 20-byte password substitute (PW_SUB) as follows:
 * PW_SUB = SHA-1(PW_TOKEN, RDr, RDs, ID, PWSEQs)
 *
 * w/ (RDs) as the random client seed and (RDr) as the server one.
 *
 * See PWDSSB - Strong Password Substitution Security Mechanism
 * (DRDA Vol.3 - P.650)
 *
 * @param userName     the user being authenticated
 * @param password     the stored password (hashed when databaseUser is
 *                     true, clear text otherwise)
 * @param info         connection properties carrying the DRDA client and
 *                     server seeds
 * @param databaseUser true when the password comes from the database
 *                     store (already hashed), false for a system-level
 *                     clear-text password
 * @return a substituted password.
 */
protected String substitutePassword(
String userName,
String password,
Properties info,
boolean databaseUser) {
MessageDigest messageDigest = null;
// PWSEQs's 8-byte value constant - See DRDA Vol 3
byte SECMEC_USRSSBPWD_PWDSEQS[] = {
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
(byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01
};
// Generated password substitute
byte[] passwordSubstitute;
try
{
messageDigest = MessageDigest.getInstance("SHA-1");
} catch (NoSuchAlgorithmException nsae)
{
// Ignore as we checked already during service boot-up
}
// IMPORTANT NOTE: As the password is stored single-hashed in the
// database, it is impossible for us to decrypt the password and
// recompute a substitute to compare with one generated on the source
// side - Hence, we have to generate a password substitute.
// In other words, we cannot figure what the original password was -
// Strong Password Substitution (USRSSBPWD) cannot be supported for
// targets which can't access or decrypt passwords on their side.
//
messageDigest.reset();
byte[] bytePasswd = null;
byte[] userBytes = toHexByte(userName);
if (SanityManager.DEBUG)
{
// We must have a source and target seed
SanityManager.ASSERT(
(((String) info.getProperty(Attribute.DRDA_SECTKN_IN) != null) &&
((String) info.getProperty(Attribute.DRDA_SECTKN_OUT) != null)),
"Unexpected: Requester or server seed not available");
}
// Retrieve source (client) and target 8-byte seeds
String sourceSeedstr = info.getProperty(Attribute.DRDA_SECTKN_IN);
String targetSeedstr = info.getProperty(Attribute.DRDA_SECTKN_OUT);
byte[] sourceSeed_ =
StringUtil.fromHexString(sourceSeedstr, 0, sourceSeedstr.length());
byte[] targetSeed_ =
StringUtil.fromHexString(targetSeedstr, 0, targetSeedstr.length());
String hexString = null;
// If user is at the database level, we don't hash the password
// as it is already hashed (BUILTIN scheme) - we only do the
// BUILTIN hashing if the user is defined at the system level
// only - this is required beforehands so that we can do the password
// substitute generation right afterwards.
if (!databaseUser)
{
bytePasswd = toHexByte(password);
messageDigest.update(bytePasswd);
byte[] hashedVal = messageDigest.digest();
hexString = PasswordHasher.ID_PATTERN_SHA1_SCHEME +
StringUtil.toHexString(hashedVal, 0, hashedVal.length);
}
else
{
// Already hashed from the database store
// NOTE: If the password was stored with the configurable hash
// authentication scheme, the stored password will have been hashed
// with a different algorithm than the hashed password sent from
// the client. Since there's no way to decrypt the stored password
// and rehash it with the algorithm that the client uses, we are
// not able to compare the passwords, and the connection attempt
// will fail.
hexString = password;
}
// Generate the password substitute now
// Generate some 20-byte password token
messageDigest.update(userBytes);
messageDigest.update(toHexByte(hexString));
byte[] passwordToken = messageDigest.digest();
// Now we generate the 20-byte password substitute
messageDigest.update(passwordToken);
messageDigest.update(targetSeed_);
messageDigest.update(sourceSeed_);
messageDigest.update(userBytes);
messageDigest.update(SECMEC_USRSSBPWD_PWDSEQS);
passwordSubstitute = messageDigest.digest();
return StringUtil.toHexString(passwordSubstitute, 0,
passwordSubstitute.length);
}
/**
 * Privileged lookup of the ContextService. Must be private so that user code
 * can't call this entry point.
 */
private static ContextService getContextService()
{
return AccessController.doPrivileged
(
new PrivilegedAction<ContextService>()
{
public ContextService run()
{
return ContextService.getFactory();
}
}
);
}
/**
 * Privileged lookup of a Context. Must be private so that user code
 * can't call this entry point.
 *
 * @param contextID identifier of the context to look up
 */
private static Context getContext( final String contextID )
{
return AccessController.doPrivileged
(
new PrivilegedAction<Context>()
{
public Context run()
{
return ContextService.getContext( contextID );
}
}
);
}
/**
 * Privileged service name lookup. Must be private so that user code
 * can't call this entry point.
 *
 * @param serviceModule the module whose service name is wanted
 */
private static String getServiceName( final Object serviceModule )
{
return AccessController.doPrivileged
(
new PrivilegedAction<String>()
{
public String run()
{
return Monitor.getServiceName( serviceModule );
}
}
);
}
/**
 * Privileged module lookup. Must be package protected so that user code
 * can't call this entry point.
 *
 * @param serviceModule    the requesting module
 * @param factoryInterface name of the factory interface to look up
 */
static Object getServiceModule( final Object serviceModule, final String factoryInterface )
{
return AccessController.doPrivileged
(
new PrivilegedAction<Object>()
{
public Object run()
{
return Monitor.getServiceModule( serviceModule, factoryInterface );
}
}
);
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.automation.fluent.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.management.ProxyResource;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.automation.models.DscConfigurationAssociationProperty;
import com.azure.resourcemanager.automation.models.JobProvisioningState;
import com.azure.resourcemanager.automation.models.JobStatus;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.Map;
import java.util.UUID;
/** Definition of the Dsc Compilation job. */
@JsonFlatten
@Fluent
public class DscCompilationJobInner extends ProxyResource {
    // NOTE: the generated-but-unused ClientLogger field has been removed;
    // nothing in this class ever logged through it.

    /*
     * Gets or sets the configuration.
     */
    @JsonProperty(value = "properties.configuration")
    private DscConfigurationAssociationProperty configuration;

    /*
     * Gets the compilation job started by.
     */
    @JsonProperty(value = "properties.startedBy", access = JsonProperty.Access.WRITE_ONLY)
    private String startedBy;

    /*
     * Gets the id of the job.
     */
    @JsonProperty(value = "properties.jobId", access = JsonProperty.Access.WRITE_ONLY)
    private UUID jobId;

    /*
     * Gets the creation time of the job.
     */
    @JsonProperty(value = "properties.creationTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime creationTime;

    /*
     * The current provisioning state of the job.
     */
    @JsonProperty(value = "properties.provisioningState")
    private JobProvisioningState provisioningState;

    /*
     * Gets or sets the runOn which specifies the group name where the job is
     * to be executed.
     */
    @JsonProperty(value = "properties.runOn")
    private String runOn;

    /*
     * Gets or sets the status of the job.
     */
    @JsonProperty(value = "properties.status")
    private JobStatus status;

    /*
     * Gets or sets the status details of the job.
     */
    @JsonProperty(value = "properties.statusDetails")
    private String statusDetails;

    /*
     * Gets the start time of the job.
     */
    @JsonProperty(value = "properties.startTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime startTime;

    /*
     * Gets the end time of the job.
     */
    @JsonProperty(value = "properties.endTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime endTime;

    /*
     * Gets the exception of the job.
     */
    @JsonProperty(value = "properties.exception", access = JsonProperty.Access.WRITE_ONLY)
    private String exception;

    /*
     * Gets the last modified time of the job.
     */
    @JsonProperty(value = "properties.lastModifiedTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime lastModifiedTime;

    /*
     * Gets the last status modified time of the job.
     */
    @JsonProperty(value = "properties.lastStatusModifiedTime", access = JsonProperty.Access.WRITE_ONLY)
    private OffsetDateTime lastStatusModifiedTime;

    /*
     * Gets or sets the parameters of the job.
     */
    @JsonProperty(value = "properties.parameters")
    private Map<String, String> parameters;

    /**
     * Get the configuration property: Gets or sets the configuration.
     *
     * @return the configuration value.
     */
    public DscConfigurationAssociationProperty configuration() {
        return this.configuration;
    }

    /**
     * Set the configuration property: Gets or sets the configuration.
     *
     * @param configuration the configuration value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withConfiguration(DscConfigurationAssociationProperty configuration) {
        this.configuration = configuration;
        return this;
    }

    /**
     * Get the startedBy property: Gets the compilation job started by.
     *
     * @return the startedBy value.
     */
    public String startedBy() {
        return this.startedBy;
    }

    /**
     * Get the jobId property: Gets the id of the job.
     *
     * @return the jobId value.
     */
    public UUID jobId() {
        return this.jobId;
    }

    /**
     * Get the creationTime property: Gets the creation time of the job.
     *
     * @return the creationTime value.
     */
    public OffsetDateTime creationTime() {
        return this.creationTime;
    }

    /**
     * Get the provisioningState property: The current provisioning state of the job.
     *
     * @return the provisioningState value.
     */
    public JobProvisioningState provisioningState() {
        return this.provisioningState;
    }

    /**
     * Set the provisioningState property: The current provisioning state of the job.
     *
     * @param provisioningState the provisioningState value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withProvisioningState(JobProvisioningState provisioningState) {
        this.provisioningState = provisioningState;
        return this;
    }

    /**
     * Get the runOn property: Gets or sets the runOn which specifies the group name where the job is to be executed.
     *
     * @return the runOn value.
     */
    public String runOn() {
        return this.runOn;
    }

    /**
     * Set the runOn property: Gets or sets the runOn which specifies the group name where the job is to be executed.
     *
     * @param runOn the runOn value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withRunOn(String runOn) {
        this.runOn = runOn;
        return this;
    }

    /**
     * Get the status property: Gets or sets the status of the job.
     *
     * @return the status value.
     */
    public JobStatus status() {
        return this.status;
    }

    /**
     * Set the status property: Gets or sets the status of the job.
     *
     * @param status the status value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withStatus(JobStatus status) {
        this.status = status;
        return this;
    }

    /**
     * Get the statusDetails property: Gets or sets the status details of the job.
     *
     * @return the statusDetails value.
     */
    public String statusDetails() {
        return this.statusDetails;
    }

    /**
     * Set the statusDetails property: Gets or sets the status details of the job.
     *
     * @param statusDetails the statusDetails value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withStatusDetails(String statusDetails) {
        this.statusDetails = statusDetails;
        return this;
    }

    /**
     * Get the startTime property: Gets the start time of the job.
     *
     * @return the startTime value.
     */
    public OffsetDateTime startTime() {
        return this.startTime;
    }

    /**
     * Get the endTime property: Gets the end time of the job.
     *
     * @return the endTime value.
     */
    public OffsetDateTime endTime() {
        return this.endTime;
    }

    /**
     * Get the exception property: Gets the exception of the job.
     *
     * @return the exception value.
     */
    public String exception() {
        return this.exception;
    }

    /**
     * Get the lastModifiedTime property: Gets the last modified time of the job.
     *
     * @return the lastModifiedTime value.
     */
    public OffsetDateTime lastModifiedTime() {
        return this.lastModifiedTime;
    }

    /**
     * Get the lastStatusModifiedTime property: Gets the last status modified time of the job.
     *
     * @return the lastStatusModifiedTime value.
     */
    public OffsetDateTime lastStatusModifiedTime() {
        return this.lastStatusModifiedTime;
    }

    /**
     * Get the parameters property: Gets or sets the parameters of the job.
     *
     * @return the parameters value.
     */
    public Map<String, String> parameters() {
        return this.parameters;
    }

    /**
     * Set the parameters property: Gets or sets the parameters of the job.
     *
     * @param parameters the parameters value to set.
     * @return the DscCompilationJobInner object itself.
     */
    public DscCompilationJobInner withParameters(Map<String, String> parameters) {
        this.parameters = parameters;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (configuration() != null) {
            configuration().validate();
        }
    }
}
| |
/*
* Copyright 2014-2015. Adaptive.me.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package me.adaptive.che.plugin.server.project.generator;
import me.adaptive.che.plugin.server.util.CommandLineBuilder;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.che.api.project.server.type.AttributeValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by panthro on 22/07/15.
* <p>
* Yeoman generator task runner, supports generator version 2.0.96
*/
public class GeneratorCommandBuilder extends CommandLineBuilder {
/**
 * Sample Command
 * yo adaptiveme test latest false "Initializr Bootstrap" "ios,android" --ios-version=8.1 --android-version=5.1
 *
 * yo adaptiveme:app [options] [&lt;arg1&gt;] [&lt;arg2&gt;] [&lt;arg3&gt;] [&lt;arg4&gt;] [&lt;arg5&gt;] [&lt;arg6&gt;]
 *
 * Options:
 * -h, --help # Print the generator's options and usage
 * --skip-cache # Do not remember prompt answers Default: false
 * --skip-install # Skip dependencies installation Default: false
 * --start-nibble # Start the nibble emulator at the end of the generation. The nibble should be installed globally. Default: false
 * --ios-version # iOS version selected. ex: 8.1 Default: false
 * --android-version # Android version selected. ex: 5.0 Default: false
 *
 * Arguments:
 * arg1 # Your project name Type: String Required: false
 * arg2 # Adaptive Javascript Library version (defaults = latest) Type: String Required: false
 * arg3 # Add typescript support Type: Boolean Required: false
 * arg4 # Boilerplate for initialize application Type: String Required: false
 * arg5 # Array of platforms selected. ex: [ios,android] Type: Array Required: false
 * arg6 # Application Identifier. ex: me.adaptive.arp Type: String Required: false
 */
/** Separator used when joining the platforms array into the single arg5 value. */
public static final String PLATFORMS_SEPARATOR = ",";
/** The Yeoman executable invoked to run the generator. */
public static final String YEOMAN_COMMAND = "yo";
/** Name of the Yeoman generator to invoke. */
public static final String GENERATOR_NAME = "adaptiveme";
/** Command-line flag: skip dependency installation. */
public static final String SKIP_INSTALL = "--skip-install";
/** Command-line flag: do not remember prompt answers. */
public static final String SKIP_CACHE = "--skip-cache";
/** Command-line option: target iOS version. */
public static final String IOS_VERSION = "--ios-version";
/** Command-line option: target Android version. */
public static final String ANDROID_VERSION = "--android-version";
/**
 * Keys accepted in the options map passed to withOptions().
 * NOTE(review): this nested class only holds constants and could be
 * declared {@code static} — confirm no code relies on it being an
 * inner class before changing.
 */
public class Options {
public static final String ADAPTIVE_VERSION = "adaptive";
public static final String TYPESCRIPT = "typescript";
public static final String BOILERPLATE = "boilerplate";
public static final String PLATFORMS = "platforms";
public static final String IOS_VERSION = "iosVersion";
public static final String ANDROID_VERSION = "androidVersion";
public static final String APP_ID = "appId";
}
//TODO check defaults
/** Fallback values used when the caller supplies no explicit option. */
private class DEFAULTS {
private static final String ADAPTIVE = "latest";
private static final String BOILERPLATE = "none";
private static final String PLATFORMS = "android";//,ios";
private static final String IOS_VERSION = "8.1";
private static final String ANDROID_VERSION = "5.0";
private static final boolean SKIP_INSTALL = true;
// NOTE(review): there is no SKIP_CACHE constant; this one is named
// SKIP_SERVER but is used to initialize the skipCache field — confirm
// the name/usage is intended.
private static final boolean SKIP_SERVER = true;
private static final String APP_ID_PREFIX = "me.adaptive.app.";
}
private String projectName;
private String adaptiveVersion;
private boolean typescriptSupport;
private String boilerplate;
private String[] platforms;
private boolean skipInstall = DEFAULTS.SKIP_INSTALL;
private boolean skipCache = DEFAULTS.SKIP_SERVER;
private String iosVersion;
private String androidVersion;
private String appId;
public GeneratorCommandBuilder withProjectName(String projectName) {
this.projectName = projectName;
return this;
}
public GeneratorCommandBuilder withAdaptiveVersion(String adaptiveVersion) {
this.adaptiveVersion = adaptiveVersion;
return this;
}
public GeneratorCommandBuilder withTypescriptSupport(boolean typescriptSupport) {
this.typescriptSupport = typescriptSupport;
return this;
}
public GeneratorCommandBuilder withBoilerplate(String boilerplate) {
this.boilerplate = boilerplate;
return this;
}
public GeneratorCommandBuilder witPlatforms(String[] platforms) {
this.platforms = platforms;
return this;
}
public GeneratorCommandBuilder withSkipInstall(boolean skipInstall) {
this.skipInstall = skipInstall;
return this;
}
public GeneratorCommandBuilder withSkipCache(boolean skipCache) {
this.skipCache = skipCache;
return this;
}
public GeneratorCommandBuilder withAppId(String appId) {
this.appId = appId;
return this;
}
public GeneratorCommandBuilder withOptions(Map<String, String> options) {
if (options != null && !options.keySet().isEmpty()) {
if (options.containsKey(Options.ADAPTIVE_VERSION)) {
setAdaptiveVersion(options.get(Options.ADAPTIVE_VERSION));
}
if (options.containsKey(Options.TYPESCRIPT)) {
setTypescriptSupport(true); //DO NOT SEND THE typescript if not supported
}
if (options.containsKey(Options.BOILERPLATE)) {
setBoilerplate(options.get(Options.BOILERPLATE));
}
if (options.containsKey(Options.PLATFORMS)) {
setPlatforms(options.get(Options.PLATFORMS).split(PLATFORMS_SEPARATOR));
}
if (options.containsKey(Options.IOS_VERSION)) {
setIosVersion(options.get(Options.IOS_VERSION));
}
if (options.containsKey(Options.ANDROID_VERSION)) {
setAndroidVersion(options.get(Options.ANDROID_VERSION));
}
if (options.containsKey(Options.APP_ID)) {
setAppId(options.get(Options.APP_ID));
}
}
return this;
}
public GeneratorCommandBuilder withAttributes(Map<String, AttributeValue> attributes) {
return this;
}
/**
* Creates a generator instance with all parameters set
*
* @param projectName the project name
* @param adaptiveVersion the version of adaptive to use
* @param typescriptSupport should enable typescript
* @param boilerplate which boilerplate to use
* @param platforms which platforms
* @param iosVersion which ios version
* @param androidVersion which android version
* @see <a href="https://www.npmjs.com/package/generator-adaptiveme">Generator Docs</a>
*/
public GeneratorCommandBuilder(String projectName, String adaptiveVersion, boolean typescriptSupport, String boilerplate, String[] platforms, String iosVersion, String androidVersion) {
this.projectName = projectName;
this.adaptiveVersion = adaptiveVersion;
this.typescriptSupport = typescriptSupport;
this.boilerplate = boilerplate;
this.platforms = platforms;
this.iosVersion = iosVersion;
this.androidVersion = androidVersion;
}
/**
* Creates a generator with all default options
*
* @param projectName
*/
public GeneratorCommandBuilder(String projectName) {
this(projectName, DEFAULTS.ADAPTIVE, false, DEFAULTS.BOILERPLATE, DEFAULTS.PLATFORMS.split(PLATFORMS_SEPARATOR), DEFAULTS.IOS_VERSION, DEFAULTS.ANDROID_VERSION);
}
public String getProjectName() {
return projectName;
}
public void setProjectName(String projectName) {
this.projectName = projectName;
}
public String getAdaptiveVersion() {
return adaptiveVersion;
}
public void setAdaptiveVersion(String adaptiveVersion) {
this.adaptiveVersion = adaptiveVersion;
}
public Boolean isTypescriptSupport() {
return typescriptSupport;
}
public void setTypescriptSupport(boolean typescriptSupport) {
this.typescriptSupport = typescriptSupport;
}
public String getBoilerplate() {
return boilerplate;
}
public void setBoilerplate(String boilerplate) {
this.boilerplate = boilerplate;
}
public boolean isSkipInstall() {
return skipInstall;
}
public void setSkipInstall(boolean skipInstall) {
this.skipInstall = skipInstall;
}
public boolean isSkipCache() {
return skipCache;
}
public void setSkipCache(boolean skipCache) {
this.skipCache = skipCache;
}
public String[] getPlatforms() {
return platforms;
}
public void setPlatforms(String[] platforms) {
this.platforms = platforms;
}
public String getAndroidVersion() {
return androidVersion;
}
public void setAndroidVersion(String androidVersion) {
this.androidVersion = androidVersion;
}
public String getIosVersion() {
return iosVersion;
}
public void setIosVersion(String iosVersion) {
this.iosVersion = iosVersion;
}
public String getAppId() {
if (StringUtils.isEmpty(appId)) {
appId = DEFAULTS.APP_ID_PREFIX + StringUtils.removePattern(getProjectName(), "[^A-Za-z0-9]");
}
return appId;
}
public void setAppId(String appId) {
this.appId = appId;
}
@Override
public String[] getParameters() {
List<String> paramList = new ArrayList<>();
//Order MATTERS!
paramList.add(GENERATOR_NAME);
paramList.add(getProjectName());
paramList.add(getAdaptiveVersion().toLowerCase());
paramList.add(isTypescriptSupport().toString().toLowerCase());
paramList.add(getBoilerplate());
paramList.add(StringUtils.join(platforms, PLATFORMS_SEPARATOR));
paramList.add(getAppId());
paramList.add(IOS_VERSION);
paramList.add(iosVersion);
paramList.add(ANDROID_VERSION);
paramList.add(androidVersion);
if (skipInstall) {
paramList.add(SKIP_INSTALL);
}
if (skipCache) {
paramList.add(SKIP_CACHE);
}
return paramList.toArray(new String[paramList.size()]);
}
@Override
public String getCommand() {
return YEOMAN_COMMAND;
}
}
| |
package <%=packageName%>.web.rest.dto;
<% if (fieldsContainLocalDate == true) { %>
import java.time.LocalDate;<% } %><% if (fieldsContainZonedDateTime == true) { %>
import java.time.ZonedDateTime;<% } %><% if (validation) { %>
import javax.validation.constraints.*;<% } %>
import java.io.Serializable;<% if (fieldsContainBigDecimal == true) { %>
import java.math.BigDecimal;<% } %><% if (fieldsContainBlob && databaseType === 'cassandra') { %>
import java.nio.ByteBuffer;<% } %><% if (fieldsContainDate == true) { %>
import java.util.Date;<% } %><% if (relationships.length > 0) { %>
import java.util.HashSet;
import java.util.Set;<% } %>
import java.util.Objects;<% if (databaseType == 'cassandra') { %>
import java.util.UUID;<% } %><% if (fieldsContainBlob && databaseType === 'sql') { %>
import javax.persistence.Lob;<% } %>
<% for (idx in fields) { if (fields[idx].fieldIsEnum == true) { %>
import <%=packageName%>.domain.enumeration.<%= fields[idx].fieldType %>;<% } } %>
/**
* A DTO for the <%= entityClass %> entity.
*/
public class <%= entityClass %>DTO implements Serializable {
<% if (databaseType == 'sql') { %>
private Long id;<% } %><% if (databaseType == 'mongodb') { %>
private String id;<% } %><% if (databaseType == 'cassandra') { %>
private UUID id;<% } %>
<%_ for (idx in fields) {
var fieldValidate = fields[idx].fieldValidate;
var fieldValidateRules = fields[idx].fieldValidateRules;
var fieldValidateRulesMinlength = fields[idx].fieldValidateRulesMinlength;
var fieldValidateRulesMaxlength = fields[idx].fieldValidateRulesMaxlength;
var fieldValidateRulesMinbytes = fields[idx].fieldValidateRulesMinbytes;
var fieldValidateRulesMaxbytes = fields[idx].fieldValidateRulesMaxbytes;
var fieldValidateRulesMin = fields[idx].fieldValidateRulesMin;
var fieldValidateRulesMax = fields[idx].fieldValidateRulesMax;
var fieldValidateRulesPatternJava = fields[idx].fieldValidateRulesPatternJava;
var fieldType = fields[idx].fieldType;
var fieldTypeBlobContent = fields[idx].fieldTypeBlobContent;
var fieldName = fields[idx].fieldName;
if (fieldValidate == true) {
var required = false;
var MAX_VALUE = 2147483647;
if (fieldValidate == true && fieldValidateRules.indexOf('required') != -1) {
required = true;
}
if (required) { _%>
@NotNull<% } %><% if (fieldValidateRules.indexOf('minlength') != -1 && fieldValidateRules.indexOf('maxlength') == -1) { %>
@Size(min = <%= fieldValidateRulesMinlength %>)<% } %><% if (fieldValidateRules.indexOf('maxlength') != -1 && fieldValidateRules.indexOf('minlength') == -1) { %>
@Size(max = <%= fieldValidateRulesMaxlength %>)<% } %><% if (fieldValidateRules.indexOf('minlength') != -1 && fieldValidateRules.indexOf('maxlength') != -1) { %>
@Size(min = <%= fieldValidateRulesMinlength %>, max = <%= fieldValidateRulesMaxlength %>)<% } %><% if (fieldValidateRules.indexOf('minbytes') != -1 && fieldValidateRules.indexOf('maxbytes') == -1) { %>
@Size(min = <%= fieldValidateRulesMinbytes %>)<% } %><% if (fieldValidateRules.indexOf('maxbytes') != -1 && fieldValidateRules.indexOf('minbytes') == -1) { %>
@Size(max = <%= fieldValidateRulesMaxbytes %>)<% } %><% if (fieldValidateRules.indexOf('minbytes') != -1 && fieldValidateRules.indexOf('maxbytes') != -1) { %>
@Size(min = <%= fieldValidateRulesMinbytes %>, max = <%= fieldValidateRulesMaxbytes %>)<% } %><% if (fieldValidateRules.indexOf('min') != -1) { %>
@Min(value = <%= fieldValidateRulesMin %>)<% } %><% if (fieldValidateRules.indexOf('max') != -1) { %>
@Max(value = <%= fieldValidateRulesMax %><%= (fieldValidateRulesMax > MAX_VALUE) ? 'L' : '' %>)<% } %><% if (fieldValidateRules.indexOf('pattern') != -1) { %>
@Pattern(regexp = "<%= fieldValidateRulesPatternJava %>")<% } } %><% if (fieldType == 'byte[]' && databaseType === 'sql') { %>
@Lob<% } %>
<%_ if (fieldTypeBlobContent != 'text') { _%>
private <%= fieldType %> <%= fieldName %>;
<%_ } else { _%>
private String <%= fieldName %>;
<%_ } %>
<%_ if ((fieldType == 'byte[]' || fieldType === 'ByteBuffer') && fieldTypeBlobContent != 'text') { _%>
private String <%= fieldName %>ContentType;
<%_ } _%>
<%_ } _%>
<%_ for (idx in relationships) {
var otherEntityRelationshipName = relationships[idx].otherEntityRelationshipName,
relationshipFieldName = relationships[idx].relationshipFieldName,
relationshipFieldNamePlural = relationships[idx].relationshipFieldNamePlural,
relationshipType = relationships[idx].relationshipType,
otherEntityNameCapitalized = relationships[idx].otherEntityNameCapitalized,
otherEntityFieldCapitalized = relationships[idx].otherEntityFieldCapitalized,
ownerSide = relationships[idx].ownerSide; %><% if (relationshipType == 'many-to-many' && ownerSide == true) { _%>
private Set<<%= otherEntityNameCapitalized %>DTO> <%= relationshipFieldNamePlural %> = new HashSet<>();
<%_ } else if (relationshipType == 'many-to-one' || (relationshipType == 'one-to-one' && ownerSide == true)) { _%>
private Long <%= relationshipFieldName %>Id;
<% if (otherEntityFieldCapitalized !='Id' && otherEntityFieldCapitalized != '') { %>
private String <%= relationshipFieldName %><%= otherEntityFieldCapitalized %>;
<%_ } } } _%>
public <% if (databaseType == 'sql') { %>Long<% } %><% if (databaseType == 'mongodb') { %>String<% } %><% if (databaseType == 'cassandra') { %>UUID<% } %> getId() {
return id;
}
public void setId(<% if (databaseType == 'sql') { %>Long<% } %><% if (databaseType == 'mongodb') { %>String<% } %><% if (databaseType == 'cassandra') { %>UUID<% } %> id) {
this.id = id;
}
<%_ for (idx in fields) {
var fieldType = fields[idx].fieldType;
var fieldTypeBlobContent = fields[idx].fieldTypeBlobContent;
var fieldInJavaBeanMethod = fields[idx].fieldInJavaBeanMethod;
var fieldName = fields[idx].fieldName; _%>
<%_ if(fieldTypeBlobContent != 'text') { _%>
public <%= fieldType %> get<%= fieldInJavaBeanMethod %>() {
<%_ } else { _%>
public String get<%= fieldInJavaBeanMethod %>() {
<%_ } _%>
return <%= fieldName %>;
}
<%_ if(fieldTypeBlobContent != 'text') { _%>
public void set<%= fieldInJavaBeanMethod %>(<%= fieldType %> <%= fieldName %>) {
<%_ } else { _%>
public void set<%= fieldInJavaBeanMethod %>(String <%= fieldName %>) {
<%_ } _%>
this.<%= fieldName %> = <%= fieldName %>;
}
<%_ if ((fieldType == 'byte[]' || fieldType === 'ByteBuffer') && fieldTypeBlobContent != 'text') { _%>
public String get<%= fieldInJavaBeanMethod %>ContentType() {
return <%= fieldName %>ContentType;
}
public void set<%= fieldInJavaBeanMethod %>ContentType(String <%= fieldName %>ContentType) {
this.<%= fieldName %>ContentType = <%= fieldName %>ContentType;
}
<%_ } } _%>
<%_ for (idx in relationships) {
relationshipFieldName = relationships[idx].relationshipFieldName,
relationshipFieldNamePlural = relationships[idx].relationshipFieldNamePlural,
otherEntityName = relationships[idx].otherEntityName,
otherEntityNamePlural = relationships[idx].otherEntityNamePlural,
relationshipType = relationships[idx].relationshipType,
otherEntityNameCapitalized = relationships[idx].otherEntityNameCapitalized,
otherEntityFieldCapitalized = relationships[idx].otherEntityFieldCapitalized,
relationshipNameCapitalized = relationships[idx].relationshipNameCapitalized,
relationshipNameCapitalizedPlural = relationships[idx].relationshipNameCapitalizedPlural,
ownerSide = relationships[idx].ownerSide;
if (relationshipType == 'many-to-many' && ownerSide == true) { _%>
public Set<<%= otherEntityNameCapitalized %>DTO> get<%= relationshipNameCapitalizedPlural %>() {
return <%= relationshipFieldNamePlural %>;
}
public void set<%= relationshipNameCapitalizedPlural %>(Set<<%= otherEntityNameCapitalized %>DTO> <%= otherEntityNamePlural %>) {
this.<%= relationshipFieldNamePlural %> = <%= otherEntityNamePlural %>;
}
<%_ } else if (relationshipType == 'many-to-one' || (relationshipType == 'one-to-one' && ownerSide == true)) { _%>
<%_ if (relationshipNameCapitalized.length > 1) { _%>
public Long get<%= relationshipNameCapitalized %>Id() {
return <%= relationshipFieldName %>Id;
}
public void set<%= relationshipNameCapitalized %>Id(Long <%= otherEntityName %>Id) {
this.<%= relationshipFieldName %>Id = <%= otherEntityName %>Id;
}
<%_ } else { // special case when the entity name has one character _%>
public Long get<%= relationshipNameCapitalized.toLowerCase() %>Id() {
return <%= relationshipFieldName %>Id;
}
public void set<%= relationshipNameCapitalized.toLowerCase() %>Id(Long <%= otherEntityName %>Id) {
this.<%= relationshipFieldName %>Id = <%= otherEntityName %>Id;
}
<%_ } _%>
<% if (otherEntityFieldCapitalized !='Id' && otherEntityFieldCapitalized != '') { %>
public String get<%= relationshipNameCapitalized %><%= otherEntityFieldCapitalized %>() {
return <%= relationshipFieldName %><%= otherEntityFieldCapitalized %>;
}
public void set<%= relationshipNameCapitalized %><%= otherEntityFieldCapitalized %>(String <%= otherEntityName %><%= otherEntityFieldCapitalized %>) {
this.<%= relationshipFieldName %><%= otherEntityFieldCapitalized %> = <%= otherEntityName %><%= otherEntityFieldCapitalized %>;
}
<%_ } } } _%>
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
<%= entityClass %>DTO <%= entityInstance %>DTO = (<%= entityClass %>DTO) o;
if ( ! Objects.equals(id, <%= entityInstance %>DTO.id)) return false;
return true;
}
@Override
public int hashCode() {
return Objects.hashCode(id);
}
@Override
public String toString() {
return "<%= entityClass %>DTO{" +
"id=" + id +<% for (idx in fields) {
var fieldName = fields[idx].fieldName; %>
", <%= fieldName %>='" + <%= fieldName %> + "'" +<% } %>
'}';
}
}
| |
/*
* Copyright (c) 2018 Livio, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Livio Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.smartdevicelink.transport;
import android.annotation.SuppressLint;
import android.content.ComponentName;
import android.content.Context;
import android.os.Build;
import android.os.Looper;
import android.os.Parcelable;
import android.util.Log;
import com.smartdevicelink.SdlConnection.SdlConnection;
import com.smartdevicelink.exception.SdlException;
import com.smartdevicelink.protocol.SdlPacket;
import com.smartdevicelink.transport.enums.TransportType;
import com.smartdevicelink.transport.utl.TransportRecord;
import java.util.List;
@Deprecated
public class MultiplexTransport extends SdlTransport{
private final static String TAG = "Multiplex Transport";
private String sComment = "Multiplexing";
TransportBrokerThread brokerThread;
protected boolean isDisconnecting = false;
MultiplexTransportConfig transportConfig;
public MultiplexTransport(MultiplexTransportConfig transportConfig, final ITransportListener transportListener){
super(transportListener);
if(transportConfig == null){
this.handleTransportError("Transport config was null", null);
throw new IllegalArgumentException("Null transportConfig in MultiplexTransport constructor");
}
this.transportConfig = transportConfig;
brokerThread = new TransportBrokerThread(transportConfig.context, transportConfig.appId, transportConfig.service);
brokerThread.start();
isDisconnecting = false;
//brokerThread.initTransportBroker();
//brokerThread.start();
}
public boolean isDisconnecting(){
return this.isDisconnecting;
}
/**
* Returns the config that was used to create this transport
* @return
*/
public MultiplexTransportConfig getConfig(){
return this.transportConfig;
}
public boolean requestNewSession(){
if(brokerThread!=null){
brokerThread.requestNewSession();
return true;
}
return false;
}
public void removeSession(long sessionId){
if(brokerThread!=null){
brokerThread.removeSession(sessionId);
}
}
/**
* Overridden abstract method which returns specific type of this transport.
*
* @return Constant value - TransportType.BLUETOOTH.
* @see TransportType
*/
public TransportType getTransportType() {
return TransportType.MULTIPLEX;
}
@Override
public String getBroadcastComment() {
return sComment;
}
@Override
protected boolean sendBytesOverTransport(SdlPacket packet) {
if(brokerThread!=null){
brokerThread.sendPacket(packet);
return true;
}
return false; //Sure why not.
}
@Override
public void openConnection() throws SdlException {
Log.d(TAG, "Open connection");
if(brokerThread!=null){
brokerThread.startConnection();
}//else should log out
}
@Override
public void disconnect() {
if(isDisconnecting){
return;
}
Log.d(TAG, "Close connection");
this.isDisconnecting= true;
if(brokerThread!= null){
brokerThread.cancel();
brokerThread = null;
}
handleTransportDisconnected(TransportType.MULTIPLEX.name());
isDisconnecting = false;
}
@Override
protected void handleTransportError(String message, Exception ex) {
if(brokerThread!=null){
brokerThread.cancel();
//brokerThread.interrupt();
brokerThread = null;
}
super.handleTransportError(message, ex);
}
public boolean isPendingConnected(){
if(brokerThread!=null){
return brokerThread.queueStart;
}
return false;
}
/**
* This thread will handle the broker transaction with the router service.
*
*/
protected class TransportBrokerThread extends Thread{
boolean connected = false; //This helps clear up double on hardware connects
TransportBroker broker;
boolean queueStart = false;
final Context context;
final String appId;
final ComponentName service;
Looper threadLooper = null;
/**
* Thread will automatically start to prepare its looper.
* @param context
* @param appId
*/
public TransportBrokerThread(Context context, String appId, ComponentName service){
//this.start();
super();
this.context = context;
this.appId = appId;
this.service = service;
//initTransportBroker(context, appId);
}
public void startConnection(){
synchronized(this){
connected = false;
if(broker!=null){
try{
broker.start();
}catch(Exception e){
handleTransportError("Error starting transport", e);
}
}else{
queueStart = true;
}
}
}
@SuppressLint("NewApi")
public synchronized void cancel(){
if(broker!=null){
broker.stop();
broker = null;
}
connected = false;
if(threadLooper !=null){
if(Build.VERSION.SDK_INT>=Build.VERSION_CODES.JELLY_BEAN_MR2){
threadLooper.quitSafely();
}else{
threadLooper.quit();
}
threadLooper = null;
}
//this.interrupt();
}
public void onHardwareConnected(TransportType type){
if(broker!=null){
broker.onHardwareConnected(type);
}else{
queueStart = true;
}
}
public void sendPacket(SdlPacket packet){
broker.sendPacketToRouterService(packet);
}
public void requestNewSession(){
if(broker!=null){
broker.requestNewSession();
}
}
public void removeSession(long sessionId){
if(broker!=null){
broker.removeSession(sessionId);
}
}
@Override
public void run() {
Looper.prepare();
if(broker==null){
synchronized(this){
initTransportBroker();
if(queueStart){
try{
broker.start();
}catch(Exception e){
handleTransportError("Error starting transport", e);
}
}
this.notify();
}
}
threadLooper = Looper.myLooper();
Looper.loop();
}
public void initTransportBroker(){
broker = new TransportBroker(context, appId, service){
@Override
public boolean onHardwareConnected(TransportType type) {
if(super.onHardwareConnected(type)){
Log.d(TAG, "On transport connected...");
if(!connected){
connected = true;
handleTransportConnected();
}//else{Log.d(TAG, "Already connected");}
return true;
}else{
try{
this.start();
}catch(Exception e){
handleTransportError("Error starting transport", e);
}
}
return false;
}
@Override
public void onHardwareDisconnected(TransportRecord transportRecord, List<TransportRecord> connected) {
onHardwareDisconnected(TransportType.BLUETOOTH);
}
@Override
public void onHardwareDisconnected(TransportType type) {
super.onHardwareDisconnected(type);
if(connected){
Log.d(TAG, "Handling disconnect");
connected = false;
SdlConnection.enableLegacyMode(isLegacyModeEnabled(), TransportType.BLUETOOTH);
if(isLegacyModeEnabled()){
Log.d(TAG, "Handle transport disconnect, legacy mode enabled");
this.stop();
isDisconnecting = true;
//handleTransportDisconnected("");
handleTransportError("",null); //This seems wrong, but it works
}else{
Log.d(TAG, "Handle transport Error");
isDisconnecting = true;
handleTransportError("",null); //This seems wrong, but it works
}
}
}
@Override
public void onLegacyModeEnabled() {
super.onLegacyModeEnabled();
SdlConnection.enableLegacyMode(isLegacyModeEnabled(), TransportType.BLUETOOTH);
if(isLegacyModeEnabled()){
Log.d(TAG, "Handle on legacy mode enabled");
this.stop();
isDisconnecting = true;
//handleTransportDisconnected("");
handleTransportError("",null); //This seems wrong, but it works
}
}
@Override
public void onPacketReceived(Parcelable packet) {
if(packet!=null){
SdlPacket sdlPacket = (SdlPacket)packet;
handleReceivedPacket(sdlPacket);
}
}
};
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.cache.CacheException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.PartitionLossPolicy;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.WALMode;
import org.apache.ignite.events.CacheRebalancingEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.events.EventType;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.processors.cache.CacheInvalidStateException;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionDemandMessage;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cache.PartitionLossPolicy.IGNORE;
import static org.apache.ignite.cache.PartitionLossPolicy.READ_ONLY_ALL;
import static org.apache.ignite.cache.PartitionLossPolicy.READ_ONLY_SAFE;
import static org.apache.ignite.cache.PartitionLossPolicy.READ_WRITE_SAFE;
/**
*
*/
@RunWith(Parameterized.class)
public class IgniteCachePartitionLossPolicySelfTest extends GridCommonAbstractTest {
/** */
private static final int PARTS_CNT = 32;
/** */
private boolean client;
/** */
@Parameterized.Parameter(value = 0)
public CacheAtomicityMode atomicityMode;
/** */
@Parameterized.Parameter(value = 1)
public PartitionLossPolicy partLossPlc;
/** */
@Parameterized.Parameter(value = 2)
public int backups;
/** */
@Parameterized.Parameter(value = 3)
public boolean autoAdjust;
/** */
@Parameterized.Parameter(value = 4)
public int nodes;
/** */
@Parameterized.Parameter(value = 5)
public int[] stopNodes;
/** */
@Parameterized.Parameter(value = 6)
public boolean persistence;
/** */
private static final String[] CACHES = new String[]{"cache1", "cache2"};
/** */
@Parameterized.Parameters(name = "{0} {1} {2} {3} {4} {6}")
public static List<Object[]> parameters() {
ArrayList<Object[]> params = new ArrayList<>();
Random r = new Random();
System.out.println("Seed: " + U.field(r, "seed"));
for (CacheAtomicityMode mode : Arrays.asList(TRANSACTIONAL, ATOMIC)) {
// Test always scenarios.
params.add(new Object[]{mode, IGNORE, 0, false, 3, new int[]{2}, false});
params.add(new Object[]{mode, IGNORE, 0, false, 3, new int[]{2}, true});
params.add(new Object[]{mode, READ_ONLY_SAFE, 1, true, 4, new int[]{2, 0}, false});
params.add(new Object[]{mode, IGNORE, 1, false, 4, new int[]{0, 2}, false});
params.add(new Object[]{mode, READ_WRITE_SAFE, 2, true, 5, new int[]{1, 0, 2}, false});
// Random scenarios.
for (Integer backups : Arrays.asList(0, 1, 2)) {
int nodes = backups + 3;
int[] stopIdxs = new int[backups + 1];
List<Integer> tmp = IntStream.range(0, nodes).boxed().collect(Collectors.toList());
Collections.shuffle(tmp, r);
for (int i = 0; i < stopIdxs.length; i++)
stopIdxs[i] = tmp.get(i);
params.add(new Object[]{mode, READ_WRITE_SAFE, backups, false, nodes, stopIdxs, false});
params.add(new Object[]{mode, IGNORE, backups, false, nodes, stopIdxs, false});
params.add(new Object[]{mode, READ_ONLY_SAFE, backups, false, nodes, stopIdxs, false});
params.add(new Object[]{mode, READ_ONLY_ALL, backups, false, nodes, stopIdxs, false});
params.add(new Object[]{mode, READ_WRITE_SAFE, backups, true, nodes, stopIdxs, false});
params.add(new Object[]{mode, IGNORE, backups, true, nodes, stopIdxs, false});
params.add(new Object[]{mode, READ_ONLY_SAFE, backups, true, nodes, stopIdxs, false});
params.add(new Object[]{mode, READ_ONLY_ALL, backups, true, nodes, stopIdxs, false});
boolean ignored = false; // Autoadjust is currently ignored for persistent mode.
params.add(new Object[]{mode, READ_WRITE_SAFE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, IGNORE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, READ_ONLY_SAFE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, READ_ONLY_ALL, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, READ_WRITE_SAFE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, IGNORE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, READ_ONLY_SAFE, backups, ignored, nodes, stopIdxs, true});
params.add(new Object[]{mode, READ_ONLY_ALL, backups, ignored, nodes, stopIdxs, true});
}
}
return params;
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(gridName);
cfg.setCommunicationSpi(new TestRecordingCommunicationSpi());
cfg.setConsistentId(gridName);
cfg.setClientMode(client);
cfg.setDataStorageConfiguration(
new DataStorageConfiguration()
.setWalMode(WALMode.LOG_ONLY)
.setWalSegmentSize(4 * 1024 * 1024)
.setDefaultDataRegionConfiguration(
new DataRegionConfiguration()
.setPersistenceEnabled(persistence)
.setMaxSize(100L * 1024 * 1024))
);
CacheConfiguration[] ccfgs = new CacheConfiguration[CACHES.length];
for (int i = 0; i < ccfgs.length; i++) {
ccfgs[i] = new CacheConfiguration(CACHES[i])
.setAtomicityMode(atomicityMode)
.setCacheMode(PARTITIONED)
.setBackups(backups)
.setWriteSynchronizationMode(FULL_SYNC)
.setPartitionLossPolicy(partLossPlc)
.setAffinity(new RendezvousAffinityFunction(false, PARTS_CNT));
}
cfg.setCacheConfiguration(ccfgs);
cfg.setIncludeEventTypes(EventType.EVTS_ALL);
return cfg;
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
super.beforeTest();
cleanPersistenceDir();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
    super.afterTest();

    // Tear down the whole cluster and wipe disk state so the next test
    // (possibly with different parameters) starts clean.
    stopAllGrids();

    cleanPersistenceDir();
}
/**
 * Stops the configured node sequence, verifies cache behavior on the survivors according to the
 * partition loss policy, restarts the stopped nodes, resets lost partitions (in safe modes) and
 * finally checks that all caches are fully usable again.
 *
 * @throws Exception if failed.
 */
@Test
public void checkLostPartition() throws Exception {
    log.info("Stop sequence: " + IntStream.of(stopNodes).boxed().collect(Collectors.toList()));

    // "Safe" means lost partitions are tracked and fenced. With in-memory IGNORE policy and
    // baseline auto-adjust the topology shrinks automatically, so losses are not tracked.
    boolean safe = persistence || !(partLossPlc == IGNORE && autoAdjust);

    // Exercise a random cache; all caches share the same configuration.
    String cacheName = CACHES[ThreadLocalRandom.current().nextInt(CACHES.length)];

    // Per-node record of partitions reported lost via events (filled by the listener below).
    Map<UUID, Set<Integer>> lostMap = new ConcurrentHashMap<>();

    Set<Integer> expLostParts = prepareTopology(nodes, autoAdjust, new P1<Event>() {
        @Override public boolean apply(Event evt) {
            assert evt.type() == EventType.EVT_CACHE_REBALANCE_PART_DATA_LOST;

            CacheRebalancingEvent cacheEvt = (CacheRebalancingEvent)evt;

            lostMap.computeIfAbsent(evt.node().id(), k -> Collections.synchronizedSet(new HashSet<>())).add(cacheEvt.partition());

            return true;
        }
    }, stopNodes);

    // Sorted copy enables binarySearch-based "was this node stopped?" checks.
    int[] stopNodesSorted = Arrays.copyOf(stopNodes, stopNodes.length);
    Arrays.sort(stopNodesSorted);

    // Verify behavior on every surviving node while partitions are lost.
    for (Ignite ig : G.allGrids()) {
        if (Arrays.binarySearch(stopNodesSorted, getTestIgniteInstanceIndex(ig.name())) >= 0)
            continue;

        verifyCacheOps(cacheName, expLostParts, ig, safe);
    }

    // Check that partition state does not change after we return nodes.
    for (int i = 0; i < stopNodes.length; i++) {
        int node = stopNodes[i];

        IgniteEx grd = startGrid(node);

        info("Newly started node: " + grd.cluster().localNode().id());
    }

    // nodes + 1 covers all servers plus the client started in prepareTopology.
    for (int i = 0; i < nodes + 1; i++)
        verifyCacheOps(cacheName, expLostParts, grid(i), safe);

    // In safe modes lost state must be cleared explicitly.
    if (safe)
        ignite(0).resetLostPartitions(Arrays.asList(CACHES));

    awaitPartitionMapExchange(true, true, null);

    // After reset/exchange every cache must be fully readable and writable.
    for (Ignite ig : G.allGrids()) {
        IgniteCache<Integer, Integer> cache = ig.cache(cacheName);

        assertTrue(cache.lostPartitions().isEmpty());

        int parts = ig.affinity(cacheName).partitions();

        for (int i = 0; i < parts; i++) {
            cache.get(i);
            cache.put(i, i);
        }
    }

    // In safe mode each surviving node must have received a loss event for exactly
    // the expected partition set.
    if (safe) {
        for (Ignite ig : G.allGrids()) {
            if (Arrays.binarySearch(stopNodesSorted, getTestIgniteInstanceIndex(ig.name())) >= 0)
                continue;

            Set<Integer> lostParts = lostMap.get(ig.cluster().localNode().id());

            assertEquals(expLostParts, lostParts);
        }
    }
}
/**
 * Verifies single get/put, bulk getAll/putAll and scan queries against a cache with lost
 * partitions, asserting the outcome expected for the current loss policy.
 *
 * @param cacheName Cache name to exercise.
 * @param expLostParts Partitions expected to be lost.
 * @param ig Node to run the operations on.
 * @param safe {@code true} if lost partitions are tracked and operations on them must fail.
 */
private void verifyCacheOps(String cacheName, Set<Integer> expLostParts, Ignite ig, boolean safe) {
    boolean readOnly = partLossPlc == READ_ONLY_SAFE || partLossPlc == READ_ONLY_ALL;

    IgniteCache<Integer, Integer> cache = ig.cache(cacheName);

    int parts = ig.affinity(cacheName).partitions();

    // Unsafe mode never reports lost partitions.
    if (!safe)
        assertTrue(cache.lostPartitions().isEmpty());

    // Check single reads.
    for (int p = 0; p < parts; p++) {
        try {
            Integer actual = cache.get(p);

            if (safe) {
                assertTrue("Reading from a lost partition should have failed [part=" + p + ']',
                    !cache.lostPartitions().contains(p));

                assertEquals(p, actual.intValue());
            }
            else
                // Unsafe mode: lost partitions read as empty (null), others keep preloaded value.
                assertEquals(expLostParts.contains(p) ? null : p, actual);
        }
        catch (CacheException e) {
            assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));

            assertTrue("Read exception should only be triggered for a lost partition " +
                "[ex=" + X.getFullStackTrace(e) + ", part=" + p + ']', cache.lostPartitions().contains(p));
        }
    }

    // Check single writes.
    for (int p = 0; p < parts; p++) {
        try {
            cache.put(p, p);

            // Keep lost partitions empty in unsafe mode so later read checks stay valid.
            if (!safe && expLostParts.contains(p))
                cache.remove(p);

            if (readOnly) {
                assertTrue(!cache.lostPartitions().contains(p));

                fail("Writing to a cache containing lost partitions should have failed [part=" + p + ']');
            }

            if (safe) {
                assertTrue("Writing to a lost partition should have failed [part=" + p + ']',
                    !cache.lostPartitions().contains(p));
            }
        }
        catch (CacheException e) {
            assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));

            assertTrue("Write exception should only be triggered for a lost partition or in read-only mode " +
                "[ex=" + X.getFullStackTrace(e) + ", part=" + p + ']', readOnly || cache.lostPartitions().contains(p));
        }
    }

    // Complement of the lost set, used for bulk operations below.
    Set<Integer> notLost = IntStream.range(0, parts).boxed().filter(p -> !expLostParts.contains(p)).collect(Collectors.toSet());

    // Bulk read touching only lost partitions: must fail in safe mode.
    try {
        Map<Integer, Integer> res = cache.getAll(expLostParts);

        assertFalse("Reads from lost partitions should have been allowed only in non-safe mode", safe);
    }
    catch (CacheException e) {
        assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
    }

    // Bulk read touching only healthy partitions: must always succeed.
    try {
        Map<Integer, Integer> res = cache.getAll(notLost);
    }
    catch (Exception e) {
        fail("Reads from non lost partitions should have been always allowed");
    }

    // Bulk write touching only lost partitions: must fail in safe mode;
    // in unsafe mode clean up to keep those partitions empty.
    try {
        cache.putAll(expLostParts.stream().collect(Collectors.toMap(k -> k, v -> v)));

        assertFalse("Writes to lost partitions should have been allowed only in non-safe mode", safe);

        cache.removeAll(expLostParts);
    }
    catch (CacheException e) {
        assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
    }

    // Bulk write touching only healthy partitions: fails only in safe read-only mode.
    try {
        cache.putAll(notLost.stream().collect(Collectors.toMap(k -> k, v -> v)));

        assertTrue("Writes to non-lost partitions should have been allowed only in read-write or non-safe mode",
            !safe || !readOnly);
    }
    catch (CacheException e) {
        assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
    }

    // Check queries.
    for (int p = 0; p < parts; p++) {
        boolean loc = ig.affinity(cacheName).isPrimary(ig.cluster().localNode(), p);

        List<?> objects;

        // Distributed per-partition scan.
        try {
            objects = runQuery(ig, cacheName, false, p);

            assertTrue("Query over lost partition should have failed: safe=" + safe +
                ", expLost=" + expLostParts + ", p=" + p, !safe || !expLostParts.contains(p));

            if (safe)
                assertEquals(1, objects.size());
        } catch (Exception e) {
            assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
        }

        // Full-cache scan (part == -1): fails in safe mode while anything is lost.
        try {
            runQuery(ig, cacheName, false, -1);

            assertFalse("Query should have failed in safe mode with lost partitions", safe);
        } catch (Exception e) {
            assertTrue("Query must always work in unsafe mode", safe);

            assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
        }

        // Local scan only makes sense where this node is primary for the partition.
        if (loc) {
            try {
                objects = runQuery(ig, cacheName, true, p);

                assertTrue("Query over lost partition should have failed: safe=" + safe +
                    ", expLost=" + expLostParts + ", p=" + p, !safe || !expLostParts.contains(p));

                if (safe)
                    assertEquals(1, objects.size());
            } catch (Exception e) {
                assertTrue(X.getFullStackTrace(e), X.hasCause(e, CacheInvalidStateException.class));
            }
        }
    }
}
/**
 * Runs a scan query over the given cache.
 *
 * @param ig Node to run the query from.
 * @param cacheName Cache name.
 * @param loc {@code true} to run the query locally on {@code ig}.
 * @param part Partition to scan, or {@code -1} for a full-cache scan.
 * @return All rows returned by the scan.
 */
protected List<?> runQuery(Ignite ig, String cacheName, boolean loc, int part) {
    ScanQuery scanQry = new ScanQuery();

    // A negative partition means "scan the whole cache".
    if (part != -1)
        scanQry.setPartition(part);

    if (loc)
        scanQry.setLocal(true);

    return ig.cache(cacheName).query(scanQry).getAll();
}
/**
 * Starts {@code nodes} server nodes plus one client, preloads every cache, installs the loss-event
 * listener, blocks rebalancing, then stops the requested nodes so that some partitions are
 * guaranteed to be lost.
 *
 * @param nodes Number of server nodes to start.
 * @param autoAdjust Baseline auto-adjust flag.
 * @param lsnr Listener registered for {@code EVT_CACHE_REBALANCE_PART_DATA_LOST} on each node.
 * @param stopNodes Indexes of nodes to stop.
 * @return Partitions expected to be lost after the stop sequence.
 * @throws Exception If failed.
 */
private Set<Integer> prepareTopology(int nodes, boolean autoAdjust, P1<Event> lsnr, int... stopNodes) throws Exception {
    final IgniteEx crd = startGrids(nodes);
    crd.cluster().baselineAutoAdjustEnabled(autoAdjust);
    crd.cluster().active(true);

    // All caches use the same affinity configuration, so CACHES[0] is representative.
    Affinity<Object> aff = ignite(0).affinity(CACHES[0]);

    // Preload one entry per partition per cache so data loss is observable.
    for (int i = 0; i < aff.partitions(); i++) {
        for (String cacheName0 : CACHES)
            ignite(0).cache(cacheName0).put(i, i);
    }

    // Node index "nodes" is started as a client; restore the flag afterwards.
    client = true;

    startGrid(nodes);

    client = false;

    for (int i = 0; i < nodes; i++)
        info(">>> Node [idx=" + i + ", nodeId=" + ignite(i).cluster().localNode().id() + ']');

    awaitPartitionMapExchange();

    Set<Integer> expLostParts = new HashSet<>();

    // Sorted copy enables binarySearch-based "will this node be stopped?" checks.
    int[] stopNodesSorted = Arrays.copyOf(stopNodes, stopNodes.length);
    Arrays.sort(stopNodesSorted);

    // Find partitions not owned by any remaining node.
    for (int i = 0; i < PARTS_CNT; i++) {
        int c = 0;

        for (int idx = 0; idx < nodes; idx++) {
            if (Arrays.binarySearch(stopNodesSorted, idx) < 0 && !aff.isPrimary(grid(idx).localNode(), i) && !aff.isBackup(grid(idx).localNode(), i))
                c++;
        }

        // Every surviving node holds neither primary nor backup copy -> the partition is lost.
        if (c == nodes - stopNodes.length)
            expLostParts.add(i);
    }

    // Fixed typo in the assertion message ("scneario" -> "scenario").
    assertFalse("Expecting lost partitions for the test scenario", expLostParts.isEmpty());

    for (Ignite ignite : G.allGrids()) {
        // Prevent rebalancing to bring partitions in owning state.
        if (backups > 0) {
            TestRecordingCommunicationSpi.spi(ignite).blockMessages(new IgniteBiPredicate<ClusterNode, Message>() {
                @Override public boolean apply(ClusterNode clusterNode, Message msg) {
                    return msg instanceof GridDhtPartitionDemandMessage;
                }
            });
        }

        ignite.events().localListen(lsnr, EventType.EVT_CACHE_REBALANCE_PART_DATA_LOST);
    }

    for (int i = 0; i < stopNodes.length; i++)
        stopGrid(stopNodes[i], true);

    return expLostParts;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.ByteRangeUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.hbase.util.Strings;
import com.google.common.collect.Lists;
/**
* Individual node in a Trie structure. Each node is one of 3 types:
* <li>Branch: an internal trie node that may have a token and must have multiple children, but does
* not represent an actual input byte[], hence its numOccurrences is 0
 * <li>Leaf: a node with no children and where numOccurrences is >= 1. Its token represents the
 * last bytes in the input byte[]s.
 * <li>Nub: a combination of a branch and leaf. Its token represents the last bytes of input
 * byte[]s and has numOccurrences >= 1, but it also has child nodes which represent input byte[]s
 * that add bytes to this node's input byte[].
* <br/><br/>
* Example inputs (numInputs=7):
* 0: AAA
* 1: AAA
* 2: AAB
* 3: AAB
* 4: AAB
* 5: AABQQ
* 6: AABQQ
* <br/><br/>
* Resulting TokenizerNodes:
* AA <- branch, numOccurrences=0, tokenStartOffset=0, token.length=2
* A <- leaf, numOccurrences=2, tokenStartOffset=2, token.length=1
* B <- nub, numOccurrences=3, tokenStartOffset=2, token.length=1
* QQ <- leaf, numOccurrences=2, tokenStartOffset=3, token.length=2
* <br/><br/>
* numInputs == 7 == sum(numOccurrences) == 0 + 2 + 3 + 2
*/
@InterfaceAudience.Private
public class TokenizerNode{

  /*
   * Ref to data structure wrapper
   */
  protected Tokenizer builder;

  /******************************************************************
   * Tree content/structure used during tokenization
   * ****************************************************************/

  /*
   * ref to parent trie node
   */
  protected TokenizerNode parent;

  /*
   * node depth in trie, irrespective of each node's token length
   */
  protected int nodeDepth;

  /*
   * start index of this token in original byte[]
   */
  protected int tokenStartOffset;

  /*
   * bytes for this trie node. can be length 0 in root node
   */
  protected ByteRange token;

  /*
   * A count of occurrences in the input byte[]s, not the trie structure. 0 for branch nodes, 1+ for
   * nubs and leaves. If the same byte[] is added to the trie multiple times, this is the only thing
   * that changes in the tokenizer. As a result, duplicate byte[]s are very inexpensive to encode.
   */
  protected int numOccurrences;

  /*
   * The maximum fan-out of a byte[] trie is 256, so there are a maximum of 256
   * child nodes.
   */
  protected ArrayList<TokenizerNode> children;

  /*
   * Fields used later in the encoding process for sorting the nodes into the order they'll be
   * written to the output byte[]. With these fields, the TokenizerNode and therefore Tokenizer
   * are not generic data structures but instead are specific to HBase PrefixTree encoding.
   */

  /*
   * unique id assigned to each TokenizerNode
   */
  protected long id;

  /*
   * set >=0 for nubs and leaves
   */
  protected int firstInsertionIndex = -1;

  /*
   * A positive value indicating how many bytes before the end of the block this node will start. If
   * the section is 55 bytes and negativeOffset is 9, then the node will start at 46.
   */
  protected int negativeIndex = 0;

  /*
   * The offset in the output array at which to start writing this node's token bytes. Influenced
   * by the lengths of all tokens sorted before this one.
   */
  protected int outputArrayOffset = -1;


  /*********************** construct *****************************/

  public TokenizerNode(Tokenizer builder, TokenizerNode parent, int nodeDepth,
      int tokenStartOffset, int tokenOffset, int tokenLength) {
    this.token = new SimpleMutableByteRange();
    reconstruct(builder, parent, nodeDepth, tokenStartOffset, tokenOffset, tokenLength);
    this.children = Lists.newArrayList();
  }

  /*
   * Sub-constructor for initializing all fields without allocating a new object. Used by the
   * regular constructor.
   */
  public void reconstruct(Tokenizer builder, TokenizerNode parent, int nodeDepth,
      int tokenStartOffset, int tokenOffset, int tokenLength) {
    this.builder = builder;
    this.id = builder.nextNodeId();
    this.parent = parent;
    this.nodeDepth = nodeDepth;
    builder.submitMaxNodeDepthCandidate(nodeDepth);
    this.tokenStartOffset = tokenStartOffset;
    // Token is a view over the builder's shared byte buffer, not a copy.
    this.token.set(builder.tokens, tokenOffset, tokenLength);
    this.numOccurrences = 1;
  }

  /*
   * Clear the state of this node so that it looks like it was just allocated.
   */
  public void reset() {
    builder = null;
    parent = null;
    nodeDepth = 0;
    tokenStartOffset = 0;
    token.unset();
    numOccurrences = 0;
    children.clear();// branches & nubs

    // ids/offsets. used during writing to byte[]
    id = 0;
    firstInsertionIndex = -1;// set >=0 for nubs and leaves
    negativeIndex = 0;
    outputArrayOffset = -1;
  }


  /************************* building *********************************/

  /*
   * <li>Only public method used during the tokenization process
   * <li>Requires that the input ByteRange sort after the previous, and therefore after all previous
   * inputs
   * <li>Only looks at bytes of the input array that align with this node's token
   */
  public void addSorted(final ByteRange bytes) {// recursively build the tree

    /*
     * Recurse deeper into the existing trie structure
     */
    if (matchesToken(bytes) && CollectionUtils.notEmpty(children)) {
      TokenizerNode lastChild = CollectionUtils.getLast(children);
      // Because inputs arrive sorted, only the last child can possibly match.
      if (lastChild.partiallyMatchesToken(bytes)) {
        lastChild.addSorted(bytes);
        return;
      }
    }

    /*
     * Recursion ended. We must either
     * <li>1: increment numOccurrences if this input was equal to the previous
     * <li>2: convert this node from a leaf to a nub, and add a new child leaf
     * <li>3: split this node into a branch and leaf, and then add a second leaf
     */

    // add it as a child of this node
    int numIdenticalTokenBytes = numIdenticalBytes(bytes);// should be <= token.length
    int tailOffset = tokenStartOffset + numIdenticalTokenBytes;
    int tailLength = bytes.getLength() - tailOffset;

    if (numIdenticalTokenBytes == token.getLength()) {
      if (tailLength == 0) {// identical to this node (case 1)
        incrementNumOccurrences(1);
      } else {// identical to this node, but with a few extra tailing bytes. (leaf -> nub) (case 2)
        int childNodeDepth = nodeDepth + 1;
        int childTokenStartOffset = tokenStartOffset + numIdenticalTokenBytes;
        TokenizerNode newChildNode = builder.addNode(this, childNodeDepth, childTokenStartOffset,
          bytes, tailOffset);
        addChild(newChildNode);
      }
    } else {//numIdenticalBytes > 0, split into branch/leaf and then add second leaf (case 3)
      split(numIdenticalTokenBytes, bytes);
    }
  }


  /** Appends {@code node} as the last child of this node and links its parent pointer. */
  protected void addChild(TokenizerNode node) {
    node.setParent(this);
    children.add(node);
  }


  /**
   * Called when we need to convert a leaf node into a branch with 2 leaves. Comments inside the
   * method assume we have token BAA starting at tokenStartOffset=0 and are adding BOO. The output
   * will be 3 nodes:<br/>
   * <li>1: B <- branch
   * <li>2: AA <- leaf
   * <li>3: OO <- leaf
   *
   * @param numTokenBytesToRetain => 1 (the B)
   * @param bytes => BOO
   */
  protected void split(int numTokenBytesToRetain, final ByteRange bytes) {
    int childNodeDepth = nodeDepth;
    int childTokenStartOffset = tokenStartOffset + numTokenBytesToRetain;

    //create leaf AA
    TokenizerNode firstChild = builder.addNode(this, childNodeDepth, childTokenStartOffset,
      token, numTokenBytesToRetain);
    firstChild.setNumOccurrences(numOccurrences);// do before clearing this node's numOccurrences
    token.setLength(numTokenBytesToRetain);//shorten current token from BAA to B
    numOccurrences = 0;//current node is now a branch

    moveChildrenToDifferentParent(firstChild);//point the new leaf (AA) to the new branch (B)
    addChild(firstChild);//add the new leaf (AA) to the branch's (B's) children

    //create leaf OO
    TokenizerNode secondChild = builder.addNode(this, childNodeDepth, childTokenStartOffset,
      bytes, tokenStartOffset + numTokenBytesToRetain);
    addChild(secondChild);//add the new leaf (00) to the branch's (B's) children

    // we inserted branch node B as a new level above/before the two children, so increment the
    // depths of the children below
    firstChild.incrementNodeDepthRecursively();
    secondChild.incrementNodeDepthRecursively();
  }


  /** Increments this node's depth and the depth of every descendant. */
  protected void incrementNodeDepthRecursively() {
    ++nodeDepth;
    builder.submitMaxNodeDepthCandidate(nodeDepth);
    for (int i = 0; i < children.size(); ++i) {
      children.get(i).incrementNodeDepthRecursively();
    }
  }


  /** Re-parents all of this node's children onto {@code newParent} and clears the local list. */
  protected void moveChildrenToDifferentParent(TokenizerNode newParent) {
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      child.setParent(newParent);
      newParent.children.add(child);
    }
    children.clear();
  }


  /************************ byte[] utils *************************/

  /** @return true if at least one leading byte of {@code bytes} matches this node's token. */
  protected boolean partiallyMatchesToken(ByteRange bytes) {
    return numIdenticalBytes(bytes) > 0;
  }

  /** @return true if this node's entire token matches the corresponding bytes of the input. */
  protected boolean matchesToken(ByteRange bytes) {
    return numIdenticalBytes(bytes) == getTokenLength();
  }

  /** @return number of leading input bytes (starting at tokenStartOffset) equal to the token. */
  protected int numIdenticalBytes(ByteRange bytes) {
    return ByteRangeUtils.numEqualPrefixBytes(token, bytes, tokenStartOffset);
  }


  /***************** moving nodes around ************************/

  /**
   * Depth-first collection of this node and its descendants into {@code appendTo},
   * filtered by leaf/non-leaf.
   */
  public void appendNodesToExternalList(List<TokenizerNode> appendTo, boolean includeNonLeaves,
      boolean includeLeaves) {
    if (includeNonLeaves && !isLeaf() || includeLeaves && isLeaf()) {
      appendTo.add(this);
    }
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      child.appendNodesToExternalList(appendTo, includeNonLeaves, includeLeaves);
    }
  }

  /**
   * Assigns insertion indexes depth-first: nodes with occurrences consume {@code numOccurrences}
   * consecutive slots starting at {@code nextIndex}.
   *
   * @return the next unused index after this subtree.
   */
  public int setInsertionIndexes(int nextIndex) {
    int newNextIndex = nextIndex;
    if (hasOccurrences()) {
      setFirstInsertionIndex(nextIndex);
      newNextIndex += numOccurrences;
    }
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      newNextIndex = child.setInsertionIndexes(newNextIndex);
    }
    return newNextIndex;
  }

  /**
   * Depth-first collection of the output array offsets of this node and all descendants that
   * have occurrences.
   */
  public void appendOutputArrayOffsets(List<Integer> offsets) {
    if (hasOccurrences()) {
      offsets.add(outputArrayOffset);
    }
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      child.appendOutputArrayOffsets(offsets);
    }
  }


  /***************** searching *********************************/

  /*
   * Do a trie style search through the tokenizer. One option for looking up families or qualifiers
   * during encoding, but currently unused in favor of tracking this information as they are added.
   *
   * Keeping code pending further performance testing.
   */
  public void getNode(TokenizerRowSearchResult resultHolder, byte[] key, int keyOffset,
      int keyLength) {
    int thisNodeDepthPlusLength = tokenStartOffset + token.getLength();

    // quick check if the key is shorter than this node (may not work for binary search)
    if (CollectionUtils.isEmpty(children)) {
      if (thisNodeDepthPlusLength < keyLength) {// ran out of bytes
        resultHolder.set(TokenizerRowSearchPosition.NO_MATCH, null);
        return;
      }
    }

    // all token bytes must match
    for (int i = 0; i < token.getLength(); ++i) {
      if (key[tokenStartOffset + keyOffset + i] != token.get(i)) {
        // TODO return whether it's before or after so we can binary search
        resultHolder.set(TokenizerRowSearchPosition.NO_MATCH, null);
        return;
      }
    }

    if (thisNodeDepthPlusLength == keyLength && numOccurrences > 0) {
      resultHolder.set(TokenizerRowSearchPosition.MATCH, this);// MATCH
      return;
    }

    if (CollectionUtils.notEmpty(children)) {
      // TODO binary search the children
      for (int i = 0; i < children.size(); ++i) {
        TokenizerNode child = children.get(i);
        child.getNode(resultHolder, key, keyOffset, keyLength);
        if (resultHolder.isMatch()) {
          return;
        } else if (resultHolder.getDifference() == TokenizerRowSearchPosition.BEFORE) {
          // passed it, so it doesn't exist
          resultHolder.set(TokenizerRowSearchPosition.NO_MATCH, null);
          return;
        }
        // key is still AFTER the current node, so continue searching
      }
    }

    // checked all children (or there were no children), and didn't find it
    resultHolder.set(TokenizerRowSearchPosition.NO_MATCH, null);
    return;
  }


  /****************** writing back to byte[]'s *************************/

  /**
   * @return a newly allocated byte[] containing this node's full input bytes, reconstructed by
   * walking up the parent chain.
   */
  public byte[] getNewByteArray() {
    byte[] arrayToFill = new byte[tokenStartOffset + token.getLength()];
    fillInBytes(arrayToFill);
    return arrayToFill;
  }

  /**
   * Writes this node's token at its tokenStartOffset, then recurses up through the ancestors so
   * the full prefix is filled in.
   */
  public void fillInBytes(byte[] arrayToFill) {
    for (int i = 0; i < token.getLength(); ++i) {
      arrayToFill[tokenStartOffset + i] = token.get(i);
    }
    if (parent != null) {
      parent.fillInBytes(arrayToFill);
    }
  }


  /************************** printing ***********************/

  @Override
  public String toString() {
    String s = "";
    if (parent == null) {
      s += "R ";
    } else {
      s += getBnlIndicator(false) + " " + Bytes.toString(parent.getNewByteArray());
    }
    s += "[" + Bytes.toString(token.deepCopyToNewArray()) + "]";
    if (numOccurrences > 0) {
      s += "x" + numOccurrences;
    }
    return s;
  }

  /**
   * @return a debug line: branch/nub/leaf indicator, occurrence count, depth, optional output
   * offset, then the token padded to its start offset (spaces shown as underscores).
   */
  public String getPaddedTokenAndOccurrenceString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getBnlIndicator(true));
    sb.append(Strings.padFront(numOccurrences + "", ' ', 3));
    sb.append(Strings.padFront(nodeDepth + "", ' ', 3));
    if (outputArrayOffset >= 0) {
      sb.append(Strings.padFront(outputArrayOffset + "", ' ', 3));
    }
    sb.append("  ");
    for (int i = 0; i < tokenStartOffset; ++i) {
      sb.append(" ");
    }
    sb.append(Bytes.toString(token.deepCopyToNewArray()).replaceAll(" ", "_"));
    return sb.toString();
  }

  /**
   * @param indent whether to return the padded, column-aligned form.
   * @return "B"/"N"/"L" marker for branch, nub or leaf.
   */
  public String getBnlIndicator(boolean indent) {
    if (indent) {
      if (isNub()) {
        return " N ";
      }
      return isBranch() ? "B  " : "  L";
    }
    if (isNub()) {
      return "N";
    }
    return isBranch() ? "B" : "L";
  }


  /********************** count different node types ********************/

  /** @return number of branch nodes in this subtree, including this node if it is a branch. */
  public int getNumBranchNodesIncludingThisNode() {
    if (isLeaf()) {
      return 0;
    }
    int totalFromThisPlusChildren = isBranch() ? 1 : 0;
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      totalFromThisPlusChildren += child.getNumBranchNodesIncludingThisNode();
    }
    return totalFromThisPlusChildren;
  }

  /** @return number of nub nodes in this subtree, including this node if it is a nub. */
  public int getNumNubNodesIncludingThisNode() {
    if (isLeaf()) {
      return 0;
    }
    int totalFromThisPlusChildren = isNub() ? 1 : 0;
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      totalFromThisPlusChildren += child.getNumNubNodesIncludingThisNode();
    }
    return totalFromThisPlusChildren;
  }

  /** @return number of leaf nodes in this subtree, including this node if it is a leaf. */
  public int getNumLeafNodesIncludingThisNode() {
    if (isLeaf()) {
      return 1;
    }
    int totalFromChildren = 0;
    for (int i = 0; i < children.size(); ++i) {
      TokenizerNode child = children.get(i);
      totalFromChildren += child.getNumLeafNodesIncludingThisNode();
    }
    return totalFromChildren;
  }


  /*********************** simple read-only methods *******************************/

  public int getNodeDepth() {
    return nodeDepth;
  }

  public int getTokenLength() {
    return token.getLength();
  }

  public boolean hasOccurrences() {
    return numOccurrences > 0;
  }

  public boolean isRoot() {
    return this.parent == null;
  }

  public int getNumChildren() {
    return CollectionUtils.nullSafeSize(children);
  }

  public TokenizerNode getLastChild() {
    if (CollectionUtils.isEmpty(children)) {
      return null;
    }
    return CollectionUtils.getLast(children);
  }

  // Leaf: terminal node with occurrences; branch: internal node without; nub: both.
  public boolean isLeaf() {
    return CollectionUtils.isEmpty(children) && hasOccurrences();
  }

  public boolean isBranch() {
    return CollectionUtils.notEmpty(children) && !hasOccurrences();
  }

  public boolean isNub() {
    return CollectionUtils.notEmpty(children) && hasOccurrences();
  }


  /********************** simple mutation methods *************************/

  /**
   * Each occurrence > 1 indicates a repeat of the previous entry. This can be called directly by
   * an external class without going through the process of detecting a repeat if it is a known
   * repeat by some external mechanism. PtEncoder uses this when adding cells to a row if it knows
   * the new cells are part of the current row.
   * @param d increment by this amount
   */
  public void incrementNumOccurrences(int d) {
    numOccurrences += d;
  }


  /************************* autogenerated get/set ******************/

  public int getTokenOffset() {
    return tokenStartOffset;
  }

  public TokenizerNode getParent() {
    return parent;
  }

  public ByteRange getToken() {
    return token;
  }

  public int getNumOccurrences() {
    return numOccurrences;
  }

  public void setParent(TokenizerNode parent) {
    this.parent = parent;
  }

  public void setNumOccurrences(int numOccurrences) {
    this.numOccurrences = numOccurrences;
  }

  public ArrayList<TokenizerNode> getChildren() {
    return children;
  }

  public long getId() {
    return id;
  }

  public int getFirstInsertionIndex() {
    return firstInsertionIndex;
  }

  public void setFirstInsertionIndex(int firstInsertionIndex) {
    this.firstInsertionIndex = firstInsertionIndex;
  }

  public int getNegativeIndex() {
    return negativeIndex;
  }

  public void setNegativeIndex(int negativeIndex) {
    this.negativeIndex = negativeIndex;
  }

  public int getOutputArrayOffset() {
    return outputArrayOffset;
  }

  public void setOutputArrayOffset(int outputArrayOffset) {
    this.outputArrayOffset = outputArrayOffset;
  }

  public void setId(long id) {
    this.id = id;
  }

  public void setBuilder(Tokenizer builder) {
    this.builder = builder;
  }

  public void setTokenOffset(int tokenOffset) {
    this.tokenStartOffset = tokenOffset;
  }

  public void setToken(ByteRange token) {
    this.token = token;
  }

}
| |
package de.hpi.bpmn2xpdl;
import org.json.JSONException;
import org.json.JSONObject;
import org.xmappr.Attribute;
import org.xmappr.Element;
import org.xmappr.RootElement;
@RootElement("StartEvent")
public class XPDLStartEvent extends XMLConvertible {
@Attribute("Trigger")
protected String trigger;
@Attribute("Implementation")
protected String implementation;
@Element("TriggerConditional")
protected XPDLTriggerConditional triggerConditional;
@Element("TriggerResultMessage")
protected XPDLTriggerResultMessage triggerResultMessage;
@Element("TriggerResultSignal")
protected XPDLTriggerResultSignal triggerResultSignal;
@Element("TriggerTimer")
protected XPDLTriggerTimer triggerTimer;
public String getImplementation() {
return implementation;
}
public String getTrigger() {
return trigger;
}
public XPDLTriggerConditional getTriggerConditional() {
return triggerConditional;
}
public XPDLTriggerResultMessage getTriggerResultMessage() {
return triggerResultMessage;
}
public XPDLTriggerResultSignal getTriggerResultSignal() {
return triggerResultSignal;
}
public XPDLTriggerTimer getTriggerTimer() {
return triggerTimer;
}
public void readJSONconditionref(JSONObject modelElement) throws JSONException {
JSONObject passObject = new JSONObject();
passObject.put("condition", modelElement.optString("condition"));
passObject.put("triggerresultunknowns", modelElement.optString("triggerresultunknowns"));
XPDLTriggerConditional condition = new XPDLTriggerConditional();
condition.parse(passObject);
setTriggerConditional(condition);
}
public void readJSONimplementation(JSONObject modelElement) {
setImplementation(modelElement.optString("implementation"));
}
public void readJSONmessage(JSONObject modelElement) throws JSONException {
passInformationToTriggerResultMessage(modelElement, "message");
}
public void readJSONmessageunknowns(JSONObject modelElement) throws JSONException {
passInformationToTriggerResultMessage(modelElement, "messageunknowns");
}
public void readJSONstarteventunknowns(JSONObject modelElement) throws JSONException {
readUnknowns(modelElement, "starteventunknowns");
}
public void readJSONsignalref(JSONObject modelElement) throws JSONException {
JSONObject passObject = new JSONObject();
passObject.put("signalref", modelElement.optString("signalref"));
passObject.put("triggerresultunknowns", modelElement.optString("triggerresultunknowns"));
XPDLTriggerResultSignal signal = new XPDLTriggerResultSignal();
signal.parse(passObject);
setTriggerResultSignal(signal);
}
public void readJSONstencil(JSONObject modelElement) {
}
public void readJSONtimecycle(JSONObject modelElement) throws JSONException {
passInformationToTriggerTimer(modelElement, "timecycle");
}
public void readJSONtimedate(JSONObject modelElement) throws JSONException {
passInformationToTriggerTimer(modelElement, "timedate");
}
public void readJSONtrigger(JSONObject modelElement) {
String trigger = modelElement.optString("trigger");
if (trigger.equals("Rule")) {
setTrigger("Conditional");
} else if (modelElement.optString("stencil").equals("StartSignalEvent")) {
setTrigger("Signal");
} else {
setTrigger(trigger);
}
}
public void readJSONtriggerresultunknowns(JSONObject modelElement) {
}
public void setImplementation(String implementation) {
this.implementation = implementation;
}
public void setTrigger(String triggerValue) {
trigger = triggerValue;
}
public void setTriggerConditional(XPDLTriggerConditional triggerConditional) {
this.triggerConditional = triggerConditional;
}
public void setTriggerTimer(XPDLTriggerTimer timer) {
triggerTimer = timer;
}
public void setTriggerResultMessage(XPDLTriggerResultMessage message) {
triggerResultMessage = message;
}
public void setTriggerResultSignal(XPDLTriggerResultSignal triggerResultSignal) {
this.triggerResultSignal = triggerResultSignal;
}
public void writeJSONeventtype(JSONObject modelElement) throws JSONException {
putProperty(modelElement, "eventtype", "Start");
}
public void writeJSONimplementation(JSONObject modelElement) throws JSONException {
putProperty(modelElement, "implementation", getImplementation());
}
public void writeJSONstarteventunknowns(JSONObject modelElement) throws JSONException {
writeUnknowns(modelElement, "starteventunknowns");
}
public void writeJSONtrigger(JSONObject modelElement) throws JSONException {
String triggerValue = getTrigger();
if (triggerValue != null) {
if (triggerValue.equalsIgnoreCase("Conditional")) {
putProperty(modelElement, "trigger", "Rule");
appendStencil(modelElement, "ConditionalEvent");
} else if (triggerValue.equalsIgnoreCase("Message")) {
putProperty(modelElement, "trigger", "Message");
appendStencil(modelElement, "MessageEvent");
} else if (triggerValue.equalsIgnoreCase("Timer")) {
putProperty(modelElement, "trigger", "Timer");
appendStencil(modelElement, "TimerEvent");
} else if (triggerValue.equalsIgnoreCase("Signal")) {
//Yeah strange but true
putProperty(modelElement, "trigger", "Multiple");
appendStencil(modelElement, "SignalEvent");
} else if (triggerValue.equalsIgnoreCase("Multiple")) {
putProperty(modelElement, "trigger", "Multiple");
appendStencil(modelElement, "MultipleEvent");
} else {
putProperty(modelElement, "trigger", "None");
appendStencil(modelElement, "Event");
}
} else {
putProperty(modelElement, "trigger", "None");
appendStencil(modelElement, "Event");
}
}
public void writeJSONtriggerObjects(JSONObject modelElement) throws JSONException {
if (getTriggerConditional() != null) {
getTriggerConditional().write(modelElement);
} else if (getTriggerResultMessage() != null) {
getTriggerResultMessage().write(modelElement);
} else if (getTriggerResultSignal() != null) {
getTriggerResultSignal().write(modelElement);
} else if (getTriggerTimer() != null) {
getTriggerTimer().write(modelElement);
}
}
/**
 * Replaces the model's stencil with one whose id is the current stencil id
 * plus {@code appendix}.
 *
 * <p>Fix: {@code optJSONObject("stencil")} returns {@code null} when the
 * element has no stencil object; the original chained {@code .optString("id")}
 * onto it unconditionally and threw a {@link NullPointerException} in that
 * case. A missing stencil now contributes an empty base id instead.
 *
 * @param modelElement the JSON model element whose stencil is rewritten
 * @param appendix     suffix appended to the current stencil id
 * @throws JSONException if the new stencil cannot be stored
 */
protected void appendStencil(JSONObject modelElement, String appendix) throws JSONException {
    JSONObject currentStencil = modelElement.optJSONObject("stencil");
    // optString() on a present stencil already defaults to "" when "id" is absent.
    String baseId = (currentStencil != null) ? currentStencil.optString("id") : "";
    JSONObject stencil = new JSONObject();
    stencil.put("id", baseId + appendix);
    modelElement.put("stencil", stencil);
}
/**
 * Returns the "properties" sub-object of the given model element, or
 * {@code null} when none is present.
 *
 * @param modelElement the JSON model element to read from
 * @return the properties object, or {@code null} if absent
 */
protected JSONObject getProperties(JSONObject modelElement) {
    final JSONObject properties = modelElement.optJSONObject("properties");
    return properties;
}
/**
 * Ensures the given model element carries a "properties" sub-object,
 * creating an empty one if it is missing.
 *
 * <p>Fix: the original assigned the freshly created object to a local
 * variable that was never read afterwards (dead store); the local has been
 * removed without changing behavior.
 *
 * @param modelElement the JSON model element to initialize
 * @throws JSONException if the properties object cannot be stored
 */
protected void initializeProperties(JSONObject modelElement) throws JSONException {
    if (modelElement.optJSONObject("properties") == null) {
        modelElement.put("properties", new JSONObject());
    }
}
/**
 * Lazily creates the trigger-result-message holder; a no-op when one
 * already exists.
 */
protected void initializeTriggerResultMessage() {
    if (getTriggerResultMessage() != null) {
        return;
    }
    setTriggerResultMessage(new XPDLTriggerResultMessage());
}
/**
 * Lazily creates the trigger-timer holder; a no-op when one already exists.
 */
protected void initializeTriggerTimer() {
    if (getTriggerTimer() != null) {
        return;
    }
    setTriggerTimer(new XPDLTriggerTimer());
}
/**
 * Forwards the value under {@code key} (plus the shared unknowns bucket)
 * from the model element to the trigger-result-message object for parsing.
 *
 * @param modelElement source JSON model element
 * @param key          property key to forward
 * @throws JSONException if building or parsing the forwarded object fails
 */
protected void passInformationToTriggerResultMessage(JSONObject modelElement, String key) throws JSONException {
    // Build the payload first; the holder only needs to exist before parse().
    JSONObject forwarded = new JSONObject();
    forwarded.put(key, modelElement.optString(key));
    forwarded.put("triggerresultunknowns", modelElement.optString("triggerresultunknowns"));
    initializeTriggerResultMessage();
    getTriggerResultMessage().parse(forwarded);
}
/**
 * Forwards the value under {@code key} (plus the shared unknowns bucket)
 * from the model element to the trigger-timer object for parsing.
 *
 * @param modelElement source JSON model element
 * @param key          property key to forward
 * @throws JSONException if building or parsing the forwarded object fails
 */
protected void passInformationToTriggerTimer(JSONObject modelElement, String key) throws JSONException {
    // Ensure the holder exists, then hand it the extracted payload.
    initializeTriggerTimer();
    JSONObject forwarded = new JSONObject();
    forwarded.put(key, modelElement.optString(key));
    forwarded.put("triggerresultunknowns", modelElement.optString("triggerresultunknowns"));
    getTriggerTimer().parse(forwarded);
}
/**
 * Stores a key/value pair in the element's "properties" object, creating
 * that object on demand.
 *
 * @param modelElement the JSON model element to update
 * @param key          property name
 * @param value        property value
 * @throws JSONException if the property cannot be stored
 */
protected void putProperty(JSONObject modelElement, String key, String value) throws JSONException {
    initializeProperties(modelElement);
    JSONObject properties = getProperties(modelElement);
    properties.put(key, value);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.flume.ChannelException;
import org.apache.flume.ChannelFullException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.annotations.InterfaceAudience;
import org.apache.flume.annotations.InterfaceStability;
import org.apache.flume.annotations.Recyclable;
import org.apache.flume.instrumentation.ChannelCounter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.concurrent.GuardedBy;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
 * <p>
 * MemoryChannel is the recommended channel to use when speed is required and
 * writing to disk is impractical, or when durability of data is not required.
 * </p>
 * <p>
 * Additionally, MemoryChannel should be used when a channel is required for
 * unit testing purposes.
 * </p>
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
@Recyclable
public class MemoryChannel extends BasicChannelSemantics {
  // Fix: logger made final (it is only assigned here).
  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryChannel.class);
  // Defaults used when the corresponding context parameter is absent or invalid.
  private static final Integer defaultCapacity = 100;
  private static final Integer defaultTransCapacity = 100;
  // Byte capacity is accounted in slots of this many bytes so semaphore
  // permit counts stay small.
  private static final double byteCapacitySlotSize = 100;
  private static final Long defaultByteCapacity = (long)(Runtime.getRuntime().maxMemory() * .80);
  private static final Integer defaultByteCapacityBufferPercentage = 20;
  private static final Integer defaultKeepAlive = 3;

  /**
   * Per-transaction state: events staged for put/take plus their byte-slot
   * accounting. Committed or rolled back as a unit.
   */
  private class MemoryTransaction extends BasicTransactionSemantics {
    // Fix: both staging deques made final (assigned once, in the constructor).
    private final LinkedBlockingDeque<Event> takeList;
    private final LinkedBlockingDeque<Event> putList;
    private final ChannelCounter channelCounter;
    private int putByteCounter = 0;
    private int takeByteCounter = 0;

    public MemoryTransaction(int transCapacity, ChannelCounter counter) {
      putList = new LinkedBlockingDeque<Event>(transCapacity);
      takeList = new LinkedBlockingDeque<Event>(transCapacity);
      channelCounter = counter;
    }

    /**
     * Stages an event for commit. Byte-capacity permits are only acquired at
     * commit time; here we merely track the slot count.
     */
    @Override
    protected void doPut(Event event) throws InterruptedException {
      channelCounter.incrementEventPutAttemptCount();
      int eventByteSize = (int) Math.ceil(estimateEventSize(event) / byteCapacitySlotSize);
      if (!putList.offer(event)) {
        throw new ChannelException(
            "Put queue for MemoryTransaction of capacity " +
            putList.size() + " full, consider committing more frequently, " +
            "increasing capacity or increasing thread count");
      }
      putByteCounter += eventByteSize;
    }

    /**
     * Takes one event from the shared queue into this transaction's take
     * list, or returns null if no event becomes available within keepAlive
     * seconds.
     */
    @Override
    protected Event doTake() throws InterruptedException {
      channelCounter.incrementEventTakeAttemptCount();
      if (takeList.remainingCapacity() == 0) {
        throw new ChannelException("Take list for MemoryTransaction, capacity " +
            takeList.size() + " full, consider committing more frequently, " +
            "increasing capacity, or increasing thread count");
      }
      // Reserve a stored event via the semaphore so we never block on the
      // queue itself while holding queueLock.
      if (!queueStored.tryAcquire(keepAlive, TimeUnit.SECONDS)) {
        return null;
      }
      Event event;
      synchronized (queueLock) {
        event = queue.poll();
      }
      Preconditions.checkNotNull(event, "Queue.poll returned NULL despite semaphore " +
          "signalling existence of entry");
      takeList.put(event);
      int eventByteSize = (int) Math.ceil(estimateEventSize(event) / byteCapacitySlotSize);
      takeByteCounter += eventByteSize;
      return event;
    }

    /**
     * Atomically applies the staged puts and takes to the shared queue,
     * adjusting the byte and slot semaphores. Permits are acquired before
     * touching the queue so a failed acquire leaves the channel unchanged.
     */
    @Override
    protected void doCommit() throws InterruptedException {
      int remainingChange = takeList.size() - putList.size();
      if (remainingChange < 0) {
        // Net growth: reserve byte capacity first, then queue slots.
        if (!bytesRemaining.tryAcquire(putByteCounter, keepAlive, TimeUnit.SECONDS)) {
          // Fix: added missing space before "reached" (message previously
          // rendered as e.g. "...8000reached.").
          throw new ChannelException("Cannot commit transaction. Byte capacity " +
              "allocated to store event body " + byteCapacity * byteCapacitySlotSize +
              " reached. Please increase heap space/byte capacity allocated to " +
              "the channel as the sinks may not be keeping up with the sources");
        }
        if (!queueRemaining.tryAcquire(-remainingChange, keepAlive, TimeUnit.SECONDS)) {
          // Undo the byte reservation before bailing out.
          bytesRemaining.release(putByteCounter);
          throw new ChannelFullException("Space for commit to queue couldn't be acquired." +
              " Sinks are likely not keeping up with sources, or the buffer size is too tight");
        }
      }
      int puts = putList.size();
      int takes = takeList.size();
      synchronized (queueLock) {
        if (puts > 0) {
          while (!putList.isEmpty()) {
            if (!queue.offer(putList.removeFirst())) {
              throw new RuntimeException("Queue add failed, this shouldn't be able to happen");
            }
          }
        }
        putList.clear();
        takeList.clear();
      }
      // Taken events free their byte slots only once the commit succeeds.
      bytesRemaining.release(takeByteCounter);
      takeByteCounter = 0;
      putByteCounter = 0;
      queueStored.release(puts);
      if (remainingChange > 0) {
        queueRemaining.release(remainingChange);
      }
      if (puts > 0) {
        channelCounter.addToEventPutSuccessCount(puts);
      }
      if (takes > 0) {
        channelCounter.addToEventTakeSuccessCount(takes);
      }
      channelCounter.setChannelSize(queue.size());
    }

    /**
     * Returns taken events to the head of the shared queue (preserving
     * order) and discards staged puts.
     */
    @Override
    protected void doRollback() {
      int takes = takeList.size();
      synchronized (queueLock) {
        Preconditions.checkState(queue.remainingCapacity() >= takeList.size(),
            "Not enough space in memory channel " +
            "queue to rollback takes. This should never happen, please report");
        while (!takeList.isEmpty()) {
          queue.addFirst(takeList.removeLast());
        }
        putList.clear();
      }
      putByteCounter = 0;
      takeByteCounter = 0;
      queueStored.release(takes);
      channelCounter.setChannelSize(queue.size());
    }
  }

  // lock to guard queue, mainly needed to keep it locked down during resizes
  // it should never be held through a blocking operation
  // Fix: made final (assigned only at declaration).
  private final Object queueLock = new Object();
  @GuardedBy(value = "queueLock")
  private LinkedBlockingDeque<Event> queue;
  // invariant that tracks the amount of space remaining in the queue(with all uncommitted takeLists deducted)
  // we maintain the remaining permits = queue.remaining - takeList.size()
  // this allows local threads waiting for space in the queue to commit without denying access to the
  // shared lock to threads that would make more space on the queue
  private Semaphore queueRemaining;
  // used to make "reservations" to grab data from the queue.
  // by using this we can block for a while to get data without locking all other threads out
  // like we would if we tried to use a blocking call on queue
  private Semaphore queueStored;
  // maximum items in a transaction queue
  private volatile Integer transCapacity;
  private volatile int keepAlive;
  private volatile int byteCapacity;
  private volatile int lastByteCapacity;
  private volatile int byteCapacityBufferPercentage;
  private Semaphore bytesRemaining;
  private ChannelCounter channelCounter;

  public MemoryChannel() {
    super();
  }

  /**
   * Read parameters from context
   * <li>capacity = type long that defines the total number of events allowed at one time in the queue.
   * <li>transactionCapacity = type long that defines the total number of events allowed in one transaction.
   * <li>byteCapacity = type long that defines the max number of bytes used for events in the queue.
   * <li>byteCapacityBufferPercentage = type int that defines the percent of buffer between byteCapacity and the estimated event size.
   * <li>keep-alive = type int that defines the number of second to wait for a queue permit
   */
  @Override
  public void configure(Context context) {
    Integer capacity = null;
    try {
      capacity = context.getInteger("capacity", defaultCapacity);
    } catch (NumberFormatException e) {
      capacity = defaultCapacity;
      LOGGER.warn("Invalid capacity specified, initializing channel to "
          + "default capacity of {}", defaultCapacity);
    }
    if (capacity <= 0) {
      capacity = defaultCapacity;
      LOGGER.warn("Invalid capacity specified, initializing channel to "
          + "default capacity of {}", defaultCapacity);
    }
    try {
      transCapacity = context.getInteger("transactionCapacity", defaultTransCapacity);
    } catch (NumberFormatException e) {
      transCapacity = defaultTransCapacity;
      LOGGER.warn("Invalid transation capacity specified, initializing channel"
          + " to default capacity of {}", defaultTransCapacity);
    }
    if (transCapacity <= 0) {
      transCapacity = defaultTransCapacity;
      LOGGER.warn("Invalid transation capacity specified, initializing channel"
          + " to default capacity of {}", defaultTransCapacity);
    }
    Preconditions.checkState(transCapacity <= capacity,
        "Transaction Capacity of Memory Channel cannot be higher than " +
        "the capacity.");
    try {
      byteCapacityBufferPercentage = context.getInteger("byteCapacityBufferPercentage",
          defaultByteCapacityBufferPercentage);
    } catch (NumberFormatException e) {
      byteCapacityBufferPercentage = defaultByteCapacityBufferPercentage;
    }
    try {
      // Convert the configured byte capacity into slot-sized permits, minus
      // the configured header/overhead buffer percentage.
      byteCapacity = (int) ((context.getLong("byteCapacity", defaultByteCapacity).longValue() *
          (1 - byteCapacityBufferPercentage * .01)) / byteCapacitySlotSize);
      if (byteCapacity < 1) {
        byteCapacity = Integer.MAX_VALUE;
      }
    } catch (NumberFormatException e) {
      byteCapacity = (int) ((defaultByteCapacity * (1 - byteCapacityBufferPercentage * .01)) /
          byteCapacitySlotSize);
    }
    try {
      keepAlive = context.getInteger("keep-alive", defaultKeepAlive);
    } catch (NumberFormatException e) {
      keepAlive = defaultKeepAlive;
    }
    // Reconfiguration of a live channel resizes in place; first-time
    // configuration creates the queue and its semaphores.
    if (queue != null) {
      try {
        resizeQueue(capacity);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    } else {
      synchronized (queueLock) {
        queue = new LinkedBlockingDeque<Event>(capacity);
        queueRemaining = new Semaphore(capacity);
        queueStored = new Semaphore(0);
      }
    }
    if (bytesRemaining == null) {
      bytesRemaining = new Semaphore(byteCapacity);
      lastByteCapacity = byteCapacity;
    } else {
      if (byteCapacity > lastByteCapacity) {
        bytesRemaining.release(byteCapacity - lastByteCapacity);
        lastByteCapacity = byteCapacity;
      } else {
        try {
          // Shrinking requires reclaiming the difference in permits; abort
          // the resize if they can't be acquired within keepAlive seconds.
          if (!bytesRemaining.tryAcquire(lastByteCapacity - byteCapacity, keepAlive,
              TimeUnit.SECONDS)) {
            LOGGER.warn("Couldn't acquire permits to downsize the byte capacity, resizing has been aborted");
          } else {
            lastByteCapacity = byteCapacity;
          }
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
    }
    if (channelCounter == null) {
      channelCounter = new ChannelCounter(getName());
    }
  }

  /**
   * Replaces the backing queue with one of the given capacity, acquiring
   * (when shrinking) or releasing (when growing) the matching number of
   * queueRemaining permits. A shrink that cannot acquire permits within
   * keepAlive seconds is aborted with a warning.
   */
  private void resizeQueue(int capacity) throws InterruptedException {
    int oldCapacity;
    synchronized (queueLock) {
      oldCapacity = queue.size() + queue.remainingCapacity();
    }
    if (oldCapacity == capacity) {
      return;
    } else if (oldCapacity > capacity) {
      if (!queueRemaining.tryAcquire(oldCapacity - capacity, keepAlive, TimeUnit.SECONDS)) {
        LOGGER.warn("Couldn't acquire permits to downsize the queue, resizing has been aborted");
      } else {
        synchronized (queueLock) {
          LinkedBlockingDeque<Event> newQueue = new LinkedBlockingDeque<Event>(capacity);
          newQueue.addAll(queue);
          queue = newQueue;
        }
      }
    } else {
      synchronized (queueLock) {
        LinkedBlockingDeque<Event> newQueue = new LinkedBlockingDeque<Event>(capacity);
        newQueue.addAll(queue);
        queue = newQueue;
      }
      queueRemaining.release(capacity - oldCapacity);
    }
  }

  @Override
  public synchronized void start() {
    channelCounter.start();
    channelCounter.setChannelSize(queue.size());
    channelCounter.setChannelCapacity(Long.valueOf(
        queue.size() + queue.remainingCapacity()));
    super.start();
  }

  @Override
  public synchronized void stop() {
    channelCounter.setChannelSize(queue.size());
    channelCounter.stop();
    super.stop();
  }

  @Override
  protected BasicTransactionSemantics createTransaction() {
    return new MemoryTransaction(transCapacity, channelCounter);
  }

  /**
   * Estimates an event's size in bytes from its body length; bodiless events
   * still occupy one byte so every event consumes at least one slot.
   */
  private long estimateEventSize(Event event) {
    byte[] body = event.getBody();
    if (body != null && body.length != 0) {
      return body.length;
    }
    //Each event occupies at least 1 slot, so return 1.
    return 1;
  }

  @VisibleForTesting
  int getBytesRemainingValue() {
    return bytesRemaining.availablePermits();
  }
}
| |
package com.enstage.wibmo.util;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.util.Log;
import com.enstage.wibmo.sdk.WibmoSDK;
import com.enstage.wibmo.sdk.WibmoSDKConfig;
import com.enstage.wibmo.sdk.inapp.InAppUtil;
import com.enstage.wibmo.sdk.inapp.WibmoSDKPermissionUtil;
import com.enstage.wibmo.sdk.inapp.pojo.W2faInitRequest;
import com.enstage.wibmo.sdk.inapp.pojo.W2faInitResponse;
import com.enstage.wibmo.sdk.inapp.pojo.WPayResponse;
import com.google.gson.JsonObject;
import org.json.JSONException;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Created by Akshathkumar Shetty on 30/05/16.
*/
public class AnalyticalUtil {
private static final String TAG = "wibmo.sdk.AUtil";
private static final boolean debug = false;
private static String mToken = "";
public static void logTxn(final Context context,
final Map<String, Object> extraData,
final W2faInitRequest w2faInitRequest, final W2faInitResponse w2faInitResponse,
final int requestCode, final int resultCode, final Intent resultData) {
if(debug) Log.v(TAG, "logTxn: " + requestCode + "; " + resultCode);
if(mToken==null || mToken.isEmpty()) {
if(debug) Log.v(TAG, "mToken is not set.. will abort");
return;
}
if(debug) Log.v(TAG, "extraData: " + extraData);
Thread t = new Thread() {
public void run() {
try {
if (debug) Log.v(TAG, "thread start");
AnalyticalUtil.logTxnInternal(context,
extraData,
w2faInitRequest, w2faInitResponse,
requestCode, resultCode, resultData);
if (debug) Log.v(TAG, "thread end");
} catch (Exception e) {
if(debug) Log.e(TAG, "Error: "+e,e);
}
}
};
t.start();
}
public static void logTxnInternal(Context context,
Map<String, Object> extraData,
W2faInitRequest w2faInitRequest, W2faInitResponse w2faInitResponse,
int requestCode, int resultCode, Intent resultData) {
String eventName = "IAP-Pay";
if (requestCode == WibmoSDK.REQUEST_CODE_IAP_2FA) {
eventName = "IAP-W2fa";
}
eventName = eventName + " SDK";
Map<String, Object> data = new HashMap<>(20);
data.put("WibmoSdkVersion", WibmoSDK.VERSION);
if(extraData!=null) {
data.putAll(extraData);
}
data.put("Event_Time", (long) (System.currentTimeMillis() / 1000));
if (resultCode == Activity.RESULT_OK) {
data.put("Status", "true");
} else {
data.put("Status", "false");
}
WPayResponse wPayResponse = WibmoSDK.processInAppResponseWPay(resultData);
if(wPayResponse!=null) {
data.put("ResCode", wPayResponse.getResCode());
data.put("ResDesc", wPayResponse.getResDesc());
String wibmoTxnId = wPayResponse.getWibmoTxnId();
data.put("wibmoTxnId", wibmoTxnId);
String merAppData = wPayResponse.getMerAppData();
data.put("merAppData", merAppData);
String merTxnId = wPayResponse.getMerTxnId();
data.put("merTxnId", merTxnId);
} else {
if (data != null && resultData!=null) {
String resCode = resultData.getStringExtra("ResCode");
String resDesc = resultData.getStringExtra("ResDesc");
data.put("ResCode", resCode);
data.put("ResDesc", resDesc);
}
}
if (WibmoSDKPermissionUtil.checkSelfPermission(context, Manifest.permission.READ_PHONE_STATE)
== PackageManager.PERMISSION_GRANTED) {
PhoneInfo phoneInfo = PhoneInfo.getInstance(context);
data.put("$os", "Android");
data.put("$os_version", phoneInfo.getAndroidVersion());
data.put("$brand", phoneInfo.getPhoneMaker());
data.put("$model", phoneInfo.getPhoneModel());
data.put("$carrier", phoneInfo.getNetOperatorName());
}
if(w2faInitRequest!=null) {
if(w2faInitRequest.getCustomerInfo()!=null) {
data.put("CustMobile", w2faInitRequest.getCustomerInfo().getCustMobile());
data.put("CustEmail", w2faInitRequest.getCustomerInfo().getCustEmail());
data.put("CustName", w2faInitRequest.getCustomerInfo().getCustName());
}
if(w2faInitRequest.getTransactionInfo()!=null) {
data.put("AmountImpl", w2faInitRequest.getTransactionInfo().getTxnAmount());
data.put("Currency", w2faInitRequest.getTransactionInfo().getTxnCurrency());
}
}
if(w2faInitResponse!=null) {
if(w2faInitResponse.getTransactionInfo()!=null) {
data.put("Amount", w2faInitResponse.getTransactionInfo().getTxnFormattedAmount());
data.put("TxnAmtKnown", w2faInitResponse.getTransactionInfo().isTxnAmtKnown());
data.put("ChargeLater", w2faInitResponse.getTransactionInfo().isChargeLater());
}
if(w2faInitResponse.getMerchantInfo()!=null) {
data.put("MerName", w2faInitResponse.getMerchantInfo().getMerName());
data.put("MerProgramId", w2faInitResponse.getMerchantInfo().getMerProgramId());
data.put("MerId", w2faInitResponse.getMerchantInfo().getMerId());
}
}
try {
String dataToPost = buildEventToPostFor(context, getmToken(), eventName, data);
boolean flag = postToServer(context, dataToPost);
if(debug) Log.i(TAG, "postToServer: "+flag);
} catch (Exception e) {
if(debug) Log.e(TAG, "Error: "+e, e);
}
}
private static String buildEventToPostFor(Context context, String mToken,
String eventName, Map<String, Object> data) throws JSONException {
JsonObject dataObj = new JsonObject();
dataObj.addProperty("event", eventName);
long time = System.currentTimeMillis() / 1000;
JsonObject propertiesObj = new JsonObject();
propertiesObj.addProperty("token", mToken);
propertiesObj.addProperty("time", time);
propertiesObj.addProperty("mp_lib", "AndroidSDK");
Iterator<String> iterator = data.keySet().iterator();
String key = null;
Object value = null;
while(iterator.hasNext()) {
key = iterator.next();
value = data.get(key);
if(value instanceof Boolean) {
propertiesObj.addProperty(key, (Boolean) value);
} else if(value instanceof Integer) {
propertiesObj.addProperty(key, (Integer) value);
} else if(value instanceof Long) {
propertiesObj.addProperty(key, (Long) value);
} else {
propertiesObj.addProperty(key, (String) value);
}
}
dataObj.add("properties", propertiesObj);
String payload = InAppUtil.makeGson().toJson(dataObj);
if(debug) Log.v(TAG, "payload: "+payload);
return payload;
}
public static boolean postToServer(Context context, String payload) {
String endPoint = "https://api.mixpanel.com/track/";
try {
StringBuilder sb = new StringBuilder(100);
sb.append("data=");
sb.append(new String(Base64.encode(payload.getBytes(WibmoSDKConfig.CHARTSET))));
sb.append("&verbose=0");
if(debug) Log.v(TAG, "rawreq: "+sb.toString());
String rawres = HttpUtil.postData(endPoint,
sb.toString().getBytes(WibmoSDKConfig.CHARTSET),
false, HttpUtil.WWW_FORM);
if(debug) Log.v(TAG, "rawres: "+rawres);
if("1".equals(rawres)) {
return true;
} else {
return false;
}
} catch (IOException e) {
//TODO re-try option, save to disk and intent service
if(debug) Log.e(TAG, "Error : "+e,e);
return false;
} catch (Exception e) {
if(debug) Log.e(TAG, "Error : "+e,e);
return false;
}
}
public static String getmToken() {
return mToken;
}
public static void setmToken(String mToken) {
AnalyticalUtil.mToken = mToken;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.bootstrap.builders;
import org.apache.dubbo.config.RegistryConfig;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
class RegistryBuilderTest {
@Test
void address() {
RegistryBuilder builder = new RegistryBuilder();
builder.address("address");
Assertions.assertEquals("address", builder.build().getAddress());
}
@Test
void username() {
RegistryBuilder builder = new RegistryBuilder();
builder.username("username");
Assertions.assertEquals("username", builder.build().getUsername());
}
@Test
void password() {
RegistryBuilder builder = new RegistryBuilder();
builder.password("password");
Assertions.assertEquals("password", builder.build().getPassword());
}
@Test
void port() {
RegistryBuilder builder = new RegistryBuilder();
builder.port(8080);
Assertions.assertEquals(8080, builder.build().getPort());
}
@Test
void protocol() {
RegistryBuilder builder = new RegistryBuilder();
builder.protocol("protocol");
Assertions.assertEquals("protocol", builder.build().getProtocol());
}
@Test
void transporter() {
RegistryBuilder builder = new RegistryBuilder();
builder.transporter("transporter");
Assertions.assertEquals("transporter", builder.build().getTransporter());
}
@Test
void transport() {
RegistryBuilder builder = new RegistryBuilder();
builder.transport("transport");
Assertions.assertEquals("transport", builder.build().getTransport());
}
@Test
void server() {
RegistryBuilder builder = new RegistryBuilder();
builder.server("server");
Assertions.assertEquals("server", builder.build().getServer());
}
@Test
void client() {
RegistryBuilder builder = new RegistryBuilder();
builder.client("client");
Assertions.assertEquals("client", builder.build().getClient());
}
@Test
void cluster() {
RegistryBuilder builder = new RegistryBuilder();
builder.cluster("cluster");
Assertions.assertEquals("cluster", builder.build().getCluster());
}
@Test
void group() {
RegistryBuilder builder = new RegistryBuilder();
builder.group("group");
Assertions.assertEquals("group", builder.build().getGroup());
}
@Test
void version() {
RegistryBuilder builder = new RegistryBuilder();
builder.version("version");
Assertions.assertEquals("version", builder.build().getVersion());
}
@Test
void timeout() {
RegistryBuilder builder = new RegistryBuilder();
builder.timeout(1000);
Assertions.assertEquals(1000, builder.build().getTimeout());
}
@Test
void session() {
RegistryBuilder builder = new RegistryBuilder();
builder.session(2000);
Assertions.assertEquals(2000, builder.build().getSession());
}
@Test
void file() {
RegistryBuilder builder = new RegistryBuilder();
builder.file("file");
Assertions.assertEquals("file", builder.build().getFile());
}
@Test
void testWait() {
RegistryBuilder builder = new RegistryBuilder();
builder.wait(Integer.valueOf(1000));
Assertions.assertEquals(1000, builder.build().getWait());
}
@Test
void isCheck() {
RegistryBuilder builder = new RegistryBuilder();
builder.isCheck(true);
Assertions.assertTrue(builder.build().isCheck());
}
@Test
void isDynamic() {
RegistryBuilder builder = new RegistryBuilder();
builder.isDynamic(true);
Assertions.assertTrue(builder.build().isDynamic());
}
@Test
void register() {
RegistryBuilder builder = new RegistryBuilder();
builder.register(true);
Assertions.assertTrue(builder.build().isRegister());
}
@Test
void subscribe() {
RegistryBuilder builder = new RegistryBuilder();
builder.subscribe(true);
Assertions.assertTrue(builder.build().isSubscribe());
}
@Test
void appendParameter() {
RegistryBuilder builder = new RegistryBuilder();
builder.appendParameter("default.num", "one").appendParameter("num", "ONE");
Map<String, String> parameters = builder.build().getParameters();
Assertions.assertTrue(parameters.containsKey("default.num"));
Assertions.assertEquals("ONE", parameters.get("num"));
}
@Test
void appendParameters() {
Map<String, String> source = new HashMap<>();
source.put("default.num", "one");
source.put("num", "ONE");
RegistryBuilder builder = new RegistryBuilder();
builder.appendParameters(source);
Map<String, String> parameters = builder.build().getParameters();
Assertions.assertTrue(parameters.containsKey("default.num"));
Assertions.assertEquals("ONE", parameters.get("num"));
}
@Test
void isDefault() {
RegistryBuilder builder = new RegistryBuilder();
builder.isDefault(true);
Assertions.assertTrue(builder.build().isDefault());
}
@Test
void simplified() {
RegistryBuilder builder = new RegistryBuilder();
builder.simplified(true);
Assertions.assertTrue(builder.build().getSimplified());
}
@Test
void extraKeys() {
RegistryBuilder builder = new RegistryBuilder();
builder.extraKeys("extraKeys");
Assertions.assertEquals("extraKeys", builder.build().getExtraKeys());
}
@Test
void build() {
RegistryBuilder builder = new RegistryBuilder();
builder.address("address").username("username").password("password").port(8080).protocol("protocol")
.transporter("transporter").server("server").client("client").cluster("cluster").group("group")
.version("version").timeout(1000).session(2000).file("file").wait(Integer.valueOf(10)).isCheck(true)
.isDynamic(false).register(true).subscribe(false).isDefault(true).simplified(false).extraKeys("A")
.parameter("default.num", "one").id("id").prefix("prefix");
RegistryConfig config = builder.build();
RegistryConfig config2 = builder.build();
Assertions.assertEquals(8080, config.getPort());
Assertions.assertEquals(1000, config.getTimeout());
Assertions.assertEquals(2000, config.getSession());
Assertions.assertEquals(10, config.getWait());
Assertions.assertTrue(config.isCheck());
Assertions.assertFalse(config.isDynamic());
Assertions.assertTrue(config.isRegister());
Assertions.assertFalse(config.isSubscribe());
Assertions.assertTrue(config.isDefault());
Assertions.assertFalse(config.getSimplified());
Assertions.assertEquals("address", config.getAddress());
Assertions.assertEquals("username", config.getUsername());
Assertions.assertEquals("password", config.getPassword());
Assertions.assertEquals("protocol", config.getProtocol());
Assertions.assertEquals("transporter", config.getTransporter());
Assertions.assertEquals("server", config.getServer());
Assertions.assertEquals("client", config.getClient());
Assertions.assertEquals("cluster", config.getCluster());
Assertions.assertEquals("group", config.getGroup());
Assertions.assertEquals("version", config.getVersion());
Assertions.assertEquals("file", config.getFile());
Assertions.assertEquals("A", config.getExtraKeys());
Assertions.assertTrue(config.getParameters().containsKey("default.num"));
Assertions.assertEquals("one", config.getParameters().get("default.num"));
Assertions.assertEquals("id", config.getId());
Assertions.assertEquals("prefix", config.getPrefix());
Assertions.assertNotSame(config, config2);
}
}
| |
package bookshop2.supplier.incoming.reserveBooks;
import java.util.Set;
import javax.management.ObjectName;
import nam.model.Endpoint;
import nam.model.TransactionContext;
import nam.model.TransactionIsolationLevel;
import org.aries.Assert;
import org.aries.transport.TransportType;
import org.aries.tx.AbstractArquillianTest;
import org.aries.tx.AbstractJMSListenerArquillionTest;
import org.aries.tx.BytemanRule;
import org.aries.tx.CacheModuleTestControl;
import org.aries.tx.DataModuleTestControl;
import org.aries.tx.TransactionRegistryManagerMBean;
import org.aries.tx.TransactionTestControl;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.container.test.api.TargetsContainer;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.junit.InSequence;
import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import tx.manager.registry.ServiceRegistry;
import bookshop2.Book;
import bookshop2.ReservationAbortedException;
import bookshop2.ReservationRequestMessage;
import bookshop2.supplier.SupplierTestEARBuilder;
import bookshop2.supplier.client.reserveBooks.ReserveBooks;
import bookshop2.supplier.client.reserveBooks.ReserveBooksClient;
import bookshop2.supplier.client.reserveBooks.ReserveBooksProxyForJMS;
import bookshop2.supplier.data.bookInventory.BookInventoryHelper;
import bookshop2.supplier.data.bookInventory.BookInventoryManagerMBean;
import bookshop2.supplier.data.supplierOrderCache.SupplierOrderCacheHelper;
import bookshop2.supplier.data.supplierOrderCache.SupplierOrderCacheProxy;
import bookshop2.util.Bookshop2Fixture;
import common.tx.CoordinationConstants;
@RunAsClient
@RunWith(Arquillian.class)
public class ReserveBooksListenerForJMSCIT2 extends AbstractJMSListenerArquillionTest {
private TransactionTestControl transactionTestControl;
private DataModuleTestControl bookInventoryTestControl;
private BookInventoryHelper bookInventoryHelper;
private CacheModuleTestControl supplierOrderCacheTestControl;
private SupplierOrderCacheHelper supplierOrderCacheHelper;
private SupplierOrderCacheProxy supplierOrderCacheProxy;
private ReserveBooksClient reserveBooksClient;
private ReservationRequestMessage reservationRequestMessage;
// Arquillian container id this CIT deploys to and runs against.
@Override
public String getServerName() {
    return "hornetQ01_local";
}
// Logical domain this integration test belongs to.
@Override
public String getDomainId() {
    return "bookshop2.supplier";
}
// Fully-qualified id of the service under test.
@Override
public String getServiceId() {
    return "bookshop2.supplier.ReserveBooks";
}
// Name of the EAR deployment the harness targets (built by createTestEAR below).
@Override
public String getTargetArchive() {
    return SupplierTestEARBuilder.NAME;
}
/**
 * JNDI name of the JMS queue the reserve-books listener consumes from.
 */
@Override
public String getTargetDestination() {
    final String queueName = "inventory_bookshop2_supplier_reserve_books_queue";
    return getJNDINameForQueue(queueName);
}
// Identifies this class to the base harness (e.g. for Byteman/logging hooks).
@Override
public Class<?> getTestClass() {
    return ReserveBooksListenerForJMSCIT2.class;
}
// Delegates one-time suite setup to the shared Arquillian base class.
@BeforeClass
public static void beforeClass() throws Exception {
    AbstractArquillianTest.beforeClass();
}
// Delegates one-time suite teardown to the shared Arquillian base class.
@AfterClass
public static void afterClass() throws Exception {
    AbstractArquillianTest.afterClass();
}
// Per-test setup. The order matters: the server must be up before the
// controls are created, the transaction control must exist before the
// data/cache controls that wrap it, and the server-side inventory context
// is cleared last so each test starts from a known-empty state.
@Before
public void setUp() throws Exception {
    super.setUp();
    startServer();
    createTransactionControl();
    createServiceLayerTestControl();
    createSupplierOrderCacheTestControl();
    createSupplierOrderCacheHelper();
    createBookInventoryHelper();
    createReservationRequestMessage();
    clearBookInventoryContext();
}
protected void createTransactionControl() throws Exception {
transactionTestControl = new TransactionTestControl();
transactionTestControl.setupTransactionManager();
}
protected void createServiceLayerTestControl() throws Exception {
bookInventoryTestControl = new DataModuleTestControl(transactionTestControl);
bookInventoryTestControl.setDatabaseName("bookshop2_supplier_db");
bookInventoryTestControl.setDataSourceName("bookshop2_supplier");
bookInventoryTestControl.setPersistenceUnitName("bookInventory");
bookInventoryTestControl.setupDataLayer();
}
protected void createSupplierOrderCacheTestControl() throws Exception {
supplierOrderCacheTestControl = new CacheModuleTestControl(transactionTestControl);
supplierOrderCacheTestControl.setupCacheLayer();
}
public void clearBookInventoryContext() throws Exception {
jmxProxy.call(BookInventoryManagerMBean.MBEAN_NAME, "clearContext");
}
@After
public void tearDown() throws Exception {
transactionTestControl.tearDown();
bookInventoryTestControl.tearDown();
supplierOrderCacheTestControl.tearDown();
//removeMessagesFromTargetDestination();
clearStructures();
clearState();
super.tearDown();
}
protected void clearStructures() throws Exception {
reserveBooksClient.reset();
reserveBooksClient = null;
reservationRequestMessage = null;
}
protected void clearState() throws Exception {
super.clearState();
}
// @TargetsContainer("hornetQ01_local")
// @Deployment(name = "txManagerEAR", order = 1)
// public static EnterpriseArchive createTXManagerEAR() {
// TXManagerTestEARBuilder builder = new TXManagerTestEARBuilder();
// return builder.createEAR();
// }
@TargetsContainer("hornetQ01_local")
@Deployment(name = "supplierEAR", order = 2)
public static EnterpriseArchive createTestEAR() {
SupplierTestEARBuilder builder = new SupplierTestEARBuilder();
builder.setIncludeWar(true);
return builder.createEAR();
}
protected void createBookInventoryHelper() throws Exception {
bookInventoryHelper = new BookInventoryHelper();
bookInventoryHelper.setJmxManager(jmxManager);
bookInventoryHelper.initializeAsClient(bookInventoryTestControl);
}
public void createSupplierOrderCacheHelper() throws Exception {
supplierOrderCacheHelper = new SupplierOrderCacheHelper();
supplierOrderCacheHelper.setProxy(createSupplierOrderCacheProxy());
supplierOrderCacheHelper.initializeAsClient(supplierOrderCacheTestControl);
}
public SupplierOrderCacheProxy createSupplierOrderCacheProxy() throws Exception {
supplierOrderCacheProxy = new SupplierOrderCacheProxy();
supplierOrderCacheProxy.setJmxManager(jmxManager);
return supplierOrderCacheProxy;
}
public ReservationRequestMessage createReservationRequestMessage() {
return createReservationRequestMessage(false, false);
}
public ReservationRequestMessage createReservationRequestCancelMessage() {
return createReservationRequestMessage(true, false);
}
public ReservationRequestMessage createReservationRequestUndoMessage() {
return createReservationRequestMessage(false, true);
}
public ReservationRequestMessage createReservationRequestMessage(boolean cancel, boolean undo) {
reservationRequestMessage = Bookshop2Fixture.create_ReservationRequestMessage(cancel, undo);
return initializeMessage(reservationRequestMessage);
}
@Test
//@Ignore
@InSequence(value = 1)
//@Transactional(TransactionMode.ROLLBACK)
public void testReserveBooks_Commit() throws Exception {
String testName = "testReserveBooks_BooksAvailable_Commit";
log.info(testName+": started");
registerNotificationListeners();
bookInventoryHelper.assureRemoveAll();
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(books);
runTest();
bookInventoryHelper.verifyReservedBooksCount(2);
supplierOrderCacheHelper.verifyBooksInStockCount(0);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
//@Ignore
@InSequence(value = 2)
//@Transactional(TransactionMode.ROLLBACK)
public void testReserveBooks_Commit2() throws Exception {
String testName = "testReserveBooks_BooksAvailable_Commit2";
log.info(testName+": started");
registerNotificationListeners();
bookInventoryHelper.assureRemoveAll();
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(books);
runTest();
bookInventoryHelper.verifyReservedBooksCount(2);
supplierOrderCacheHelper.verifyBooksInStockCount(0);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
//@Ignore
@InSequence(value = 3)
public void testReserveBooks_NullCorrelationId_Rollback() throws Exception {
String testName = "testReserveBooks_NullCorrelationId_Rollback";
log.info(testName+": started");
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
expectedEvent = "bookshop2.supplier.ReserveBooks_Incoming_Request_Aborted";
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(books);
reservationRequestMessage.setCorrelationId(null);
exceptionMessage = "CorrelationId null";
isValidationErrorExpected = true;
//isExceptionExpected = true;
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
supplierOrderCacheHelper.verifyBooksInStockCount(2);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
@Ignore
@InSequence(value = 4)
public void testReserveBooks_NullTransactionId_Rollback() throws Exception {
String testName = "testReserveBooks_BooksAvailable_NullTransactionId_Rollback";
log.info(testName+": started");
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
reservationRequestMessage.setTransactionId(null);
isValidationErrorExpected = true;
//isExceptionExpected = true;
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
//@Ignore
@InSequence(value = 5)
public void testReserveBooks_NullBooks_Rollback() throws Exception {
String testName = "testReserveBooks_NullCorrelationId_Rollback";
log.info(testName+": started");
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
expectedEvent = "bookshop2.supplier.ReserveBooks_Incoming_Request_Aborted";
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(null);
exceptionMessage = "ReservationRequestMessage must include one or more books(s)";
isValidationErrorExpected = true;
//isExceptionExpected = true;
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
supplierOrderCacheHelper.verifyBooksInStockCount(2);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
//@Ignore
@InSequence(value = 6)
public void testReserveBooks_EmptyBooks_Rollback() throws Exception {
String testName = "testReserveBooks_EmptyBooks_Rollback";
log.info(testName+": started");
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
expectedEvent = "bookshop2.supplier.ReserveBooks_Incoming_Request_Aborted";
//addReservedBooksToBookInventory();
Set<Book> bookSet = Bookshop2Fixture.createSet_Book(2);
Set<Book> emptyBookSet = Bookshop2Fixture.createEmptySet_Book();
supplierOrderCacheHelper.assureAddBooksInStock(bookSet);
reservationRequestMessage.setBooks(emptyBookSet);
exceptionMessage = "ReservationRequestMessage must include one or more books(s)";
isValidationErrorExpected = true;
//isExceptionExpected = true;
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
supplierOrderCacheHelper.verifyBooksInStockCount(2);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
//@Ignore
@InSequence(value = 7)
@BytemanRule(name = "rule7",
targetClass = "SupplierProcess",
targetMethod = "handle_ReserveBooks_request",
targetLocation = "AT EXIT",
action = "throw new java.lang.RuntimeException(\"error7\")")
public void testReserveBooks_ServiceLayerException_Rollback() throws Exception {
String testName = "testReserveBooks_ServiceLayerException_Rollback";
log.info(testName+": started");
setupByteman(testName);
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
expectedEvent = "bookshop2.supplier.ReserveBooks_Incoming_Request_Aborted";
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(books);
isValidationErrorExpected = true;
exceptionClass = RuntimeException.class;
exceptionMessage = "error7";
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
supplierOrderCacheHelper.verifyBooksInStockCount(2);
tearDownByteman(testName);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
@Test
@InSequence(value = 8)
@BytemanRule(name = "rule8",
targetClass = "ReservedBooksManagerImpl",
targetMethod = "addReservedBooksRecords",
targetLocation = "AT EXIT",
action = "throw new java.lang.RuntimeException(\"error8\")")
public void testReserveBooks_DataLayerException_Rollback() throws Exception {
String testName = "testReserveBooks_DataLayerException_Rollback";
log.info(testName+": started");
setupByteman(testName);
bookInventoryHelper.assureRemoveAll();
supplierOrderCacheHelper.assureRemoveAll();
expectedEvent = "bookshop2.supplier.ReserveBooks_Incoming_Request_Aborted";
Set<Book> books = Bookshop2Fixture.createSet_Book(2);
supplierOrderCacheHelper.assureAddBooksInStock(books);
reservationRequestMessage.setBooks(books);
isValidationErrorExpected = true;
exceptionClass = RuntimeException.class;
exceptionMessage = "error8";
runTest();
bookInventoryHelper.verifyReservedBooksCount(0);
supplierOrderCacheHelper.verifyBooksInStockCount(2);
tearDownByteman(testName);
log.info(testName+": done");
if (errorMessage != null)
fail(errorMessage);
}
public void runTest() throws Exception {
if (exceptionClass != null || exceptionMessage != null)
isExceptionExpected = true;
// Prepare fixture execution
removeMessagesFromDestinations();
// prepare context
//transactionId = control.beginUserTransaction();
// prepare mocks
registerForResult();
try {
// start fixture execution
reserveBooksClient = createReserveBooksClient();
sendRequest_ReserveBooks();
} catch (ReservationAbortedException e) {
if (isExceptionExpected) {
errorMessage = e.getMessage();
if (exceptionMessage != null)
Assert.equals(e.getMessage(), exceptionMessage);
}
} catch (Exception e) {
if (isExceptionExpected) {
errorMessage = e.getMessage();
if (exceptionMessage != null)
Assert.equals(e.getMessage(), exceptionMessage);
}
}
// wait for result
Object result = waitForCompletion();
validateResult(result);
// close context
//control.commitUserTransaction();
// validate the environment
assertEmptyTargetDestination();
removeMessagesFromDestinations();
}
protected void sendRequest_ReserveBooks() throws Exception {
reserveBooksClient.reserveBooks(reservationRequestMessage);
//reserveBooksClient.send(reservationRequestMessage, correlationId, null);
}
protected void sendRequest_ReserveBooks_Cancel() throws Exception {
reservationRequestMessage = createReservationRequestCancelMessage();
sendRequest_ReserveBooks();
}
protected void sendRequest_ReserveBooks_Undo() throws Exception {
reservationRequestMessage = createReservationRequestUndoMessage();
sendRequest_ReserveBooks();
}
protected void registerNotificationListeners() throws Exception {
addRequestNotificationListeners("ReserveBooks");
}
protected void removeMessagesFromDestinations() throws Exception {
removeMessagesFromQueue(getTargetArchive(), getTargetDestination());
}
protected ReserveBooksClient createReserveBooksClient() throws Exception {
ReserveBooksProxyForJMS delegate = createReserveBooksClientDelegate();
ReserveBooksClient client = new ReserveBooksClient();
client.setTransportType(TransportType.JMS);
client.setDelegate(delegate);
return client;
}
protected ReserveBooksProxyForJMS createReserveBooksClientDelegate() throws Exception {
ReserveBooksProxyForJMS delegate = new ReserveBooksProxyForJMS(ReserveBooks.ID);
configureClient(delegate, getTargetDestination());
registerJMSProxy(delegate, ReserveBooks.ID);
delegate.setCorrelationId(correlationId);
delegate.setTransactionId(transactionId);
delegate.setTransactionContext(createTransactionContext());
delegate.setCreateTemporaryQueue(true);
return delegate;
}
protected TransactionContext createTransactionContext() {
TransactionContext transactionContext = new TransactionContext();
transactionContext.setCorrelationId(correlationId);
transactionContext.setTransactionId(transactionId);
transactionContext.setExpiration(600000L);
transactionContext.setIsolationLevel(TransactionIsolationLevel.READ_COMMITTED);
transactionContext.setRegistrationService(createRegistrationEndpoint());
return transactionContext;
}
protected Endpoint createRegistrationEndpoint() {
Endpoint endpoint = new Endpoint();
endpoint.setInstanceId(transactionId);
endpoint.setServiceName(CoordinationConstants.REGISTRATION_SERVICE_QNAME);
endpoint.setEndpointName(CoordinationConstants.REGISTRATION_ENDPOINT_QNAME);
endpoint.setEndpointUri(ServiceRegistry.getInstance().getServiceURI(CoordinationConstants.REGISTRATION_SERVICE_NAME));
return endpoint;
}
protected void addReservedBooksToBookInventory() throws Exception {
Set<Book> books = Bookshop2Fixture.createSet_Book();
addReservedBooksToBookInventory(books);
}
protected void addReservedBooksToBookInventory(Set<Book> books) throws Exception {
ObjectName objectName = new ObjectName(BookInventoryManagerMBean.MBEAN_NAME);
Object[] parameters = { books };
String[] signature = { "java.util.Set" };
jmxManager.invoke(objectName, "addToReservedBooks", parameters, signature);
jmxManager.invoke(objectName, "updateState");
}
protected void assertRolledBack() throws Exception {
}
protected int getTransactionStatus() throws Exception {
ObjectName objectName = new ObjectName(TransactionRegistryManagerMBean.MBEAN_NAME);
Object result = jmxManager.invoke(objectName, "getStatus");
if (result != null && result instanceof Integer)
return ((Integer) result).intValue();
return -1;
}
protected void validateResult(Object result) {
validateResult(result, exceptionClass, exceptionMessage);
}
protected void validateResult(Object result, Class<?> exceptionClass, String exceptionMessage) {
if (result.getClass().equals(exceptionClass)) {
Exception exception = (Exception) result;
Assert.equals(exception.getMessage(), exceptionMessage, "Unexpected exception message: "+result);
} else if (result instanceof Throwable) {
Exception exception = (Exception) result;
Assert.equals(exception.getMessage(), exceptionMessage, "Unexpected exception message: "+result);
} else if (result instanceof String) {
String resultString = result.toString();
if ((expectedError != null && !expectedError.equalsIgnoreCase(resultString)) &&
(expectedEvent != null && !expectedEvent.equalsIgnoreCase(resultString)))
errorMessage = "Unexpected message: "+result;
} else {
errorMessage = "Unrecognized result: "+result;
}
}
}
| |
package il.ac.tau.jsfuzzer.JST.Vistors;
import il.ac.tau.jsfuzzer.JST.AbsExpression;
import il.ac.tau.jsfuzzer.JST.ArrayExp;
import il.ac.tau.jsfuzzer.JST.Assignment;
import il.ac.tau.jsfuzzer.JST.Break;
import il.ac.tau.jsfuzzer.JST.Call;
import il.ac.tau.jsfuzzer.JST.Case;
import il.ac.tau.jsfuzzer.JST.CaseBlock;
import il.ac.tau.jsfuzzer.JST.Comment;
import il.ac.tau.jsfuzzer.JST.CompoundAssignment;
import il.ac.tau.jsfuzzer.JST.Continue;
import il.ac.tau.jsfuzzer.JST.Default;
import il.ac.tau.jsfuzzer.JST.DoWhile;
import il.ac.tau.jsfuzzer.JST.For;
import il.ac.tau.jsfuzzer.JST.ForEach;
import il.ac.tau.jsfuzzer.JST.FunctionDef;
import il.ac.tau.jsfuzzer.JST.FunctionExp;
import il.ac.tau.jsfuzzer.JST.Identifier;
import il.ac.tau.jsfuzzer.JST.If;
import il.ac.tau.jsfuzzer.JST.JSTNode;
import il.ac.tau.jsfuzzer.JST.Literal;
import il.ac.tau.jsfuzzer.JST.LiteralNumber;
import il.ac.tau.jsfuzzer.JST.LiteralString;
import il.ac.tau.jsfuzzer.JST.MemberExp;
import il.ac.tau.jsfuzzer.JST.ObjectExp;
import il.ac.tau.jsfuzzer.JST.OperationExp;
import il.ac.tau.jsfuzzer.JST.OutputStatement;
import il.ac.tau.jsfuzzer.JST.Program;
import il.ac.tau.jsfuzzer.JST.RawCode;
import il.ac.tau.jsfuzzer.JST.Return;
import il.ac.tau.jsfuzzer.JST.StatementsBlock;
import il.ac.tau.jsfuzzer.JST.Switch;
import il.ac.tau.jsfuzzer.JST.This;
import il.ac.tau.jsfuzzer.JST.VarDecleration;
import il.ac.tau.jsfuzzer.JST.VarDeclerator;
import il.ac.tau.jsfuzzer.JST.While;
import il.ac.tau.jsfuzzer.JST.Interfaces.JSTObject;
import il.ac.tau.jsfuzzer.JST.Interfaces.ObjectKeys;
import il.ac.tau.jsfuzzer.JST.Interfaces.Visitor;
import java.util.List;
import java.util.Map.Entry;
/**
 * Visitor that renders a JST program as an indented textual tree, one line per
 * node, skipping nodes that are not both random nodes and on a random branch.
 */
public class JstToTree implements Visitor
{
    // Current nesting depth of the printed tree.
    private int _depth = 0;

    // Accumulates the textual tree. StringBuilder instead of the legacy
    // synchronized StringBuffer: the instance is confined to one thread.
    private final StringBuilder _s = new StringBuilder();

    /**
     * Renders the given program and returns the resulting tree text.
     *
     * @param program the JST program to render
     * @return the indented tree representation
     */
    public static String execute(Program program)
    {
        JstToTree convertor = new JstToTree();
        program.accept(convertor, null);
        return convertor._s.toString();
    }

    /**
     * Appends one line for {@code node} at the current depth, but only when the
     * node is flagged as both a random node and on a random branch.
     *
     * @param str label to print for the node
     * @param node the node being traced
     * @return always {@code null}, so leaf visit methods can return it directly
     */
    private Object trace(String str, JSTNode node)
    {
        if (node.isRandomNode() && node.isRandomBranch())
        {
            _s.append(String.format("%3d", _depth));
            for (int i = 0; i < _depth; i++)
                _s.append(" ");
            _s.append(str).append('\n');
        }
        return null;
    }

    /**
     * Traces the node and, when it is printed, increases the depth for its
     * children.
     *
     * @return whether the node's children should be visited (random branch)
     */
    private boolean traceIn(String str, JSTNode node)
    {
        trace(str, node);
        if (node.isRandomBranch() && node.isRandomNode())
            _depth++;
        return node.isRandomBranch();
    }

    /** Undoes the depth increase performed by a matching {@link #traceIn}. */
    private void traceOut(JSTNode node) {
        if (node.isRandomNode() && node.isRandomBranch()) {
            _depth--;
        }
    }

    /** Visits every node of the list in order with this visitor. */
    private void visitChildrenList(List<? extends JSTObject> list) {
        for (JSTObject node : list)
            node.accept(this, null);
    }

    // ----------------------------------------------------
    // Visit methods: composite nodes follow the traceIn/children/traceOut
    // pattern; leaf nodes delegate to trace() directly.
    // ----------------------------------------------------

    @Override
    public Object visit(Program program, Object context)
    {
        if (traceIn("Program", program))
        {
            visitChildrenList(program.getStatements());
            traceOut(program);
        }
        return null;
    }

    @Override
    public Object visit(Comment comment, Object context)
    {
        return trace("Comment", comment);
    }

    @Override
    public Object visit(If ifStatement, Object context)
    {
        if (traceIn("If", ifStatement))
        {
            ifStatement.getCondition().accept(this, null);
            ifStatement.getStatementsBlock().accept(this, null);
            if (ifStatement.hasElse())
                ifStatement.getElseStatementsBlock().accept(this, null);
            traceOut(ifStatement);
        }
        return null;
    }

    @Override
    public Object visit(While whileStatement, Object context)
    {
        if (traceIn("While", whileStatement))
        {
            whileStatement.getCondition().accept(this, null);
            whileStatement.getStatementsBlock().accept(this, null);
            traceOut(whileStatement);
        }
        return null;
    }

    @Override
    public Object visit(DoWhile doWhile, Object context)
    {
        if (traceIn("DoWhile", doWhile))
        {
            doWhile.getCondition().accept(this, null);
            doWhile.getStatementsBlock().accept(this, null);
            traceOut(doWhile);
        }
        return null;
    }

    @Override
    public Object visit(For forStatement, Object context)
    {
        if (traceIn("For", forStatement))
        {
            forStatement.getInitStatement().accept(this, null);
            forStatement.getConditionExpression().accept(this, null);
            forStatement.getStepExpression().accept(this, null);
            forStatement.getStatementsBlock().accept(this, null);
            traceOut(forStatement);
        }
        return null;
    }

    @Override
    public Object visit(ForEach forEach, Object context)
    {
        if (traceIn("ForEach", forEach))
        {
            forEach.getCollection().accept(this, null);
            forEach.getItem().accept(this, null);
            forEach.getStatementsBlock().accept(this, null);
            traceOut(forEach);
        }
        return null;
    }

    @Override
    public Object visit(Switch switchStatement, Object context)
    {
        if (traceIn("Switch", switchStatement))
        {
            switchStatement.getExpression().accept(this, null);
            visitChildrenList(switchStatement.getCasesOps());
            traceOut(switchStatement);
        }
        return null;
    }

    @Override
    public Object visit(CaseBlock caseBlock, Object context)
    {
        if (traceIn("CaseBlock", caseBlock))
        {
            visitChildrenList(caseBlock.getCases());
            caseBlock.getStatementBlock().accept(this, null);
            traceOut(caseBlock);
        }
        return null;
    }

    @Override
    public Object visit(Case myCase, Object context)
    {
        if (traceIn("Case", myCase))
        {
            myCase.getCaseExpr().accept(this, null);
            traceOut(myCase);
        }
        return null;
    }

    @Override
    public Object visit(Default myDefault, Object context)
    {
        return trace("Default", myDefault);
    }

    @Override
    public Object visit(FunctionDef functionDefinition, Object context)
    {
        if (traceIn("FunctionDef", functionDefinition))
        {
            functionDefinition.getId().accept(this, null);
            visitChildrenList(functionDefinition.getFormals());
            functionDefinition.getStatementsBlock().accept(this, null);
            traceOut(functionDefinition);
        }
        return null;
    }

    @Override
    public Object visit(VarDecleration varDecleration, Object context)
    {
        if (traceIn("VarDecleration", varDecleration))
        {
            visitChildrenList(varDecleration.getDecleratorList());
            traceOut(varDecleration);
        }
        return null;
    }

    @Override
    public Object visit(VarDeclerator varDeclerator, Object context)
    {
        if (traceIn("VarDeclerator", varDeclerator))
        {
            varDeclerator.getId().accept(this, null);
            if (varDeclerator.hasInit())
                varDeclerator.getInit().accept(this, null);
            traceOut(varDeclerator);
        }
        return null;
    }

    @Override
    public Object visit(Continue continueStatement, Object context)
    {
        return trace("Continue", continueStatement);
    }

    @Override
    public Object visit(Break breakStatement, Object context)
    {
        return trace("Break", breakStatement);
    }

    @Override
    public Object visit(Return returnStatement, Object context)
    {
        if (traceIn("Return", returnStatement))
        {
            if (returnStatement.hasValue())
                returnStatement.getValue().accept(this, null);
            traceOut(returnStatement);
        }
        return null;
    }

    @Override
    public Object visit(StatementsBlock stmtBlock, Object context)
    {
        if (traceIn("StatementsBlock", stmtBlock))
        {
            visitChildrenList(stmtBlock.getStatements());
            traceOut(stmtBlock);
        }
        return null;
    }

    @Override
    public Object visit(Assignment assignment, Object context)
    {
        if (traceIn("Assignment", assignment))
        {
            assignment.getLeftHandSide().accept(this, null);
            assignment.getExpr().accept(this, null);
            traceOut(assignment);
        }
        return null;
    }

    @Override
    public Object visit(CompoundAssignment assignment, Object context)
    {
        if (traceIn(String.format("CompoundAssignment (%s)", assignment.getCompoundOp()), assignment))
        {
            assignment.getLeftHandSide().accept(this, null);
            assignment.getExpr().accept(this, null);
            traceOut(assignment);
        }
        return null;
    }

    @Override
    public Object visit(Call call, Object context)
    {
        if (traceIn("Call", call))
        {
            call.getBase().accept(this, null);
            visitChildrenList(call.getParams());
            traceOut(call);
        }
        return null;
    }

    @Override
    public Object visit(FunctionExp functionExpression, Object context)
    {
        if (traceIn("FunctionExp", functionExpression))
        {
            visitChildrenList(functionExpression.getFormals());
            functionExpression.getStatementsBlock().accept(this, null);
            traceOut(functionExpression);
        }
        return null;
    }

    @Override
    public Object visit(MemberExp memberExpr, Object context)
    {
        if (traceIn("MemberExp", memberExpr))
        {
            memberExpr.getBase().accept(this, null);
            memberExpr.getLocation().accept(this, null);
            traceOut(memberExpr);
        }
        return null;
    }

    @Override
    public Object visit(ObjectExp objExpr, Object context)
    {
        if (traceIn("ObjectExp", objExpr))
        {
            // Visit each key/value pair of the object literal in map order.
            for (Entry<ObjectKeys, AbsExpression> item : objExpr.getMap().entrySet())
            {
                item.getKey().accept(this, null);
                item.getValue().accept(this, null);
            }
            traceOut(objExpr);
        }
        return null;
    }

    @Override
    public Object visit(ArrayExp arrayExpr, Object context)
    {
        if (traceIn("ArrayExp", arrayExpr))
        {
            visitChildrenList(arrayExpr.getItemsList());
            traceOut(arrayExpr);
        }
        return null;
    }

    @Override
    public Object visit(Identifier id, Object context)
    {
        return trace(String.format("Identifier (%s)", id), id);
    }

    @Override
    public Object visit(This thisExpr, Object context)
    {
        return trace("This", thisExpr);
    }

    @Override
    public Object visit(OperationExp opExp, Object context)
    {
        if (traceIn(String.format("OperationExp (%s)", opExp.getOperator()), opExp))
        {
            visitChildrenList(opExp.getOperandList());
            traceOut(opExp);
        }
        return null;
    }

    @Override
    public Object visit(Literal literal, Object context)
    {
        return trace(String.format("Literal (%s)", literal.getType()), literal);
    }

    @Override
    public Object visit(LiteralString literal, Object context)
    {
        return trace("LiteralString", literal);
    }

    @Override
    public Object visit(LiteralNumber literal, Object context)
    {
        return trace(String.format("LiteralNumber (%s)", literal), literal);
    }

    @Override
    public Object visit(OutputStatement outputStmt, Object context)
    {
        return trace("OutputStatement", outputStmt);
    }

    @Override
    public Object visit(RawCode rawcode, Object context)
    {
        /* do nothing - should not be called */
        return null;
    }
}
| |
/*
* Copyright (C) 2014 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.math;
import static com.google.common.base.Preconditions.checkArgument;
import static java.lang.Double.NEGATIVE_INFINITY;
import static java.lang.Double.NaN;
import static java.lang.Double.POSITIVE_INFINITY;
import static java.util.Arrays.sort;
import static java.util.Collections.unmodifiableMap;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.primitives.Doubles;
import com.google.common.primitives.Ints;
import java.math.RoundingMode;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Provides a fluent API for calculating <a
* href="http://en.wikipedia.org/wiki/Quantile">quantiles</a>.
*
* <h3>Examples</h3>
*
* <p>To compute the median:
*
* <pre>{@code
* double myMedian = median().compute(myDataset);
* }</pre>
*
* where {@link #median()} has been statically imported.
*
* <p>To compute the 99th percentile:
*
* <pre>{@code
* double myPercentile99 = percentiles().index(99).compute(myDataset);
* }</pre>
*
* where {@link #percentiles()} has been statically imported.
*
* <p>To compute median and the 90th and 99th percentiles:
*
* <pre>{@code
* Map<Integer, Double> myPercentiles =
* percentiles().indexes(50, 90, 99).compute(myDataset);
* }</pre>
*
* where {@link #percentiles()} has been statically imported: {@code myPercentiles} maps the keys
* 50, 90, and 99, to their corresponding quantile values.
*
* <p>To compute quartiles, use {@link #quartiles()} instead of {@link #percentiles()}. To compute
* arbitrary q-quantiles, use {@link #scale scale(q)}.
*
* <p>These examples all take a copy of your dataset. If you have a double array, you are okay with
* it being arbitrarily reordered, and you want to avoid that copy, you can use {@code
* computeInPlace} instead of {@code compute}.
*
* <h3>Definition and notes on interpolation</h3>
*
* <p>The definition of the kth q-quantile of N values is as follows: define x = k * (N - 1) / q; if
* x is an integer, the result is the value which would appear at index x in the sorted dataset
* (unless there are {@link Double#NaN NaN} values, see below); otherwise, the result is the average
* of the values which would appear at the indexes floor(x) and ceil(x) weighted by (1-frac(x)) and
* frac(x) respectively. This is the same definition as used by Excel and by S, it is the Type 7
* definition in <a
* href="http://stat.ethz.ch/R-manual/R-devel/library/stats/html/quantile.html">R</a>, and it is
* described by <a
* href="http://en.wikipedia.org/wiki/Quantile#Estimating_the_quantiles_of_a_population">
* wikipedia</a> as providing "Linear interpolation of the modes for the order statistics for the
* uniform distribution on [0,1]."
*
* <h3>Handling of non-finite values</h3>
*
* <p>If any values in the input are {@link Double#NaN NaN} then all values returned are {@link
* Double#NaN NaN}. (This is the one occasion when the behaviour is not the same as you'd get from
* sorting with {@link java.util.Arrays#sort(double[]) Arrays.sort(double[])} or {@link
* java.util.Collections#sort(java.util.List) Collections.sort(List<Double>)} and selecting
* the required value(s). Those methods would sort {@link Double#NaN NaN} as if it is greater than
* any other value and place them at the end of the dataset, even after {@link
* Double#POSITIVE_INFINITY POSITIVE_INFINITY}.)
*
* <p>Otherwise, {@link Double#NEGATIVE_INFINITY NEGATIVE_INFINITY} and {@link
* Double#POSITIVE_INFINITY POSITIVE_INFINITY} sort to the beginning and the end of the dataset, as
* you would expect.
*
* <p>If required to do a weighted average between an infinity and a finite value, or between an
* infinite value and itself, the infinite value is returned. If required to do a weighted average
* between {@link Double#NEGATIVE_INFINITY NEGATIVE_INFINITY} and {@link Double#POSITIVE_INFINITY
* POSITIVE_INFINITY}, {@link Double#NaN NaN} is returned (note that this will only happen if the
* dataset contains no finite values).
*
* <h3>Performance</h3>
*
* <p>The average time complexity of the computation is O(N) in the size of the dataset. There is a
* worst case time complexity of O(N^2). You are extremely unlikely to hit this quadratic case on
* randomly ordered data (the probability decreases faster than exponentially in N), but if you are
* passing in unsanitized user data then a malicious user could force it. A light shuffle of the
* data using an unpredictable seed should normally be enough to thwart this attack.
*
* <p>The time taken to compute multiple quantiles on the same dataset using {@link Scale#indexes
* indexes} is generally less than the total time taken to compute each of them separately, and
* sometimes much less. For example, on a large enough dataset, computing the 90th and 99th
* percentiles together takes about 55% as long as computing them separately.
*
* <p>When calling {@link ScaleAndIndex#compute} (in {@linkplain ScaleAndIndexes#compute either
* form}), the memory requirement is 8*N bytes for the copy of the dataset plus an overhead which is
* independent of N (but depends on the quantiles being computed). When calling {@link
* ScaleAndIndex#computeInPlace computeInPlace} (in {@linkplain ScaleAndIndexes#computeInPlace
* either form}), only the overhead is required. The number of object allocations is independent of
* N in both cases.
*
* @author Pete Gillin
* @since 20.0
*/
@Beta
@GwtIncompatible
public final class Quantiles {

  /** Specifies the computation of a median (i.e. the 1st 2-quantile). */
  public static ScaleAndIndex median() {
    return scale(2).index(1);
  }

  /** Specifies the computation of quartiles (i.e. 4-quantiles). */
  public static Scale quartiles() {
    return scale(4);
  }

  /** Specifies the computation of percentiles (i.e. 100-quantiles). */
  public static Scale percentiles() {
    return scale(100);
  }

  /**
   * Specifies the computation of q-quantiles.
   *
   * @param scale the scale for the quantiles to be calculated, i.e. the q of the q-quantiles, which
   *     must be positive
   */
  public static Scale scale(int scale) {
    return new Scale(scale);
  }

  /**
   * Describes the point in a fluent API chain where only the scale (i.e. the q in q-quantiles) has
   * been specified.
   *
   * @since 20.0
   */
  public static final class Scale {

    // The q of the q-quantiles to be computed; validated positive by the constructor.
    private final int scale;

    private Scale(int scale) {
      checkArgument(scale > 0, "Quantile scale must be positive");
      this.scale = scale;
    }

    /**
     * Specifies a single quantile index to be calculated, i.e. the k in the kth q-quantile.
     *
     * @param index the quantile index, which must be in the inclusive range [0, q] for q-quantiles
     */
    public ScaleAndIndex index(int index) {
      return new ScaleAndIndex(scale, index);
    }

    /**
     * Specifies multiple quantile indexes to be calculated, each index being the k in the kth
     * q-quantile.
     *
     * @param indexes the quantile indexes, each of which must be in the inclusive range [0, q] for
     *     q-quantiles; the order of the indexes is unimportant, duplicates will be ignored, and the
     *     set will be snapshotted when this method is called
     * @throws IllegalArgumentException if {@code indexes} is empty
     */
    public ScaleAndIndexes indexes(int... indexes) {
      // clone() snapshots the caller's array so later mutation cannot affect the computation.
      return new ScaleAndIndexes(scale, indexes.clone());
    }

    /**
     * Specifies multiple quantile indexes to be calculated, each index being the k in the kth
     * q-quantile.
     *
     * @param indexes the quantile indexes, each of which must be in the inclusive range [0, q] for
     *     q-quantiles; the order of the indexes is unimportant, duplicates will be ignored, and the
     *     set will be snapshotted when this method is called
     * @throws IllegalArgumentException if {@code indexes} is empty
     */
    public ScaleAndIndexes indexes(Collection<Integer> indexes) {
      // Ints.toArray produces a fresh array, so this is also a snapshot.
      return new ScaleAndIndexes(scale, Ints.toArray(indexes));
    }
  }

  /**
   * Describes the point in a fluent API chain where the scale and a single quantile index (i.e. the
   * q and the k in the kth q-quantile) have been specified.
   *
   * @since 20.0
   */
  public static final class ScaleAndIndex {

    // The q of the q-quantiles (positive).
    private final int scale;
    // The k of the kth q-quantile; validated to be in [0, scale] by the constructor.
    private final int index;

    private ScaleAndIndex(int scale, int index) {
      checkIndex(index, scale);
      this.scale = scale;
      this.index = index;
    }

    /**
     * Computes the quantile value of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles (with any associated lost of precision), and which will not be mutated by
     *     this call (it is copied instead)
     * @return the quantile value
     */
    public double compute(Collection<? extends Number> dataset) {
      return computeInPlace(Doubles.toArray(dataset));
    }

    /**
     * Computes the quantile value of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will not
     *     be mutated by this call (it is copied instead)
     * @return the quantile value
     */
    public double compute(double... dataset) {
      return computeInPlace(dataset.clone());
    }

    /**
     * Computes the quantile value of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles (with any associated lost of precision), and which will not be mutated by
     *     this call (it is copied instead)
     * @return the quantile value
     */
    public double compute(long... dataset) {
      return computeInPlace(longsToDoubles(dataset));
    }

    /**
     * Computes the quantile value of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles, and which will not be mutated by this call (it is copied instead)
     * @return the quantile value
     */
    public double compute(int... dataset) {
      return computeInPlace(intsToDoubles(dataset));
    }

    /**
     * Computes the quantile value of the given dataset, performing the computation in-place.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, and which will
     *     be arbitrarily reordered by this method call
     * @return the quantile value
     */
    public double computeInPlace(double... dataset) {
      checkArgument(dataset.length > 0, "Cannot calculate quantiles of an empty dataset");
      // Any NaN in the dataset poisons the result, matching IEEE-754 propagation semantics.
      if (containsNaN(dataset)) {
        return NaN;
      }
      // Calculate the quotient and remainder in the integer division x = k * (N-1) / q, i.e.
      // index * (dataset.length - 1) / scale. If there is no remainder, we can just find the value
      // whose index in the sorted dataset equals the quotient; if there is a remainder, we
      // interpolate between that and the next value.
      // Since index and (dataset.length - 1) are non-negative ints, their product can be expressed
      // as a long, without risk of overflow:
      long numerator = (long) index * (dataset.length - 1);
      // Since scale is a positive int, index is in [0, scale], and (dataset.length - 1) is a
      // non-negative int, we can do long-arithmetic on index * (dataset.length - 1) / scale to get
      // a rounded ratio and a remainder which can be expressed as ints, without risk of overflow:
      int quotient = (int) LongMath.divide(numerator, scale, RoundingMode.DOWN);
      int remainder = (int) (numerator - (long) quotient * scale);
      // Partially sort so that dataset[quotient] holds the value it would hold if fully sorted.
      selectInPlace(quotient, dataset, 0, dataset.length - 1);
      if (remainder == 0) {
        return dataset[quotient];
      } else {
        // Also select the next value up, then interpolate between the two.
        selectInPlace(quotient + 1, dataset, quotient + 1, dataset.length - 1);
        return interpolate(dataset[quotient], dataset[quotient + 1], remainder, scale);
      }
    }
  }

  /**
   * Describes the point in a fluent API chain where the scale and a multiple quantile indexes (i.e.
   * the q and a set of values for the k in the kth q-quantile) have been specified.
   *
   * @since 20.0
   */
  public static final class ScaleAndIndexes {

    // The q of the q-quantiles (positive).
    private final int scale;
    // The k values of the kth q-quantiles to compute; each validated to be in [0, scale].
    // Callers pass a freshly-allocated array, so no defensive copy is made here.
    private final int[] indexes;

    private ScaleAndIndexes(int scale, int[] indexes) {
      for (int index : indexes) {
        checkIndex(index, scale);
      }
      checkArgument(indexes.length > 0, "Indexes must be a non empty array");
      this.scale = scale;
      this.indexes = indexes;
    }

    /**
     * Computes the quantile values of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles (with any associated lost of precision), and which will not be mutated by
     *     this call (it is copied instead)
     * @return an unmodifiable, ordered map of results: the keys will be the specified quantile
     *     indexes, and the values the corresponding quantile values. When iterating, entries in the
     *     map are ordered by quantile index in the same order they were passed to the {@code
     *     indexes} method.
     */
    public Map<Integer, Double> compute(Collection<? extends Number> dataset) {
      return computeInPlace(Doubles.toArray(dataset));
    }

    /**
     * Computes the quantile values of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will not
     *     be mutated by this call (it is copied instead)
     * @return an unmodifiable, ordered map of results: the keys will be the specified quantile
     *     indexes, and the values the corresponding quantile values. When iterating, entries in the
     *     map are ordered by quantile index in the same order they were passed to the {@code
     *     indexes} method.
     */
    public Map<Integer, Double> compute(double... dataset) {
      return computeInPlace(dataset.clone());
    }

    /**
     * Computes the quantile values of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles (with any associated lost of precision), and which will not be mutated by
     *     this call (it is copied instead)
     * @return an unmodifiable, ordered map of results: the keys will be the specified quantile
     *     indexes, and the values the corresponding quantile values. When iterating, entries in the
     *     map are ordered by quantile index in the same order they were passed to the {@code
     *     indexes} method.
     */
    public Map<Integer, Double> compute(long... dataset) {
      return computeInPlace(longsToDoubles(dataset));
    }

    /**
     * Computes the quantile values of the given dataset.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, which will be
     *     cast to doubles, and which will not be mutated by this call (it is copied instead)
     * @return an unmodifiable, ordered map of results: the keys will be the specified quantile
     *     indexes, and the values the corresponding quantile values. When iterating, entries in the
     *     map are ordered by quantile index in the same order they were passed to the {@code
     *     indexes} method.
     */
    public Map<Integer, Double> compute(int... dataset) {
      return computeInPlace(intsToDoubles(dataset));
    }

    /**
     * Computes the quantile values of the given dataset, performing the computation in-place.
     *
     * @param dataset the dataset to do the calculation on, which must be non-empty, and which will
     *     be arbitrarily reordered by this method call
     * @return an unmodifiable, ordered map of results: the keys will be the specified quantile
     *     indexes, and the values the corresponding quantile values. When iterating, entries in the
     *     map are ordered by quantile index in the same order that the indexes were passed to the
     *     {@code indexes} method.
     */
    public Map<Integer, Double> computeInPlace(double... dataset) {
      checkArgument(dataset.length > 0, "Cannot calculate quantiles of an empty dataset");
      // Any NaN in the dataset poisons every requested quantile.
      if (containsNaN(dataset)) {
        Map<Integer, Double> nanMap = new LinkedHashMap<>();
        for (int index : indexes) {
          nanMap.put(index, NaN);
        }
        return unmodifiableMap(nanMap);
      }
      // Calculate the quotients and remainders in the integer division x = k * (N - 1) / q, i.e.
      // index * (dataset.length - 1) / scale for each index in indexes. For each, if there is no
      // remainder, we can just select the value whose index in the sorted dataset equals the
      // quotient; if there is a remainder, we interpolate between that and the next value.
      int[] quotients = new int[indexes.length];
      int[] remainders = new int[indexes.length];
      // The indexes to select. In the worst case, we'll need one each side of each quantile.
      int[] requiredSelections = new int[indexes.length * 2];
      int requiredSelectionsCount = 0;
      for (int i = 0; i < indexes.length; i++) {
        // Since index and (dataset.length - 1) are non-negative ints, their product can be
        // expressed as a long, without risk of overflow:
        long numerator = (long) indexes[i] * (dataset.length - 1);
        // Since scale is a positive int, index is in [0, scale], and (dataset.length - 1) is a
        // non-negative int, we can do long-arithmetic on index * (dataset.length - 1) / scale to
        // get a rounded ratio and a remainder which can be expressed as ints, without risk of
        // overflow:
        int quotient = (int) LongMath.divide(numerator, scale, RoundingMode.DOWN);
        int remainder = (int) (numerator - (long) quotient * scale);
        quotients[i] = quotient;
        remainders[i] = remainder;
        requiredSelections[requiredSelectionsCount] = quotient;
        requiredSelectionsCount++;
        if (remainder != 0) {
          requiredSelections[requiredSelectionsCount] = quotient + 1;
          requiredSelectionsCount++;
        }
      }
      // Selecting in sorted order lets selectAllInPlace recurse over disjoint subranges.
      sort(requiredSelections, 0, requiredSelectionsCount);
      selectAllInPlace(
          requiredSelections, 0, requiredSelectionsCount - 1, dataset, 0, dataset.length - 1);
      Map<Integer, Double> ret = new LinkedHashMap<>();
      for (int i = 0; i < indexes.length; i++) {
        int quotient = quotients[i];
        int remainder = remainders[i];
        if (remainder == 0) {
          ret.put(indexes[i], dataset[quotient]);
        } else {
          ret.put(
              indexes[i], interpolate(dataset[quotient], dataset[quotient + 1], remainder, scale));
        }
      }
      return unmodifiableMap(ret);
    }
  }

  /** Returns whether any of the values in {@code dataset} are {@code NaN}. */
  private static boolean containsNaN(double... dataset) {
    for (double value : dataset) {
      if (Double.isNaN(value)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns a value a fraction {@code (remainder / scale)} of the way between {@code lower} and
   * {@code upper}. Assumes that {@code lower <= upper}. Correctly handles infinities (but not
   * {@code NaN}).
   */
  private static double interpolate(double lower, double upper, double remainder, double scale) {
    if (lower == NEGATIVE_INFINITY) {
      if (upper == POSITIVE_INFINITY) {
        // Return NaN when lower == NEGATIVE_INFINITY and upper == POSITIVE_INFINITY:
        return NaN;
      }
      // Return NEGATIVE_INFINITY when NEGATIVE_INFINITY == lower <= upper < POSITIVE_INFINITY:
      return NEGATIVE_INFINITY;
    }
    if (upper == POSITIVE_INFINITY) {
      // Return POSITIVE_INFINITY when NEGATIVE_INFINITY < lower <= upper == POSITIVE_INFINITY:
      return POSITIVE_INFINITY;
    }
    return lower + (upper - lower) * remainder / scale;
  }

  /**
   * Validates that a quantile index is in the inclusive range [0, {@code scale}], throwing
   * {@link IllegalArgumentException} otherwise.
   */
  private static void checkIndex(int index, int scale) {
    if (index < 0 || index > scale) {
      throw new IllegalArgumentException(
          "Quantile indexes must be between 0 and the scale, which is " + scale);
    }
  }

  /** Returns a new array with each {@code long} value widened to a {@code double}. */
  private static double[] longsToDoubles(long[] longs) {
    int len = longs.length;
    double[] doubles = new double[len];
    for (int i = 0; i < len; i++) {
      doubles[i] = longs[i];
    }
    return doubles;
  }

  /** Returns a new array with each {@code int} value widened to a {@code double}. */
  private static double[] intsToDoubles(int[] ints) {
    int len = ints.length;
    double[] doubles = new double[len];
    for (int i = 0; i < len; i++) {
      doubles[i] = ints[i];
    }
    return doubles;
  }

  /**
   * Performs an in-place selection to find the element which would appear at a given index in a
   * dataset if it were sorted. The following preconditions should hold:
   *
   * <ul>
   *   <li>{@code required}, {@code from}, and {@code to} should all be indexes into {@code array};
   *   <li>{@code required} should be in the range [{@code from}, {@code to}];
   *   <li>all the values with indexes in the range [0, {@code from}) should be less than or equal
   *       to all the values with indexes in the range [{@code from}, {@code to}];
   *   <li>all the values with indexes in the range ({@code to}, {@code array.length - 1}] should be
   *       greater than or equal to all the values with indexes in the range [{@code from}, {@code
   *       to}].
   * </ul>
   *
   * This method will reorder the values with indexes in the range [{@code from}, {@code to}] such
   * that all the values with indexes in the range [{@code from}, {@code required}) are less than or
   * equal to the value with index {@code required}, and all the values with indexes in the range
   * ({@code required}, {@code to}] are greater than or equal to that value. Therefore, the value at
   * {@code required} is the value which would appear at that index in the sorted dataset.
   */
  private static void selectInPlace(int required, double[] array, int from, int to) {
    // If we are looking for the least element in the range, we can just do a linear search for it.
    // (We will hit this whenever we are doing quantile interpolation: our first selection finds
    // the lower value, our second one finds the upper value by looking for the next least element.)
    if (required == from) {
      int min = from;
      for (int index = from + 1; index <= to; index++) {
        if (array[min] > array[index]) {
          min = index;
        }
      }
      if (min != from) {
        swap(array, min, from);
      }
      return;
    }
    // Let's play quickselect! We'll repeatedly partition the range [from, to] containing the
    // required element, as long as it has more than one element.
    while (to > from) {
      int partitionPoint = partition(array, from, to);
      // When partitionPoint == required, both branches fire and the loop terminates with the
      // required element in place.
      if (partitionPoint >= required) {
        to = partitionPoint - 1;
      }
      if (partitionPoint <= required) {
        from = partitionPoint + 1;
      }
    }
  }

  /**
   * Performs a partition operation on the slice of {@code array} with elements in the range [{@code
   * from}, {@code to}]. Uses the median of {@code from}, {@code to}, and the midpoint between them
   * as a pivot. Returns the index which the slice is partitioned around, i.e. if it returns {@code
   * ret} then we know that the values with indexes in [{@code from}, {@code ret}) are less than or
   * equal to the value at {@code ret} and the values with indexes in ({@code ret}, {@code to}] are
   * greater than or equal to that.
   */
  private static int partition(double[] array, int from, int to) {
    // Select a pivot, and move it to the start of the slice i.e. to index from.
    movePivotToStartOfSlice(array, from, to);
    double pivot = array[from];
    // Move all elements with indexes in (from, to] which are greater than the pivot to the end of
    // the array. Keep track of where those elements begin.
    int partitionPoint = to;
    for (int i = to; i > from; i--) {
      if (array[i] > pivot) {
        swap(array, partitionPoint, i);
        partitionPoint--;
      }
    }
    // We now know that all elements with indexes in (from, partitionPoint] are less than or equal
    // to the pivot at from, and all elements with indexes in (partitionPoint, to] are greater than
    // it. We swap the pivot into partitionPoint and we know the array is partitioned around that.
    swap(array, from, partitionPoint);
    return partitionPoint;
  }

  /**
   * Selects the pivot to use, namely the median of the values at {@code from}, {@code to}, and
   * halfway between the two (rounded down), from {@code array}, and ensure (by swapping elements if
   * necessary) that that pivot value appears at the start of the slice i.e. at {@code from}.
   * Expects that {@code from} is strictly less than {@code to}.
   */
  private static void movePivotToStartOfSlice(double[] array, int from, int to) {
    // Unsigned shift gives the overflow-safe midpoint of two non-negative ints.
    int mid = (from + to) >>> 1;
    // We want to make a swap such that either array[to] <= array[from] <= array[mid], or
    // array[mid] <= array[from] <= array[to]. We know that from < to, so we know mid < to
    // (although it's possible that mid == from, if to == from + 1). Note that the postcondition
    // would be impossible to fulfil if mid == to unless we also have array[from] == array[to].
    boolean toLessThanMid = (array[to] < array[mid]);
    boolean midLessThanFrom = (array[mid] < array[from]);
    boolean toLessThanFrom = (array[to] < array[from]);
    if (toLessThanMid == midLessThanFrom) {
      // Either array[to] < array[mid] < array[from] or array[from] <= array[mid] <= array[to].
      swap(array, mid, from);
    } else if (toLessThanMid != toLessThanFrom) {
      // Either array[from] <= array[to] < array[mid] or array[mid] <= array[to] < array[from].
      swap(array, from, to);
    }
    // The postcondition now holds. So the median, our chosen pivot, is at from.
  }

  /**
   * Performs an in-place selection, like {@link #selectInPlace}, to select all the indexes {@code
   * allRequired[i]} for {@code i} in the range [{@code requiredFrom}, {@code requiredTo}]. These
   * indexes must be sorted in the array and must all be in the range [{@code from}, {@code to}].
   */
  private static void selectAllInPlace(
      int[] allRequired, int requiredFrom, int requiredTo, double[] array, int from, int to) {
    // Choose the first selection to do...
    int requiredChosen = chooseNextSelection(allRequired, requiredFrom, requiredTo, from, to);
    int required = allRequired[requiredChosen];
    // ...do the first selection...
    selectInPlace(required, array, from, to);
    // ...then recursively perform the selections in the range below...
    int requiredBelow = requiredChosen - 1;
    while (requiredBelow >= requiredFrom && allRequired[requiredBelow] == required) {
      requiredBelow--; // skip duplicates of required in the range below
    }
    if (requiredBelow >= requiredFrom) {
      selectAllInPlace(allRequired, requiredFrom, requiredBelow, array, from, required - 1);
    }
    // ...and then recursively perform the selections in the range above.
    int requiredAbove = requiredChosen + 1;
    while (requiredAbove <= requiredTo && allRequired[requiredAbove] == required) {
      requiredAbove++; // skip duplicates of required in the range above
    }
    if (requiredAbove <= requiredTo) {
      selectAllInPlace(allRequired, requiredAbove, requiredTo, array, required + 1, to);
    }
  }

  /**
   * Chooses the next selection to do from the required selections. It is required that the array
   * {@code allRequired} is sorted and that {@code allRequired[i]} are in the range [{@code from},
   * {@code to}] for all {@code i} in the range [{@code requiredFrom}, {@code requiredTo}]. The
   * value returned by this method is the {@code i} in that range such that {@code allRequired[i]}
   * is as close as possible to the center of the range [{@code from}, {@code to}]. Choosing the
   * value closest to the center of the range first is the most efficient strategy because it
   * minimizes the size of the subranges from which the remaining selections must be done.
   */
  private static int chooseNextSelection(
      int[] allRequired, int requiredFrom, int requiredTo, int from, int to) {
    if (requiredFrom == requiredTo) {
      return requiredFrom; // only one thing to choose, so choose it
    }
    // Find the center and round down. The true center is either centerFloor or halfway between
    // centerFloor and centerFloor + 1.
    int centerFloor = (from + to) >>> 1;
    // Do a binary search until we're down to the range of two which encloses centerFloor (unless
    // all values are lower or higher than centerFloor, in which case we find the two highest or
    // lowest respectively). If centerFloor is in allRequired, we will definitely find it. If not,
    // but centerFloor + 1 is, we'll definitely find that. The closest value to the true (unrounded)
    // center will be at either low or high.
    int low = requiredFrom;
    int high = requiredTo;
    while (high > low + 1) {
      int mid = (low + high) >>> 1;
      if (allRequired[mid] > centerFloor) {
        high = mid;
      } else if (allRequired[mid] < centerFloor) {
        low = mid;
      } else {
        return mid; // allRequired[mid] = centerFloor, so we can't get closer than that
      }
    }
    // Now pick the closest of the two candidates. Note that there is no rounding here.
    // Since allRequired[low] and allRequired[high] are both in [from, to], the true value of this
    // expression fits in an int, so any intermediate overflow of (from + to) wraps away harmlessly.
    if (from + to - allRequired[low] - allRequired[high] > 0) {
      return high;
    } else {
      return low;
    }
  }

  /** Swaps the values at {@code i} and {@code j} in {@code array}. */
  private static void swap(double[] array, int i, int j) {
    double temp = array[i];
    array[i] = array[j];
    array[j] = temp;
  }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.pysrc.internal;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Multimap;
import com.google.common.io.Files;
import com.google.template.soy.base.SourceLocation;
import com.google.template.soy.error.ErrorReporter;
import com.google.template.soy.error.SoyErrorKind;
import com.google.template.soy.internal.i18n.BidiGlobalDir;
import com.google.template.soy.internal.i18n.SoyBidiUtils;
import com.google.template.soy.pysrc.SoyPySrcOptions;
import com.google.template.soy.pysrc.internal.GenPyExprsVisitor.GenPyExprsVisitorFactory;
import com.google.template.soy.shared.internal.MainEntryPointUtils;
import com.google.template.soy.shared.internal.SoyScopedData;
import com.google.template.soy.soytree.SoyFileNode;
import com.google.template.soy.soytree.SoyFileSetNode;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Main entry point for the Python Src backend (output target).
*
* <p>Important: Do not use outside of Soy code (treat as superpackage-private).
*
*/
public final class PySrcMain {

  private static final SoyErrorKind DUPLICATE_NAMESPACE_ERROR =
      SoyErrorKind.of(
          "Multiple files are providing the same namespace: {0}. Soy namespaces must be unique.");

  /** The scope object that manages the API call scope. */
  private final SoyScopedData.Enterable apiCallScope;

  public PySrcMain(SoyScopedData.Enterable apiCallScope) {
    this.apiCallScope = apiCallScope;
  }

  /**
   * Generates Python source code given a Soy parse tree and an options object.
   *
   * @param soyTree The Soy parse tree to generate Python source code for.
   * @param pySrcOptions The compilation options relevant to this backend.
   * @param currentManifest The namespace manifest for current sources.
   * @param errorReporter The Soy error reporter that collects errors during code generation.
   * @return A list of strings where each string represents the Python source code that belongs in
   *     one Python file. The generated Python files correspond one-to-one to the original Soy
   *     source files.
   */
  private List<String> genPySrc(
      SoyFileSetNode soyTree,
      SoyPySrcOptions pySrcOptions,
      ImmutableMap<String, String> currentManifest,
      ErrorReporter errorReporter) {
    BidiGlobalDir bidiGlobalDir =
        SoyBidiUtils.decodeBidiGlobalDirFromPyOptions(pySrcOptions.getBidiIsRtlFn());
    // Code generation must run inside the API call scope so scoped data (e.g. bidi direction)
    // is visible to the visitors; try-with-resources guarantees the scope is exited.
    try (SoyScopedData.InScope inScope = apiCallScope.enter(/* msgBundle= */ null, bidiGlobalDir)) {
      return createVisitor(pySrcOptions, inScope.getBidiGlobalDir(), errorReporter, currentManifest)
          .gen(soyTree, errorReporter);
    }
  }

  /**
   * Generates Python source files given a Soy parse tree, an options object, and information on
   * where to put the output files.
   *
   * @param soyTree The Soy parse tree to generate Python source code for.
   * @param pySrcOptions The compilation options relevant to this backend.
   * @param outputPathFormat The format string defining how to build the output file path
   *     corresponding to an input file path.
   * @param errorReporter The Soy error reporter that collects errors during code generation.
   * @throws IOException If there is an error in opening/writing an output Python file.
   */
  public void genPyFiles(
      SoyFileSetNode soyTree,
      SoyPySrcOptions pySrcOptions,
      String outputPathFormat,
      ErrorReporter errorReporter)
      throws IOException {
    ImmutableList<SoyFileNode> srcsToCompile = ImmutableList.copyOf(soyTree.getChildren());

    // Determine the output paths.
    List<String> soyNamespaces = getSoyNamespaces(soyTree);
    Multimap<String, Integer> outputs =
        MainEntryPointUtils.mapOutputsToSrcs(null, outputPathFormat, srcsToCompile);

    // Generate the manifest and add it to the current manifest.
    ImmutableMap<String, String> manifest = generateManifest(soyNamespaces, outputs, errorReporter);

    // Generate the Python source.
    List<String> pyFileContents = genPySrc(soyTree, pySrcOptions, manifest, errorReporter);

    // The backend must produce exactly one code chunk per source file; anything else indicates an
    // internal bug, not a user error.
    if (srcsToCompile.size() != pyFileContents.size()) {
      throw new AssertionError(
          String.format(
              "Expected to generate %d code chunk(s), got %d",
              srcsToCompile.size(), pyFileContents.size()));
    }

    // Write out the Python outputs. Multiple inputs may map to the same output file, in which case
    // their contents are concatenated in input order.
    for (String outputFilePath : outputs.keySet()) {
      try (Writer out = Files.newWriter(new File(outputFilePath), StandardCharsets.UTF_8)) {
        for (int inputFileIndex : outputs.get(outputFilePath)) {
          out.write(pyFileContents.get(inputFileIndex));
        }
      }
    }

    // Write out the manifest file (one "namespace=python.path" line per entry), if requested.
    if (pySrcOptions.namespaceManifestFile() != null) {
      try (Writer out =
          Files.newWriter(new File(pySrcOptions.namespaceManifestFile()), StandardCharsets.UTF_8)) {
        for (String namespace : manifest.keySet()) {
          out.write(String.format("%s=%s\n", namespace, manifest.get(namespace)));
        }
      }
    }
  }

  /**
   * Generate the manifest file by finding the output file paths and converting them into a Python
   * import format.
   *
   * @param soyNamespaces Namespace of each input file, indexed by input-file position.
   * @param outputs Map from output file path to the indexes of the input files written to it.
   * @param errorReporter Collects a {@link #DUPLICATE_NAMESPACE_ERROR} for each duplicate
   *     namespace; the last occurrence wins in the returned map.
   * @return Immutable map from Soy namespace to dotted Python module path.
   */
  private static ImmutableMap<String, String> generateManifest(
      List<String> soyNamespaces, Multimap<String, Integer> outputs, ErrorReporter errorReporter) {
    Map<String, String> manifest = new HashMap<>();
    for (String outputFilePath : outputs.keySet()) {
      // Strip only the trailing ".py" extension. The previous String.replace(".py", "") removed
      // every ".py" occurrence anywhere in the path, mangling paths such as "my.python/foo.py".
      String pathWithoutExtension =
          outputFilePath.endsWith(".py")
              ? outputFilePath.substring(0, outputFilePath.length() - ".py".length())
              : outputFilePath;
      String pythonPath = pathWithoutExtension.replace('/', '.');
      for (int inputFileIndex : outputs.get(outputFilePath)) {
        String namespace = soyNamespaces.get(inputFileIndex);
        if (manifest.containsKey(namespace)) {
          errorReporter.report(SourceLocation.UNKNOWN, DUPLICATE_NAMESPACE_ERROR, namespace);
        }
        manifest.put(namespace, pythonPath);
      }
    }
    return ImmutableMap.copyOf(manifest);
  }

  /** Returns the namespace of each Soy file in {@code soyTree}, in file order. */
  private List<String> getSoyNamespaces(SoyFileSetNode soyTree) {
    List<String> namespaces = new ArrayList<>();
    for (SoyFileNode soyFile : soyTree.getChildren()) {
      namespaces.add(soyFile.getNamespace());
    }
    return namespaces;
  }

  /**
   * Builds the code-generation visitor graph for the given options.
   *
   * @param pySrcOptions The compilation options relevant to this backend.
   * @param bidiGlobalDir The bidi global directionality to generate code for.
   * @param errorReporter The Soy error reporter that collects errors during code generation.
   * @param currentManifest The namespace manifest for current sources.
   */
  @VisibleForTesting
  static GenPyCodeVisitor createVisitor(
      SoyPySrcOptions pySrcOptions,
      BidiGlobalDir bidiGlobalDir,
      ErrorReporter errorReporter,
      ImmutableMap<String, String> currentManifest) {
    final IsComputableAsPyExprVisitor isComputableAsPyExprs = new IsComputableAsPyExprVisitor();
    // There is a circular dependency between the GenPyExprsVisitorFactory and GenPyCallExprVisitor
    // here we resolve it with a mutable field in a custom provider
    final PythonValueFactoryImpl pluginValueFactory =
        new PythonValueFactoryImpl(errorReporter, bidiGlobalDir);
    class PyCallExprVisitorSupplier implements Supplier<GenPyCallExprVisitor> {
      GenPyExprsVisitorFactory factory;

      @Override
      public GenPyCallExprVisitor get() {
        // checkNotNull guards against get() being called before the factory field is populated.
        return new GenPyCallExprVisitor(
            isComputableAsPyExprs, pluginValueFactory, checkNotNull(factory));
      }
    }
    PyCallExprVisitorSupplier provider = new PyCallExprVisitorSupplier();
    GenPyExprsVisitorFactory genPyExprsFactory =
        new GenPyExprsVisitorFactory(isComputableAsPyExprs, pluginValueFactory, provider);
    provider.factory = genPyExprsFactory;

    return new GenPyCodeVisitor(
        pySrcOptions,
        currentManifest,
        isComputableAsPyExprs,
        genPyExprsFactory,
        provider.get(),
        pluginValueFactory);
  }
}
| |
/*
* Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
* All rights reserved.
*
* This material may be used, modified, or reproduced by or for the U.S.
* Government pursuant to the rights granted under the clauses at
* DFARS 252.227-7013/7014 or FAR 52.227-14.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* NO WARRANTY. THIS MATERIAL IS PROVIDED "AS IS." JHU/APL DISCLAIMS ALL
* WARRANTIES IN THE MATERIAL, WHETHER EXPRESS OR IMPLIED, INCLUDING (BUT NOT
* LIMITED TO) ANY AND ALL IMPLIED WARRANTIES OF PERFORMANCE,
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT OF
* INTELLECTUAL PROPERTY RIGHTS. ANY USER OF THE MATERIAL ASSUMES THE ENTIRE
* RISK AND LIABILITY FOR USING THE MATERIAL. IN NO EVENT SHALL JHU/APL BE
* LIABLE TO ANY USER OF THE MATERIAL FOR ANY ACTUAL, INDIRECT,
* CONSEQUENTIAL, SPECIAL OR OTHER DAMAGES ARISING FROM THE USE OF, OR
* INABILITY TO USE, THE MATERIAL, INCLUDING, BUT NOT LIMITED TO, ANY DAMAGES
* FOR LOST PROFITS.
*/
package edu.jhuapl.openessence.controller;
import edu.jhuapl.openessence.datasource.Dimension;
import edu.jhuapl.openessence.datasource.OeDataSourceAccessException;
import edu.jhuapl.openessence.datasource.entry.ChildRecordSet;
import edu.jhuapl.openessence.datasource.entry.CompleteRecord;
import edu.jhuapl.openessence.datasource.entry.DbKeyValMap;
import edu.jhuapl.openessence.datasource.jdbc.JdbcOeDataSource;
import edu.jhuapl.openessence.datasource.jdbc.entry.JdbcOeDataEntrySource;
import edu.jhuapl.openessence.datasource.jdbc.entry.TableAwareQueryRecord;
import edu.jhuapl.openessence.model.DeleteRequest;
import edu.jhuapl.openessence.upload.FileImporter;
import edu.jhuapl.openessence.upload.FileImporterRegistry;
import edu.jhuapl.openessence.web.util.ControllerUtils;
import edu.jhuapl.openessence.web.util.ErrorMessageException;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
@Controller
@RequestMapping("/input")
public class InputController extends OeController {
private final Logger log = LoggerFactory.getLogger(getClass());
@Inject
private FileImporterRegistry fileImporters;
    /**
     * Add record to the database with the provided values. Response should be a JSON formatted collection with #success
     * and #record fields indicating status of request and generated keys (if appropriate) for new record.
     *
     * @param ds data source to be updated
     * @param request request object containing parameters such as data source, field values, etc.
     * @return result of {@code addCompleteRecord} (status and, where applicable, generated keys)
     * @throws ErrorMessageException if a request parameter cannot be formatted for its dimension
     * @throws OeDataSourceAccessException on data source access failure
     */
    @RequestMapping(value = "/add", method = {POST, PUT}) // POST and PUT b/c we're very un-RESTful
    @ResponseBody
    public Map<String, Object> add(@RequestParam("dsId") JdbcOeDataSource ds, final HttpServletRequest request)
            throws ErrorMessageException, OeDataSourceAccessException, IOException {

        // assumes the bound data source is always a JdbcOeDataEntrySource — TODO confirm;
        // a non-entry JdbcOeDataSource would fail here with a ClassCastException
        JdbcOeDataEntrySource jdes = (JdbcOeDataEntrySource) ds;
        Set<String> pks = jdes.getParentTableDetails().getPks();

        // get parent dimensions/values
        Map<String, Dimension> dimensions = new HashMap<String, Dimension>();
        Map<String, Object> values = new HashMap<String, Object>();
        for (Dimension dimension : jdes.getEditDimensions()) {
            dimensions.put(dimension.getId(), dimension);

            // Auto generated, special sql, and pk dimensions are not required on adds
            // NOTE(review): the comment above contradicts the expression below — as written,
            // a dimension is required only when it IS a primary key (and is neither
            // auto-generated nor special-sql). If the comment states the intent, this
            // should be !pks.contains(...); confirm which is correct before changing.
            boolean isRequired = (jdes.getAutoGeneratedDimension(dimension.getId()) == null
                    && jdes.getSpecialSqlDimension(dimension.getId()) == null
                    && pks.contains(dimension.getId()));
            // formatData converts the raw request parameter to the dimension's SQL type and
            // contributes the (id -> value) entry merged into the parent record's values.
            values.putAll(ControllerUtils.formatData(dimension.getId(), request.getParameter(dimension.getId()),
                    dimension.getSqlType(), isRequired));
        }

        // Assemble the parent record plus any child record sets from the request, then insert.
        CompleteRecord completeRecord = new CompleteRecord(new TableAwareQueryRecord(jdes.getTableName(), pks,
                dimensions, values),
                ControllerUtils.getChildRecordSets(jdes, request, true));
        return jdes.addCompleteRecord(completeRecord, false);
    }
@RequestMapping(value = "/update", method = {POST, PUT})
@ResponseBody
public Map<String, Object> update(@RequestParam("dsId") JdbcOeDataSource ds, WebRequest request,
HttpServletRequest servletRequest)
throws ErrorMessageException, OeDataSourceAccessException, IOException {
JdbcOeDataEntrySource jdes = (JdbcOeDataEntrySource) ds;
// find primary keys for record
DbKeyValMap dbKeyValMap = ControllerUtils.parseKeyValueMap(jdes, request.getParameterMap());
// retrieve existing record and children
CompleteRecord
completeRecord =
jdes.getCompleteRecord(dbKeyValMap, new ArrayList<String>(jdes.getChildTableMap().keySet()));
// Option to only update parameter values on the completeRecord that
// are included as part of the request (when merge parameter is true)
// Defaults to false (nullify parameter values not included on request)
boolean merge = Boolean.valueOf(request.getParameter("merge"));
// parent record's values are replaced with request param values
for (String field : completeRecord.getParentRecord().getValues().keySet()) {
String parameter = request.getParameter(field);
TableAwareQueryRecord parentRecord = completeRecord.getParentRecord();
if (parameter != null) {
parentRecord.getValues().putAll(ControllerUtils.formatData(field, parameter,
parentRecord.getEditDimensions().get(field)
.getSqlType(),
dbKeyValMap.keySet().contains(field)));
} else if (!merge) {
// NEEDS additional flags for data sources using default input panels
// nullify parameter values on the complete record, if it is an edit dimension
parentRecord.getValues().put(field, null);
}
}
// remove existing children
for (ChildRecordSet childRecordSet : completeRecord.getChildrenRecordSets()) {
childRecordSet.removeAllChildRecords();
}
completeRecord.setChildrenRecordSets(ControllerUtils.getChildRecordSets(jdes, servletRequest, false));
jdes.updateCompleteRecord(dbKeyValMap, completeRecord);
Map<String, Object> data = data(ds, request);// new HashMap<String, Object>();
data.put("success", true);
return data; // TODO return RESTful response, i.e. data actually updated
}
@RequestMapping(value = "/data", method = GET)
@ResponseBody
public Map<String, Object> data(@RequestParam("dsId") JdbcOeDataSource ds, WebRequest request)
throws ErrorMessageException, OeDataSourceAccessException {
JdbcOeDataEntrySource jdes = (JdbcOeDataEntrySource) ds;
DbKeyValMap dbKeyValMap = new DbKeyValMap();
String doNotParseKeys = request.getParameter("doNotParseKeys");
if (doNotParseKeys == null || !doNotParseKeys.equalsIgnoreCase("true")) {
dbKeyValMap = ControllerUtils.parseKeyValueMap(jdes, request.getParameterMap());
}
// retrieve existing record and children
CompleteRecord completeRecord = jdes.getCompleteRecord(dbKeyValMap,
new ArrayList<String>(jdes.getChildTableMap().keySet()));
Map<String, Object> data = ControllerUtils.mapDataAndFormatTimeForResponse(completeRecord.getParentRecord()
.getValues().keySet(),
completeRecord.getParentRecord()
.getValues());
// Children
for (ChildRecordSet childRecordSet : completeRecord.getChildrenRecordSets()) {
List<Object> childRecords = new ArrayList<Object>();
for (TableAwareQueryRecord tableAwareQueryRecord : childRecordSet.getChildRecords()) {
childRecords.add(ControllerUtils
.mapDataAndFormatTimeForResponse(tableAwareQueryRecord.getValues().keySet(),
tableAwareQueryRecord.getValues()));
}
data.put(childRecordSet.getChildTableName(), childRecords);
}
return data;
}
/**
* @param ds data source ID sent on URL
* @param body JSON POST body
*/
@RequestMapping(value = "/delete",
// FIXME this should obviously be DELETE, but we send an entity body
method = POST)
@ResponseBody
public Map<String, Object> delete(@RequestParam("dsId") JdbcOeDataSource ds, @RequestBody DeleteRequest body)
throws IOException, ErrorMessageException, OeDataSourceAccessException {
JdbcOeDataEntrySource jdes = (JdbcOeDataEntrySource) ds;
List<DbKeyValMap> pksForDeletion = new ArrayList<DbKeyValMap>();
for (Map<String, String> pks : body.getPkIds()) {
pksForDeletion.add(ControllerUtils.parseKeyValueMap(jdes, pks));
}
jdes.deleteQueryRecords(jdes.getTableName(), pksForDeletion);
// Build/write response
Map<String, Object> data = new HashMap<String, Object>();
data.put("success", true);
return data; // TODO return RESTful response, i.e. data actually deleted
}
@RequestMapping(value = "/importExcel", method = POST)
public void importExcel(@RequestPart MultipartFile file, @RequestParam("dsId") JdbcOeDataSource ds,
HttpServletResponse response)
throws IOException, ServletException {
ObjectMapper mapper = new ObjectMapper();
try {
// Ext needs this for the crazy way it does file uploads
// it's normally bad to manually write JSON, but dealing with a custom Spring MessageConverter seems like overkill
response.setContentType("text/html;charset=utf-8");
FileImporter<?> importer = fileImporters.get(ds);
if (importer == null) {
log.error("No file importer configured for data source {}", ds.getDataSourceId());
throw new IllegalArgumentException("No file importer configured");
}
response.getWriter().write(mapper.writeValueAsString(importer.importFile(file)));
} catch (Exception e) {
// respond to exception as normal, but with content type text/html
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
response.getWriter().write(mapper.writeValueAsString(handleException(e)));
}
}
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.entity;
import co.aikar.timings.Timing;
import com.google.common.base.MoreObjects;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.util.DamageSource;
import org.spongepowered.api.entity.EntityType;
import org.spongepowered.api.text.translation.Translation;
import org.spongepowered.common.SpongeCatalogType;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.SpongeImplHooks;
import org.spongepowered.common.config.SpongeConfig;
import org.spongepowered.common.config.category.EntityTrackerCategory;
import org.spongepowered.common.config.category.EntityTrackerModCategory;
import org.spongepowered.common.config.type.TrackerConfig;
import org.spongepowered.common.relocate.co.aikar.timings.SpongeTimings;
import org.spongepowered.common.text.translation.SpongeTranslation;
import java.util.Locale;
import javax.annotation.Nullable;
/**
 * Catalog/registry entry describing an entity type known to Sponge: its numeric
 * id, name, owning mod id, backing Minecraft entity class, and the per-type
 * tracker flags loaded from the tracker config. Instances are effectively
 * write-once; tracker flags are (re)computed by {@link #initializeTrackerState()}.
 */
public class SpongeEntityType extends SpongeCatalogType.Translatable implements EntityType {

    /**
     * Sentinel used when an entity's real type cannot be resolved. It reports
     * itself as not {@link #isKnown() known}, has no usable entity class, and
     * overrides {@link #initializeTrackerState()} to a no-op so the constructor
     * does not touch the tracker config for it.
     */
    public static final EntityType UNKNOWN = new SpongeEntityType(-999999, "Unknown", "unknown", Entity.class, new SpongeTranslation("entity.generic.name")) {
        @Override
        public String getName() {
            return "Unknown";
        }

        @Override
        public Class<? extends org.spongepowered.api.entity.Entity> getEntityClass() {
            throw new UnsupportedOperationException("Unknown entity type has no entity class");
        }

        @Override
        public void initializeTrackerState() {
            // no need to initialize
        }

        @Override
        public boolean isKnown() {
            return false;
        }
    };

    public final int entityTypeId;
    public final String entityName;
    public final String modId;
    public final Class<? extends Entity> entityClass;
    private final boolean isVanilla;
    private EnumCreatureType creatureType;
    private boolean activationRangeInitialized = false;
    // Lazily created on first getTimingsHandler() call (not thread-safe; assumed
    // to be accessed from the server thread only -- confirm).
    @Nullable private Timing timing = null;

    // Used by tracker config
    public boolean allowsBlockBulkCapture = true;
    public boolean allowsEntityBulkCapture = true;
    public boolean allowsBlockEventCreation = true;
    public boolean allowsEntityEventCreation = true;
    public boolean isModdedDamageEntityMethod = false;

    /**
     * Creates a vanilla ("minecraft") entity type.
     *
     * @param id          numeric entity type id
     * @param name        entity name (lowercased internally)
     * @param clazz       backing Minecraft entity class
     * @param translation display translation, may be null (falls back to UNKNOWN's)
     */
    public SpongeEntityType(int id, String name, Class<? extends Entity> clazz, Translation translation) {
        this(id, name.toLowerCase(Locale.ENGLISH), "minecraft", clazz, translation);
    }

    /**
     * Creates an entity type for the given mod.
     *
     * @param id          numeric entity type id
     * @param name        entity name (lowercased internally)
     * @param modId       owning mod id (lowercased internally)
     * @param clazz       backing Minecraft entity class
     * @param translation display translation, may be null (falls back to UNKNOWN's)
     */
    public SpongeEntityType(int id, String name, String modId, Class<? extends Entity> clazz, Translation translation) {
        super(modId.toLowerCase(Locale.ENGLISH) + ":" + name.toLowerCase(Locale.ENGLISH), check(translation));
        this.entityTypeId = id;
        this.entityName = name.toLowerCase(Locale.ENGLISH);
        this.entityClass = clazz;
        this.modId = modId.toLowerCase(Locale.ENGLISH);
        this.isVanilla = this.entityClass.getName().startsWith("net.minecraft.");
        // Deliberate call of an overridable method from the constructor: the
        // UNKNOWN anonymous subclass relies on overriding this to a no-op.
        this.initializeTrackerState();
    }

    // Null-safe translation fallback used by the super() call above.
    private static Translation check(@Nullable Translation translation) {
        if (translation == null) {
            return UNKNOWN.getTranslation();
        }
        return translation;
    }

    @Override
    public String getName() {
        return this.entityName;
    }

    public String getModId() {
        return this.modId;
    }

    @Nullable
    public EnumCreatureType getEnumCreatureType() {
        return this.creatureType;
    }

    public void setEnumCreatureType(EnumCreatureType type) {
        this.creatureType = type;
    }

    public boolean isActivationRangeInitialized() {
        return this.activationRangeInitialized;
    }

    public void setActivationRangeInitialized(boolean flag) {
        this.activationRangeInitialized = flag;
    }

    /**
     * Loads (and, if auto-populate is enabled, persists) this type's tracker
     * flags from the tracker config, creating default per-mod/per-entity entries
     * as needed. Also detects whether a mod overrides the damage-entity method.
     */
    public void initializeTrackerState() {
        final SpongeConfig<TrackerConfig> trackerConfigAdapter = SpongeImpl.getTrackerConfigAdapter();
        final EntityTrackerCategory entityTrackerCat = trackerConfigAdapter.getConfig().getEntityTracker();
        EntityTrackerModCategory entityTrackerModCat = entityTrackerCat.getModMappings().get(this.modId);
        if (entityTrackerModCat == null) {
            entityTrackerModCat = new EntityTrackerModCategory();
            entityTrackerCat.getModMappings().put(this.modId, entityTrackerModCat);
        }
        // entityName is already lowercased in the constructor; hoist the key once
        // instead of recomputing it for every map access.
        final String entityKey = this.entityName.toLowerCase(Locale.ENGLISH);
        if (!entityTrackerModCat.isEnabled()) {
            // Mod disabled: force all flags off, but still seed config entries so
            // users can re-enable individual entities.
            this.allowsBlockBulkCapture = false;
            this.allowsEntityBulkCapture = false;
            this.allowsBlockEventCreation = false;
            this.allowsEntityEventCreation = false;
            entityTrackerModCat.getBlockBulkCaptureMap().computeIfAbsent(entityKey, k -> this.allowsBlockBulkCapture);
            entityTrackerModCat.getEntityBulkCaptureMap().computeIfAbsent(entityKey, k -> this.allowsEntityBulkCapture);
            entityTrackerModCat.getBlockEventCreationMap().computeIfAbsent(entityKey, k -> this.allowsBlockEventCreation);
            entityTrackerModCat.getEntityEventCreationMap().computeIfAbsent(entityKey, k -> this.allowsEntityEventCreation);
        } else {
            // Mod enabled: read per-entity flags, defaulting missing entries to true.
            this.allowsBlockBulkCapture = entityTrackerModCat.getBlockBulkCaptureMap().computeIfAbsent(entityKey, k -> true);
            this.allowsEntityBulkCapture = entityTrackerModCat.getEntityBulkCaptureMap().computeIfAbsent(entityKey, k -> true);
            this.allowsBlockEventCreation = entityTrackerModCat.getBlockEventCreationMap().computeIfAbsent(entityKey, k -> true);
            this.allowsEntityEventCreation = entityTrackerModCat.getEntityEventCreationMap().computeIfAbsent(entityKey, k -> true);
        }
        if (entityTrackerCat.autoPopulateData()) {
            trackerConfigAdapter.save();
        }
        try {
            // Detect whether the entity class overrides damageEntity somewhere
            // outside the vanilla hierarchy.
            String mapping = SpongeImplHooks.isDeobfuscatedEnvironment() ? "damageEntity" : "func_70665_d";
            Class<?>[] argTypes = {DamageSource.class, float.class };
            // BUG FIX: reflect on the tracked entity class rather than
            // this.getClass() -- SpongeEntityType never declares damageEntity, so
            // the old lookup always threw and the flag could never be set.
            Class<?> clazz = this.entityClass.getMethod(mapping, argTypes).getDeclaringClass();
            if (!(clazz.equals(EntityLivingBase.class) || clazz.equals(EntityPlayer.class) || clazz.equals(EntityPlayerMP.class))) {
                this.isModdedDamageEntityMethod = true;
            }
        } catch (Throwable ex) {
            // ignore -- lookup failure means the method is treated as vanilla.
            // NOTE(review): Class.getMethod only finds public methods; if
            // damageEntity is protected this still fails -- confirm against the
            // mappings whether getDeclaredMethod is needed here.
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public Class<? extends org.spongepowered.api.entity.Entity> getEntityClass() {
        return (Class<? extends org.spongepowered.api.entity.Entity>) this.entityClass;
    }

    @Override
    protected MoreObjects.ToStringHelper toStringHelper() {
        return super.toStringHelper()
                .add("modid", this.modId)
                .add("class", this.entityClass.getName());
    }

    /** Returns the (lazily created) timings handler for this entity type. */
    public Timing getTimingsHandler() {
        if (this.timing == null) {
            this.timing = SpongeTimings.getEntityTiming(this);
        }
        return this.timing;
    }

    /** True for every real type; the UNKNOWN sentinel overrides this to false. */
    public boolean isKnown() {
        return true;
    }

    /** True when the backing class lives in the net.minecraft. package. */
    public boolean isVanilla() {
        return this.isVanilla;
    }
}
| |
/*
* ******************************************************************************
* * Copyright 2015 See AUTHORS file.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *****************************************************************************
*/
package com.o2d.pkayjava.editor.view.ui.dialog;
import java.util.HashMap;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.scenes.scene2d.Action;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Touchable;
import com.badlogic.gdx.scenes.scene2d.actions.Actions;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.badlogic.gdx.utils.Align;
import com.kotcrab.vis.ui.VisUI;
import com.kotcrab.vis.ui.widget.VisLabel;
import com.kotcrab.vis.ui.widget.VisProgressBar;
import com.kotcrab.vis.ui.widget.VisTable;
import com.kotcrab.vis.ui.widget.VisTextButton;
import com.o2d.pkayjava.editor.Overlap2DFacade;
import com.commons.UIDraggablePanel;
import com.o2d.pkayjava.editor.utils.ImportUtils;
/**
 * Draggable dialog for importing resources (images, animations, fonts, particle
 * effects, ...) either by drag-and-drop onto a drop region or via a file
 * browser. Button clicks are published as facade notifications rather than
 * handled locally.
 */
public class ImportDialog extends UIDraggablePanel {

    private static final String TAG;
    public static final String NAME;
    // Notification ids sent through the Overlap2D facade.
    public static final String BROWSE_BTN_CLICKED;
    public static final String CANCEL_BTN_CLICKED;
    public static final String IMPORT_BTN_CLICKED;

    static {
        TAG = ImportDialog.class.getName();
        NAME = TAG;
        BROWSE_BTN_CLICKED = NAME + "." + "BROWSE_BTN_CLICKED";
        CANCEL_BTN_CLICKED = NAME + "." + "CANCEL_BTN_CLICKED";
        IMPORT_BTN_CLICKED = NAME + "." + "IMPORT_BTN_CLICKED";
    }

    private Overlap2DFacade facade;
    private VisTable mainTable;
    private Image dropRegion;
    private VisLabel errorLabel;
    private VisProgressBar progressBar;
    // Human-readable names for the ImportUtils.TYPE_* constants.
    private HashMap<Integer, String> typeNames = new HashMap<>();

    ImportDialog() {
        super("Import Resources");
        setMovable(true);
        setModal(false);
        addCloseButton();
        setStyle(VisUI.getSkin().get("box", WindowStyle.class));
        getTitleLabel().setAlignment(Align.left);
        setWidth(250);
        setHeight(100);
        facade = Overlap2DFacade.getInstance();
        fillTypeNames();
        mainTable = new VisTable();
        add(mainTable).fill().expand();
        setDroppingView();
        // Error label floats over the dialog; it starts fully transparent and is
        // faded in/out by showError().
        errorLabel = new VisLabel("File you selected was too sexy to import");
        errorLabel.setColor(Color.RED);
        addActor(errorLabel);
        errorLabel.getColor().a = 0;
        errorLabel.setTouchable(Touchable.disabled);
    }

    // Populates the TYPE_* -> display-name lookup used by setImportingView().
    private void fillTypeNames() {
        typeNames.clear();
        typeNames.put(ImportUtils.TYPE_ANIMATION_PNG_SEQUENCE, "PNG Sequence Animation");
        typeNames.put(ImportUtils.TYPE_BITMAP_FONT, "Bitmap Font");
        typeNames.put(ImportUtils.TYPE_IMAGE, "Texture");
        typeNames.put(ImportUtils.TYPE_TEXTURE_ATLAS, "Texture Atlas");
        typeNames.put(ImportUtils.TYPE_PARTICLE_EFFECT, "Particle Effect");
        typeNames.put(ImportUtils.TYPE_SPINE_ANIMATION, "Spine Animation");
        typeNames.put(ImportUtils.TYPE_SPRITE_ANIMATION_ATLAS, "Animation Atlas Pack");
        typeNames.put(ImportUtils.TYPE_SPRITER_ANIMATION, "Spriter Animation");
        typeNames.put(ImportUtils.TYPE_TTF_FONT, "TTF Font");
    }

    @Override
    public void hide(Action action) {
        super.hide(action);
    }

    public Image getDropRegion() {
        return dropRegion;
    }

    /**
     * Returns true when the given screen-space mouse position is over the drop
     * region; otherwise dims the region and returns false. The -8/-31 offsets
     * compensate for the window decoration (assumption carried over from the
     * original code -- confirm against the host window chrome).
     */
    public boolean checkDropRegionHit(Vector2 mousePos) {
        Vector2 pos = new Vector2(mousePos.x - 8, mousePos.y - 31);
        pos = dropRegion.screenToLocalCoordinates(pos);
        if (dropRegion.hit(pos.x, pos.y, false) != null) {
            return true;
        }
        dropRegion.getColor().a = 0.3f;
        return false;
    }

    /** Highlights the drop region while a drag hovers over it. */
    public void dragOver() {
        dropRegion.getColor().a = 0.5f;
    }

    /** Dims the drop region when a drag leaves it (also the idle state). */
    public void dragExit() {
        dropRegion.getColor().a = 0.3f;
    }

    /** Rebuilds the dialog content for the initial "drop or browse" state. */
    public void setDroppingView() {
        mainTable.clear();
        VisLabel helpLbl = new VisLabel("Supported file types: images, sprite animations (atlas or img sequence), spine animations, spriter scml, particle effects");
        helpLbl.setWidth(260);
        helpLbl.setWrap(true);
        mainTable.add(helpLbl).width(260).padLeft(5);
        mainTable.row().padBottom(5);
        dropRegion = new Image(VisUI.getSkin().getDrawable("dropHere"));
        mainTable.add(dropRegion).padRight(6).padBottom(6).padTop(10);
        mainTable.row().pad(5);
        mainTable.add(new VisLabel("or browse files on file system"));
        mainTable.row().pad(5);
        VisTextButton showFileSelectBtn = new VisTextButton("Browse");
        mainTable.add(showFileSelectBtn).width(88);
        mainTable.row().pad(5);
        initDropListeners(showFileSelectBtn);
        dragExit();
        pack();
    }

    /**
     * Rebuilds the dialog content for the "importing in progress" state with a
     * progress bar and Cancel/Import buttons.
     *
     * @param type       one of the ImportUtils.TYPE_* constants
     * @param isMultiple true when several files of that type are being imported
     */
    public void setImportingView(int type, boolean isMultiple) {
        mainTable.clear();
        errorLabel.getColor().a = 0;
        errorLabel.clearActions();
        String typeText = typeNames.get(type);
        if (isMultiple) typeText += "'s";
        mainTable.add(new VisLabel("Currently importing: " + typeText)).left();
        mainTable.row().padBottom(5);
        progressBar = new VisProgressBar(0, 100, 1, false);
        mainTable.add(progressBar).fillX().padTop(5).width(250);
        mainTable.row().padBottom(5);
        VisTextButton cancelBtn = new VisTextButton("Cancel");
        VisTextButton importBtn = new VisTextButton("Import");
        VisTable btnTable = new VisTable();
        btnTable.add("").expand().right();
        btnTable.add(cancelBtn).width(50).padRight(5);
        btnTable.add(importBtn).width(50).right().padRight(3);
        // BUG FIX: the button row was built and wired but never attached to the
        // dialog, so Cancel/Import could never be displayed or clicked.
        mainTable.add(btnTable).fillX();
        mainTable.row().pad(5);
        pack();
        initImportListeners(cancelBtn, importBtn);
    }

    // Wires the Browse button to publish BROWSE_BTN_CLICKED.
    private void initDropListeners(VisTextButton browseBtn) {
        browseBtn.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                facade.sendNotification(BROWSE_BTN_CLICKED);
            }
        });
    }

    // Wires Cancel/Import buttons to publish their respective notifications.
    private void initImportListeners(VisTextButton cancelBtn, VisTextButton importBtn) {
        cancelBtn.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                facade.sendNotification(CANCEL_BTN_CLICKED);
            }
        });
        importBtn.addListener(new ClickListener() {
            @Override
            public void clicked(InputEvent event, float x, float y) {
                facade.sendNotification(IMPORT_BTN_CLICKED);
            }
        });
    }

    /**
     * Fades in an error message appropriate for the given import-failure type,
     * then fades it back out.
     *
     * @param type one of the ImportUtils.TYPE_* error constants
     */
    public void showError(int type) {
        String text = "";
        if (type == ImportUtils.TYPE_UNSUPPORTED || type == ImportUtils.TYPE_UNCKNOWN) {
            text = "unsupported file type/types";
        }
        if (type == ImportUtils.TYPE_MIXED) {
            text = "Multiple import types, please use one";
        }
        errorLabel.setX(getWidth() / 2 - errorLabel.getWidth() / 2);
        errorLabel.setY(getHeight() - errorLabel.getHeight() - 87);
        errorLabel.setAlignment(Align.center);
        errorLabel.setText(text);
        errorLabel.addAction(Actions.sequence(Actions.fadeIn(0.3f), Actions.delay(2f), Actions.fadeOut(0.7f)));
        dragExit();
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.watcher.common.http;
import io.netty.handler.codec.http.HttpHeaders;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
/**
 * Immutable representation of an HTTP response seen by Watcher. Header names
 * are lowercased at construction time so lookups and serialized payloads are
 * case-consistent. Serializes to and parses from XContent with the shape
 * {status, headers, body}.
 */
public class HttpResponse implements ToXContentObject {

    private final int status;
    // Keys are always lowercase (see the canonical constructor).
    private final Map<String, String[]> headers;
    // May be null when the response had no body.
    private final BytesReference body;

    /** Response with only a status code, no headers and no body. */
    public HttpResponse(int status) {
        this(status, emptyMap());
    }

    /** Response with a status and headers but no body. */
    public HttpResponse(int status, Map<String, String[]> headers) {
        this(status, (BytesReference) null, headers);
    }

    /** Response with a status and an optional UTF-8 string body. */
    public HttpResponse(int status, @Nullable String body) {
        this(status, body != null ? new BytesArray(body) : null, emptyMap());
    }

    /** Response with a status, an optional string body, and headers. */
    public HttpResponse(int status, @Nullable String body, Map<String, String[]> headers) {
        this(status, body != null ? new BytesArray(body) : null, headers);
    }

    /** Response with a status and an optional byte body (empty array counts as no body). */
    public HttpResponse(int status, @Nullable byte[] body) {
        this(status, body != null && body.length > 0 ? new BytesArray(body) : null, emptyMap());
    }

    /** Response with a status, an optional byte body, and headers. */
    public HttpResponse(int status, @Nullable byte[] body, Map<String, String[]> headers) {
        this(status, body != null && body.length > 0 ? new BytesArray(body) : null, headers);
    }

    /**
     * Canonical constructor; all others delegate here. Header keys are
     * lowercased into an unmodifiable copy.
     * NOTE(review): toUnmodifiableMap has no merge function, so two input header
     * names differing only in case would throw IllegalStateException -- confirm
     * the client layer already canonicalizes names before this point.
     */
    public HttpResponse(int status, @Nullable BytesReference body, Map<String, String[]> headers) {
        this.status = status;
        this.body = body;
        this.headers = headers.entrySet()
            .stream()
            .collect(Collectors.toUnmodifiableMap(e -> e.getKey().toLowerCase(Locale.ROOT), Map.Entry::getValue));
    }

    public int status() {
        return status;
    }

    /** True when the response carried a (non-empty) body. */
    public boolean hasContent() {
        return body != null;
    }

    /** The raw body bytes, or null when {@link #hasContent()} is false. */
    public BytesReference body() {
        return body;
    }

    /**
     * Returns all the headers, with keys being lowercased, so they are always consistent
     * in the payload
     */
    public Map<String, List<String>> headers() {
        return headers.entrySet()
            .stream()
            .collect(Collectors.toUnmodifiableMap(e -> e.getKey().toLowerCase(Locale.ROOT), e -> Arrays.asList(e.getValue())));
    }

    /** Case-insensitive single-header lookup; null when absent. */
    public String[] header(String header) {
        return headers.get(header.toLowerCase(Locale.ROOT));
    }

    /** First Content-Type header value, or null when not present. */
    public String contentType() {
        String[] values = header(HttpHeaders.Names.CONTENT_TYPE);
        if (values == null || values.length == 0) {
            return null;
        }
        return values[0];
    }

    /**
     * The XContentType matching the Content-Type header, or null when the header
     * is absent or not a recognized media type.
     */
    public XContentType xContentType() {
        String[] values = header(HttpHeaders.Names.CONTENT_TYPE);
        if (values == null || values.length == 0) {
            return null;
        }
        try {
            return XContentType.fromMediaType(values[0]);
        } catch (IllegalArgumentException e) {
            // HttpInputTests - content-type being unrecognized_content_type
            return null;
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        HttpResponse that = (HttpResponse) o;

        // Note: String[] values compare by identity inside Map.equals; equal
        // responses are therefore expected to share header array instances.
        if (status != that.status) return false;
        if (headers.equals(that.headers) == false) return false;
        return Objects.equals(body, that.body);
    }

    @Override
    public int hashCode() {
        int result = status;
        result = 31 * result + headers.hashCode();
        result = 31 * result + (body != null ? body.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("status=[").append(status).append("]");
        if (headers.isEmpty() == false) {
            sb.append(", headers=[");
            boolean first = true;
            for (Map.Entry<String, String[]> header : headers.entrySet()) {
                if (first == false) {
                    sb.append(", ");
                }
                sb.append("[").append(header.getKey()).append(": ").append(Arrays.toString(header.getValue())).append("]");
                first = false;
            }
            sb.append("]");
        }
        if (hasContent()) {
            sb.append(", body=[").append(body.utf8ToString()).append("]");
        }
        return sb.toString();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder = builder.startObject().field(Field.STATUS.getPreferredName(), status);
        if (headers.isEmpty() == false) {
            builder.startObject(Field.HEADERS.getPreferredName());
            for (Map.Entry<String, String[]> header : headers.entrySet()) {
                // in order to prevent dots in field names, that might occur in headers, we simply de_dot those header names
                // when writing toXContent
                builder.array(header.getKey().replaceAll("\\.", "_"), header.getValue());
            }
            builder.endObject();
        }
        if (hasContent()) {
            builder = builder.field(Field.BODY.getPreferredName(), body.utf8ToString());
        }
        builder.endObject();
        return builder;
    }

    /**
     * Parses an HttpResponse from XContent. Expects the parser to be positioned
     * on START_OBJECT. Accepts: a numeric "status" field (required), a string
     * "body" field, and a "headers" object whose values are either a single
     * scalar or an array of scalars.
     *
     * @throws ElasticsearchParseException on unknown fields, malformed header
     *         values, unexpected tokens, or a missing status field
     */
    public static HttpResponse parse(XContentParser parser) throws IOException {
        assert parser.currentToken() == XContentParser.Token.START_OBJECT;
        // -1 sentinel: status is mandatory and validated after the loop.
        int status = -1;
        String body = null;
        Map<String, String[]> headers = new HashMap<>();

        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (currentFieldName == null) {
                throw new ElasticsearchParseException("could not parse http response. expected a field name but found [{}] instead", token);
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                if (Field.STATUS.match(currentFieldName, parser.getDeprecationHandler())) {
                    status = parser.intValue();
                } else {
                    throw new ElasticsearchParseException("could not parse http response. unknown numeric field [{}]", currentFieldName);
                }
            } else if (token == XContentParser.Token.VALUE_STRING) {
                if (Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) {
                    body = parser.text();
                } else {
                    throw new ElasticsearchParseException("could not parse http response. unknown string field [{}]", currentFieldName);
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Nested object: treated as the headers map regardless of its
                // field name.
                String headerName = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        headerName = parser.currentName();
                    } else if (headerName == null) {
                        throw new ElasticsearchParseException(
                            "could not parse http response. expected a header name but found [{}] " + "instead",
                            token
                        );
                    } else if (token.isValue()) {
                        // Single scalar value becomes a one-element array.
                        headers.put(headerName, new String[] { String.valueOf(parser.objectText()) });
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        // Array of scalar values for a multi-valued header.
                        List<String> values = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token.isValue() == false) {
                                throw new ElasticsearchParseException(
                                    "could not parse http response. expected a header value for header " + "[{}] but found [{}] instead",
                                    headerName,
                                    token
                                );
                            } else {
                                values.add(String.valueOf(parser.objectText()));
                            }
                        }
                        headers.put(headerName, values.toArray(new String[values.size()]));
                    }
                }
            } else {
                throw new ElasticsearchParseException("could not parse http response. unexpected token [{}]", token);
            }
        }

        if (status < 0) {
            throw new ElasticsearchParseException(
                "could not parse http response. missing required numeric [{}] field holding the " + "response's http status code",
                Field.STATUS.getPreferredName()
            );
        }
        return new HttpResponse(status, body, unmodifiableMap(headers));
    }

    // Field names shared by toXContent and parse.
    interface Field {
        ParseField STATUS = new ParseField("status");
        ParseField HEADERS = new ParseField("headers");
        ParseField BODY = new ParseField("body");
    }
}
| |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.gui.view.impl.common;
import org.deidentifier.arx.Data;
import org.deidentifier.arx.DataDefinition;
import org.deidentifier.arx.DataHandle;
import org.deidentifier.arx.DataHandleSubset;
import org.deidentifier.arx.RowSet;
import org.deidentifier.arx.gui.Controller;
import org.deidentifier.arx.gui.model.ModelEvent;
import org.deidentifier.arx.gui.model.ModelEvent.ModelPart;
import org.deidentifier.arx.gui.resources.Resources;
import org.eclipse.nebula.widgets.nattable.selection.event.CellSelectionEvent;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
/**
* A view on a <code>Data</code> object.
*
* @author Fabian Prasser
*/
public class ViewDataInput extends ViewData {
/**
 * Creates a new (non-editable) data view.
 *
 * @param parent     the parent composite to render into
 * @param controller the application controller used for model-event wiring
 * @param helpid     id of the associated help page
 */
public ViewDataInput(final Composite parent,
                     final Controller controller,
                     final String helpid) {
    // Delegate to the full constructor with editing disabled.
    this (parent, controller, helpid, false);
}
/**
 * Creates a new data view.
 *
 * @param parent     the parent composite to render into
 * @param controller the application controller used for model-event wiring
 * @param helpid     id of the associated help page
 * @param editable   when true, a right-click context menu with find/replace is
 *                   attached to the data table
 */
public ViewDataInput(final Composite parent,
                     final Controller controller,
                     final String helpid,
                     final boolean editable) {
    super(parent, controller, helpid, Resources.getMessage("AnalyzeView.1")); //$NON-NLS-1$

    // Register for the model events this view reacts to (see update()).
    controller.addListener(ModelPart.RESEARCH_SUBSET, this);
    controller.addListener(ModelPart.RESULT, this);
    controller.addListener(ModelPart.ATTRIBUTE_TYPE, this);
    controller.addListener(ModelPart.ATTRIBUTE_VALUE, this);

    // Make editable
    if (editable) {
        // Context menu with a single "find/replace" entry.
        final Menu menu = new Menu(parent.getShell());
        MenuItem item1 = new MenuItem(menu, SWT.NONE);
        item1.setText(Resources.getMessage("ViewDataInput.0")); //$NON-NLS-1$
        item1.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(final SelectionEvent arg0) {
                controller.actionMenuEditFindReplace();
            }
        });
        table.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseUp(MouseEvent arg0) {
                // Menu is only usable when an attribute is selected and input
                // data with a handle is fully loaded.
                if (model != null && model.getSelectedAttribute() != null &&
                    model.getInputConfig() != null &&
                    model.getInputConfig().getInput() != null &&
                    model.getInputConfig().getInput().getHandle() != null) {
                    menu.setEnabled(true);
                } else {
                    menu.setEnabled(false);
                }
                // Button 3 = right click: pop the menu at the cursor position.
                if (arg0.button == 3 && menu.isEnabled()) {
                    Point display = table.toDisplay(arg0.x, arg0.y);
                    menu.setLocation(display.x, display.y);
                    menu.setVisible(true);
                }
            }
        });
    }
}
@Override
public void update(final ModelEvent event) {
super.update(event);
if (event.part == ModelPart.INPUT) {
// No result avail
if (event.data == null) {
reset();
return;
}
// Obtain data definition
DataDefinition definition = getDefinition();
// Check
if (definition == null) {
reset();
return;
}
// Update the table
DataHandle handle = getHandle();
// Check
if (handle == null) {
reset();
return;
}
// Use input subset
table.setResearchSubset(model.getInputConfig().getResearchSubset());
table.setGroups(null);
table.setData(handle);
// Update the attribute types
table.getHeaderImages().clear();
for (int i = 0; i < handle.getNumColumns(); i++) {
updateHeaderImage(i, definition.getAttributeType(handle.getAttributeName(i)));
}
// Redraw
table.setEnabled(true);
table.redraw();
this.enableSorting();
} else if (event.part == ModelPart.RESEARCH_SUBSET) {
// Update research subset
table.setResearchSubset((RowSet)event.data);
table.redraw();
} else if (event.part == ModelPart.ATTRIBUTE_VALUE) {
table.redraw();
} else if (event.part == ModelPart.SELECTED_VIEW_CONFIG || event.part == ModelPart.RESULT) {
// Update the table
DataHandle handle = getHandle();
// Check
if (handle == null) {
reset();
return;
}
table.setData(handle);
table.setGroups(null);
table.setResearchSubset(model.getInputConfig().getResearchSubset());
table.redraw();
} else if (event.part == ModelPart.ATTRIBUTE_TYPE) {
if (model != null){
DataHandle handle = getHandle();
if (handle != null) {
final String attr = (String) event.data;
// Obtain data definition
DataDefinition definition = getDefinition();
// Update the attribute types
final int index = handle.getColumnIndexOf(attr);
updateHeaderImage(index, definition.getAttributeType(attr));
// Redraw
table.setEnabled(true);
table.redraw();
}
}
}
}
@Override
protected void actionCellSelected(CellSelectionEvent arg1) {
super.actionCellSelected(arg1);
if (model == null) return;
int column = arg1.getColumnPosition();
int row = arg1.getRowPosition();
if (column == 0 && row >= 0) {
// Remap row index if showing the subset
if (table.getData() instanceof DataHandleSubset) {
int[] subset = ((DataHandleSubset) table.getData()).getSubset();
row = subset[row];
}
// Perform change
RowSet subset = model.getInputConfig().getResearchSubset();
if (subset.contains(row)) {
subset.remove(row);
} else {
subset.add(row);
}
// Fire event
model.setSubsetManual();
controller.update(new ModelEvent(this, ModelPart.RESEARCH_SUBSET, subset));
}
}
@Override
protected void actionSort(){
controller.actionDataSort(true);
}
@Override
protected DataDefinition getDefinition() {
if (model == null) return null;
else return model.getInputDefinition();
}
@Override
protected DataHandle getHandle() {
if (model != null){
Data data = model.getInputConfig().getInput();
if (data == null) {
return null;
}
DataHandle handle = data.getHandle();
if (model.getViewConfig().isSubset() &&
model.getOutputConfig() != null &&
model.getOutputConfig().getConfig() != null) {
handle = handle.getView();
}
return handle;
} else {
return null;
}
}
}
| |
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.rng;
import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.random.SynchronizedRandomGenerator;
import org.bytedeco.javacpp.Pointer;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.util.ArrayUtil;
/**
* Apache commons based random number generation
*
* Please note: this implementation can't be used for NativeOps execution
*
* @author Adam Gibson
*/
// TODO: make this op compatible with NativeOpExecutioner
public class DefaultRandom implements Random, RandomGenerator {

    /** Underlying Apache Commons generator; all draws are delegated to it. */
    protected RandomGenerator randomGenerator;

    /** Last seed explicitly set; stays 0 when constructed from an external generator. */
    protected long seed;

    /**
     * Initialize with a System.currentTimeMillis()
     * seed
     */
    public DefaultRandom() {
        this(System.currentTimeMillis());
    }

    /**
     * Initialize with the given seed, backed by a thread-safe MersenneTwister.
     *
     * @param seed the seed to use
     */
    public DefaultRandom(long seed) {
        this.seed = seed;
        this.randomGenerator = new SynchronizedRandomGenerator(new MersenneTwister(seed));
    }

    /**
     * Initialize with an externally supplied generator. Note that
     * {@link #getSeed()} will report 0 in this case, since the external
     * generator's seed is not observable.
     *
     * @param randomGenerator the generator to delegate to
     */
    public DefaultRandom(RandomGenerator randomGenerator) {
        this.randomGenerator = randomGenerator;
    }

    // Setters are synchronized for consistency with the synchronized getters,
    // so updates to the seed field are visible across threads.
    @Override
    public synchronized void setSeed(int seed) {
        this.seed = (long) seed;
        getRandomGenerator().setSeed(seed);
    }

    /**
     * Seeds the underlying generator from an int array. The recorded seed is a
     * prime-multiply combination of the elements (same scheme commons-math uses
     * internally), so {@link #getSeed()} remains meaningful.
     *
     * @param seed the seed array, must be non-null
     */
    @Override
    public synchronized void setSeed(int[] seed) {
        if (seed == null) {
            throw new IllegalArgumentException("Seed array must not be null");
        }
        long combined = 0;
        for (int s : seed) {
            combined = combined * 4294967291L + s;
        }
        this.seed = combined;
        getRandomGenerator().setSeed(seed);
    }

    @Override
    public synchronized void setSeed(long seed) {
        this.seed = seed;
        getRandomGenerator().setSeed(seed);
    }

    @Override
    public void nextBytes(byte[] bytes) {
        getRandomGenerator().nextBytes(bytes);
    }

    @Override
    public int nextInt() {
        return getRandomGenerator().nextInt();
    }

    @Override
    public int nextInt(int n) {
        return getRandomGenerator().nextInt(n);
    }

    @Override
    public long nextLong() {
        return getRandomGenerator().nextLong();
    }

    @Override
    public boolean nextBoolean() {
        return getRandomGenerator().nextBoolean();
    }

    @Override
    public float nextFloat() {
        return getRandomGenerator().nextFloat();
    }

    @Override
    public double nextDouble() {
        return getRandomGenerator().nextDouble();
    }

    @Override
    public double nextGaussian() {
        return getRandomGenerator().nextGaussian();
    }

    @Override
    public INDArray nextGaussian(int[] shape) {
        return nextGaussian(Nd4j.order(), shape);
    }

    /**
     * Fill a new array of the given shape and order with standard-normal draws.
     */
    @Override
    public INDArray nextGaussian(char order, int[] shape) {
        int length = ArrayUtil.prod(shape);
        INDArray ret = Nd4j.create(shape, order);

        DataBuffer data = ret.data();
        for (int i = 0; i < length; i++) {
            data.put(i, nextGaussian());
        }

        return ret;
    }

    @Override
    public INDArray nextDouble(int[] shape) {
        return nextDouble(Nd4j.order(), shape);
    }

    /**
     * Fill a new array of the given shape and order with uniform doubles in [0, 1).
     */
    @Override
    public INDArray nextDouble(char order, int[] shape) {
        int length = ArrayUtil.prod(shape);
        INDArray ret = Nd4j.create(shape, order);

        DataBuffer data = ret.data();
        for (int i = 0; i < length; i++) {
            data.put(i, nextDouble());
        }

        return ret;
    }

    @Override
    public INDArray nextFloat(int[] shape) {
        return nextFloat(Nd4j.order(), shape);
    }

    /**
     * Fill a new array of the given shape and order with uniform floats in [0, 1).
     */
    @Override
    public INDArray nextFloat(char order, int[] shape) {
        int length = ArrayUtil.prod(shape);
        INDArray ret = Nd4j.create(shape, order);

        DataBuffer data = ret.data();
        for (int i = 0; i < length; i++) {
            data.put(i, nextFloat());
        }

        return ret;
    }

    @Override
    public INDArray nextInt(int[] shape) {
        int length = ArrayUtil.prod(shape);
        INDArray ret = Nd4j.create(shape);

        DataBuffer data = ret.data();
        for (int i = 0; i < length; i++) {
            data.put(i, nextInt());
        }

        return ret;
    }

    @Override
    public INDArray nextInt(int n, int[] shape) {
        int length = ArrayUtil.prod(shape);
        INDArray ret = Nd4j.create(shape);

        DataBuffer data = ret.data();
        for (int i = 0; i < length; i++) {
            data.put(i, nextInt(n));
        }

        return ret;
    }

    public synchronized RandomGenerator getRandomGenerator() {
        return randomGenerator;
    }

    public synchronized long getSeed() {
        return this.seed;
    }

    /**
     * This method returns pointer to RNG state structure.
     * Please note: DefaultRandom implementation returns NULL here, making it impossible to use with RandomOps
     *
     * @return
     */
    @Override
    public Pointer getStatePointer() {
        return null;
    }

    /**
     * This method returns pointer to RNG buffer
     *
     * @return
     */
    @Override
    public DataBuffer getStateBuffer() {
        return null;
    }

    @Override
    public void close() throws Exception {
        // nothing to release: no native resources are held
    }

    /**
     * Identical to setSeed(System.currentTimeMillis());
     */
    @Override
    public void reSeed() {
        reSeed(System.currentTimeMillis());
    }

    /**
     * Identical to setSeed(seed);
     *
     * @param seed
     */
    @Override
    public void reSeed(long seed) {
        setSeed(seed);
    }
}
| |
package org.drools.modelcompiler.builder.generator.visitor.accumulate;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.AssignExpr;
import com.github.javaparser.ast.expr.ClassExpr;
import com.github.javaparser.ast.expr.EnclosedExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.FieldAccessExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.MethodReferenceExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.VariableDeclarationExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.Type;
import org.drools.compiler.lang.descr.AccumulateDescr;
import org.drools.compiler.lang.descr.PatternDescr;
import org.drools.modelcompiler.builder.PackageModel;
import org.drools.modelcompiler.builder.generator.DeclarationSpec;
import org.drools.modelcompiler.builder.generator.DrlxParseUtil;
import org.drools.modelcompiler.builder.generator.RuleContext;
import org.drools.modelcompiler.util.StringUtil;
import org.drools.mvelcompiler.MvelCompiler;
import org.drools.mvelcompiler.ParsingResult;
import org.drools.mvelcompiler.context.MvelCompilerContext;
import static com.github.javaparser.StaticJavaParser.parseStatement;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.addCurlyBracesToBlock;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.addSemicolon;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.forceCastForName;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.rescopeNamesToNewScope;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.ACC_FUNCTION_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.BIND_AS_CALL;
import static org.drools.modelcompiler.builder.generator.visitor.accumulate.AccumulateVisitor.collectNamesInBlock;
public class AccumulateInline {

    protected final RuleContext context;
    protected final PackageModel packageModel;

    private static final String REVERSE = "reverse";

    private final AccumulateDescr accumulateDescr;
    private final PatternDescr basePattern;

    // Generated class holding the inline accumulate functions and its nested ContextData
    private ClassOrInterfaceDeclaration accumulateInlineClass;
    private ClassOrInterfaceDeclaration contextData;
    private String accumulateInlineClassName;

    // Declarations introduced by the init block, later registered with the rule context
    private final List<DeclarationSpec> accumulateDeclarations = new ArrayList<>();
    // Names of the fields stored in ContextData (one per init-block variable)
    private final List<String> contextFieldNames = new ArrayList<>();
    // External declarations referenced by the inline code; non-empty means the
    // accumulate cannot be handled inline and must fall back to legacy handling
    private Set<String> usedExternalDeclarations = new HashSet<>();

    // Type of the single bound fact flowing through accumulate/reverse
    private Type singleAccumulateType;

    private final MvelCompiler mvelCompiler;

    Set<String> getUsedExternalDeclarations() {
        return usedExternalDeclarations;
    }

    AccumulateInline(RuleContext context,
                     PackageModel packageModel,
                     AccumulateDescr descr,
                     PatternDescr basePattern) {
        this.context = context;
        this.packageModel = packageModel;
        this.accumulateDescr = descr;
        this.basePattern = basePattern;

        MvelCompilerContext mvelCompilerContext = new MvelCompilerContext(context.getTypeResolver());
        for (DeclarationSpec ds : context.getAllDeclarations()) {
            mvelCompilerContext.addDeclaration(ds.getBindingId(), ds.getDeclarationClass());
        }
        mvelCompiler = new MvelCompiler(mvelCompilerContext);
        singleAccumulateType = null;
    }

    /**
     * By design this legacy accumulate (with inline custom code) visitor supports only 1-and-only binding in the accumulate code/expressions.
     *
     * @throws UnsupportedInlineAccumulate when the inline code references external declarations
     */
    void visitAccInlineCustomCode(MethodCallExpr accumulateDSL, Set<String> externalDeclarations, String identifier) {
        initInlineAccumulateTemplate();

        context.pushExprPointer(accumulateDSL::addArgument);
        parseInitBlock();
        Collection<String> allNamesInActionBlock = parseActionBlock(externalDeclarations);
        parseReverseBlock(externalDeclarations, allNamesInActionBlock);
        parseResultMethod();

        // Inline accumulate cannot reference declarations bound outside of it
        if (!usedExternalDeclarations.isEmpty()) {
            throw new UnsupportedInlineAccumulate();
        }

        for (DeclarationSpec d : accumulateDeclarations) {
            context.addDeclaration(d);
        }

        addAccumulateClassInitializationToMethod(accumulateDSL, identifier);
        context.popExprPointer();
    }

    /**
     * Loads the AccumulateInlineTemplate resource and renames it (and its
     * CONTEXT_DATA_GENERIC placeholders) to a rule-specific class name.
     */
    private void initInlineAccumulateTemplate() {
        accumulateInlineClassName = StringUtil.toId(context.getRuleDescr().getName()) + "Accumulate" + accumulateDescr.getLine();

        CompilationUnit templateCU;
        try {
            templateCU = StaticJavaParser.parseResource("AccumulateInlineTemplate.java");
        } catch (IOException e) {
            throw new InvalidInlineTemplateException(e);
        }

        ClassOrInterfaceDeclaration parsedClass =
                templateCU
                        .getClassByName("AccumulateInlineFunction")
                        .orElseThrow(InvalidInlineTemplateException::new);

        parsedClass.setName(accumulateInlineClassName);
        parsedClass.findAll(ClassOrInterfaceType.class, c -> "CONTEXT_DATA_GENERIC".equals(c.asString()))
                .forEach(c -> c.setName(accumulateInlineClassName + ".ContextData"));

        this.accumulateInlineClass = parsedClass;

        contextData = this.accumulateInlineClass.findFirst(ClassOrInterfaceDeclaration.class
                , c -> "ContextData".equals(c.getNameAsString()))
                .orElseThrow(InvalidInlineTemplateException::new);
    }

    /**
     * Compiles the accumulate init code: every variable declared there becomes
     * a public field on ContextData, its initializer becomes a statement in the
     * generated init() method, and a matching DeclarationSpec is recorded.
     */
    private void parseInitBlock() {
        MethodDeclaration initMethod = getMethodFromTemplateClass("init");

        String mvelBlock = addCurlyBracesToBlock(addSemicolon(accumulateDescr.getInitCode()));
        ParsingResult initCodeCompilationResult = mvelCompiler.compile(mvelBlock);

        BlockStmt initBlock = initCodeCompilationResult.statementResults();

        for (Statement stmt : initBlock.getStatements()) {
            final BlockStmt initMethodBody = initMethod.getBody().orElseThrow(InvalidInlineTemplateException::new);
            if (stmt.isExpressionStmt() && stmt.asExpressionStmt().getExpression().isVariableDeclarationExpr()) {
                VariableDeclarationExpr vdExpr = stmt.asExpressionStmt().getExpression().asVariableDeclarationExpr();
                for (VariableDeclarator vd : vdExpr.getVariables()) {
                    final String variableName = vd.getNameAsString();
                    contextFieldNames.add(variableName);
                    contextData.addField(vd.getType(), variableName, Modifier.publicModifier().getKeyword());
                    Optional<Expression> optInitializer = vd.getInitializer();
                    optInitializer.ifPresent(initializer -> {
                        // Rewrite "T x = init" into "data.x = init" inside init()
                        Expression target = new FieldAccessExpr(getDataNameExpr(), variableName);
                        Statement initStmt = new ExpressionStmt(new AssignExpr(target, initializer, AssignExpr.Operator.ASSIGN));
                        initMethodBody.addStatement(initStmt);
                        // Any name resolving to a rule declaration makes this accumulate non-inlinable
                        initStmt.findAll(NameExpr.class).stream().map(Node::toString).filter(context::hasDeclaration).forEach(usedExternalDeclarations::add);
                    });
                    accumulateDeclarations.add(new DeclarationSpec(variableName, DrlxParseUtil.getClassFromContext(context.getTypeResolver(), vd.getType().asString())));
                }
            }
        }
    }

    /**
     * Copies the statements of an action/reverse block into the given template
     * method, casting the bound fact to its declared type and rescoping context
     * field references through the "data" parameter.
     */
    private void writeAccumulateMethod(List<String> contextFieldNames, MethodDeclaration accumulateMethod, BlockStmt actionBlock) {
        for (Statement stmt : actionBlock.getStatements()) {
            final ExpressionStmt convertedExpressionStatement = new ExpressionStmt();
            for (ExpressionStmt eStmt : stmt.findAll(ExpressionStmt.class)) {
                final Expression expressionUntyped = eStmt.getExpression();
                final String parameterName = accumulateMethod.getParameter(1).getNameAsString();

                forceCastForName(parameterName, singleAccumulateType, expressionUntyped);
                rescopeNamesToNewScope(getDataNameExpr(), contextFieldNames, expressionUntyped);

                convertedExpressionStatement.setExpression(expressionUntyped);
            }
            accumulateMethod.getBody().orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(convertedExpressionStatement);
        }
    }

    /**
     * Compiles the accumulate action code into the generated accumulate()
     * method. Exactly one bound name is supported; anything else aborts inlining.
     *
     * @return the names found in the action block (used when parsing the reverse block)
     */
    private Collection<String> parseActionBlock(Set<String> externalDeclarations) {
        MethodDeclaration accumulateMethod = getMethodFromTemplateClass("accumulate");

        String actionCode = accumulateDescr.getActionCode();

        if (blockIsNonEmptyWithoutSemicolon(actionCode)) {
            throw new MissingSemicolonInlineAccumulateException("action");
        }

        ParsingResult actionBlockCompilationResult = mvelCompiler.compile(addCurlyBracesToBlock(actionCode));

        BlockStmt actionBlock = actionBlockCompilationResult.statementResults();

        Collection<String> allNamesInActionBlock = collectNamesInBlock(actionBlock, context);

        if (allNamesInActionBlock.size() == 1) {
            String nameExpr = allNamesInActionBlock.iterator().next();
            accumulateMethod.getParameter(1).setName(nameExpr);
            singleAccumulateType =
                    context.getDeclarationById(nameExpr)
                            .orElseThrow(() -> new IllegalStateException("Cannot find declaration by name " + nameExpr + "!"))
                            .getBoxedType();

            writeAccumulateMethod(contextFieldNames, accumulateMethod, actionBlock);
        } else {
            // More than one bound name: record the externals and fall back
            allNamesInActionBlock.removeIf(name -> !externalDeclarations.contains(name));
            usedExternalDeclarations.addAll(allNamesInActionBlock);
            throw new UnsupportedInlineAccumulate();
        }
        return allNamesInActionBlock;
    }

    /**
     * Compiles the optional reverse code. When absent, supportsReverse() is made
     * to return false and reverse() throws UnsupportedOperationException.
     */
    private void parseReverseBlock(Set<String> externalDeclarations, Collection<String> allNamesInActionBlock) {
        String reverseCode = accumulateDescr.getReverseCode();

        if (reverseCode != null) {
            if (blockIsNonEmptyWithoutSemicolon(reverseCode)) {
                throw new MissingSemicolonInlineAccumulateException(REVERSE);
            }

            // Only compile the reverse code when it actually exists: previously a
            // null block was passed to the MVEL compiler before the null check
            ParsingResult reverseBlockCompilationResult = mvelCompiler.compile(addCurlyBracesToBlock(reverseCode));
            BlockStmt reverseBlock = reverseBlockCompilationResult.statementResults();

            Collection<String> allNamesInReverseBlock = collectNamesInBlock(reverseBlock, context);

            if (allNamesInReverseBlock.size() == 1) {
                MethodDeclaration reverseMethod = getMethodFromTemplateClass(REVERSE);
                reverseMethod.getParameter(1).setName(allNamesInReverseBlock.iterator().next());
                writeAccumulateMethod(contextFieldNames, reverseMethod, reverseBlock);

                MethodDeclaration supportsReverseMethod = getMethodFromTemplateClass("supportsReverse");
                supportsReverseMethod
                        .getBody()
                        .orElseThrow(InvalidInlineTemplateException::new)
                        .addStatement(parseStatement("return true;"));
            } else {
                allNamesInActionBlock.removeIf(name -> !externalDeclarations.contains(name));
                usedExternalDeclarations.addAll(allNamesInActionBlock);
                throw new UnsupportedInlineAccumulate();
            }
        } else {
            MethodDeclaration supportsReverseMethod = getMethodFromTemplateClass("supportsReverse");
            supportsReverseMethod
                    .getBody()
                    .orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(parseStatement("return false;"));

            MethodDeclaration reverseMethod = getMethodFromTemplateClass(REVERSE);
            reverseMethod
                    .getBody()
                    .orElseThrow(InvalidInlineTemplateException::new)
                    .addStatement(parseStatement("throw new UnsupportedOperationException(\"This function does not support reverse.\");"));
        }
    }

    private void parseResultMethod() {
        // <result expression>: this is a semantic expression in the selected dialect that is executed after all source objects are iterated.
        MethodDeclaration resultMethod = getMethodFromTemplateClass("getResult");
        Type returnExpressionType = StaticJavaParser.parseType("java.lang.Object");
        Expression returnExpression = StaticJavaParser.parseExpression(accumulateDescr.getResultCode());
        if (returnExpression instanceof NameExpr) {
            // Parenthesize a bare name so later rewrites keep a valid expression
            returnExpression = new EnclosedExpr(returnExpression);
        }
        rescopeNamesToNewScope(getDataNameExpr(), contextFieldNames, returnExpression);

        resultMethod
                .getBody()
                .orElseThrow(InvalidInlineTemplateException::new)
                .addStatement(new ReturnStmt(returnExpression));

        MethodDeclaration getResultTypeMethod = getMethodFromTemplateClass("getResultType");
        getResultTypeMethod
                .getBody()
                .orElseThrow(InvalidInlineTemplateException::new)
                .addStatement(new ReturnStmt(new ClassExpr(returnExpressionType)));
    }

    /**
     * Registers the generated class with the package model and appends the
     * accFunction(...).as(...) call to the accumulate DSL expression.
     */
    private void addAccumulateClassInitializationToMethod(MethodCallExpr accumulateDSL, String identifier) {
        this.packageModel.addGeneratedPOJO(accumulateInlineClass);

        final MethodCallExpr functionDSL = new MethodCallExpr(null, ACC_FUNCTION_CALL);
        functionDSL.addArgument(new MethodReferenceExpr(new NameExpr(accumulateInlineClassName), new NodeList<>(), "new"));
        functionDSL.addArgument(context.getVarExpr(identifier));

        final String bindingId = this.basePattern.getIdentifier();
        final MethodCallExpr asDSL = new MethodCallExpr(functionDSL, BIND_AS_CALL);
        asDSL.addArgument(context.getVarExpr(bindingId));
        accumulateDSL.addArgument(asDSL);
    }

    /** Name of the context-data parameter used by the template methods. */
    private NameExpr getDataNameExpr() {
        return new NameExpr("data");
    }

    /** Looks up a method of the generated template class by name. */
    private MethodDeclaration getMethodFromTemplateClass(String methodName) {
        return accumulateInlineClass.getMethodsByName(methodName).get(0);
    }

    /** A non-empty code block missing its trailing semicolon is a user error. */
    private boolean blockIsNonEmptyWithoutSemicolon(String block) {
        return !"".equals(block) && !block.endsWith(";");
    }
}
| |
/*
* TypeSubstitutionVisitor.java
*
* Copyright (c) 2013 Mike Strobel
*
* This source code is based on Mono.Cecil from Jb Evain, Copyright (c) Jb Evain;
* and ILSpy/ICSharpCode from SharpDevelop, Copyright (c) AlphaSierraPapa.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0.
* A copy of the license can be found in the License.html file at the root of this distribution.
* By using this source code in any fashion, you are agreeing to be bound by the terms of the
* Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*/
package com.strobel.assembler.metadata;
import com.strobel.core.ArrayUtilities;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Applies a type-substitution map to types, methods and fields, returning new
 * metadata objects only when something actually changed (copy-on-write: all
 * "did it change" checks below are reference-identity comparisons).
 */
public final class TypeSubstitutionVisitor extends DefaultTypeVisitor<Map<TypeReference, TypeReference>, TypeReference>
    implements MethodMetadataVisitor<Map<TypeReference, TypeReference>, MethodReference>,
               FieldMetadataVisitor<Map<TypeReference, TypeReference>, FieldReference> {

    // The visitor is stateless, so a single shared instance suffices.
    private final static TypeSubstitutionVisitor INSTANCE = new TypeSubstitutionVisitor();

    /** Returns the shared, stateless visitor instance. */
    public static TypeSubstitutionVisitor instance() {
        return INSTANCE;
    }

    /**
     * Entry point: substitutes within {@code t} according to {@code map}.
     * An empty map is a no-op fast path.
     */
    public TypeReference visit(final TypeReference t, final Map<TypeReference, TypeReference> map) {
        if (map.isEmpty()) {
            return t;
        }
        return t.accept(this, map);
    }

    /** Substitutes the element type; rebuilds the array type only if it changed. */
    @Override
    public TypeReference visitArrayType(final ArrayType t, final Map<TypeReference, TypeReference> map) {
        final TypeReference elementType = visit(t.getElementType(), map);

        if (elementType != null && elementType != t.getElementType()) {
            return elementType.makeArrayType();
        }

        return t;
    }

    /**
     * Follows the substitution chain for a generic parameter. The loop guards
     * against two-element cycles (a -> b -> a) by stopping when the next mapping
     * would lead back to the current type.
     */
    @Override
    public TypeReference visitGenericParameter(final GenericParameter t, final Map<TypeReference, TypeReference> map) {
        TypeReference current = t;
        TypeReference mappedType;

        while ((mappedType = map.get(current)) != null &&
               mappedType != current &&
               map.get(mappedType) != current) {
            current = mappedType;
        }

        // NOTE(review): 'current' starts non-null and is only ever assigned a
        // non-null mappedType, so this check looks unreachable — kept as a
        // defensive guard.
        if (current == null) {
            return t;
        }

        // A primitive substitution is normalized to its boxed reference type.
        if (current.isPrimitive()) {
            switch (current.getSimpleType()) {
                case Boolean:
                    return CommonTypeReferences.Boolean;
                case Byte:
                    return CommonTypeReferences.Byte;
                case Character:
                    return CommonTypeReferences.Character;
                case Short:
                    return CommonTypeReferences.Short;
                case Integer:
                    return CommonTypeReferences.Integer;
                case Long:
                    return CommonTypeReferences.Long;
                case Float:
                    return CommonTypeReferences.Float;
                case Double:
                    return CommonTypeReferences.Double;
                case Void:
                    return CommonTypeReferences.Void;
            }
        }

        return current;
    }

    /**
     * Substitutes within a wildcard's bound, unwrapping nested wildcards, and
     * rebuilds an extends/super wildcard only when the bound changed.
     */
    @Override
    public TypeReference visitWildcard(final WildcardType t, final Map<TypeReference, TypeReference> map) {
        if (t.isUnbounded()) {
            return t;
        }

        final TypeReference oldBound = t.hasExtendsBound() ? t.getExtendsBound() : t.getSuperBound();
        final TypeReference mapping = map.get(oldBound);

        // If the bound maps back to this wildcard itself, leave it untouched.
        if (MetadataResolver.areEquivalent(mapping, t)) {
            return t;
        }

        TypeReference newBound = visit(oldBound, map);

        // Collapse wildcard-of-wildcard results down to a concrete bound.
        while (newBound.isWildcardType()) {
            if (newBound.isUnbounded()) {
                return newBound;
            }

            newBound = newBound.hasExtendsBound() ? newBound.getExtendsBound()
                                                  : newBound.getSuperBound();
        }

        if (oldBound != newBound) {
            return t.hasExtendsBound() ? WildcardType.makeExtends(newBound)
                                       : WildcardType.makeSuper(newBound);
        }

        return t;
    }

    /**
     * Substitutes within a compound type's base type and interfaces. The
     * interface array is only allocated (copy-on-write) once a change is seen.
     */
    @Override
    public TypeReference visitCompoundType(final CompoundTypeReference t, final Map<TypeReference, TypeReference> map) {
        final TypeReference oldBaseType = t.getBaseType();
        final TypeReference newBaseType = oldBaseType != null ? visit(oldBaseType, map) : null;

        TypeReference[] newInterfaces = null;
        boolean changed = newBaseType != oldBaseType;

        final List<TypeReference> oldInterfaces = t.getInterfaces();

        for (int i = 0; i < oldInterfaces.size(); i++) {
            final TypeReference oldInterface = oldInterfaces.get(i);
            final TypeReference newInterface = visit(oldInterface, map);

            if (newInterfaces != null) {
                newInterfaces[i] = newInterface;
            }
            else if (oldInterface != newInterface) {
                // First change: copy everything seen so far, then patch index i.
                newInterfaces = new TypeReference[oldInterfaces.size()];
                oldInterfaces.toArray(newInterfaces);
                newInterfaces[i] = newInterface;
                changed = true;
            }
        }

        if (changed) {
            return new CompoundTypeReference(
                newBaseType,
                newInterfaces != null ? ArrayUtilities.asUnmodifiableList(newInterfaces)
                                      : t.getInterfaces()
            );
        }

        return t;
    }

    /**
     * Substitutes within the type arguments of a generic instance; rebuilds the
     * parameterized type only when at least one argument changed.
     */
    @Override
    public TypeReference visitParameterizedType(final TypeReference t, final Map<TypeReference, TypeReference> map) {
        final List<TypeReference> oldTypeArguments = ((IGenericInstance) t).getTypeArguments();

        TypeReference[] newTypeArguments = null;
        boolean changed = false;

        for (int i = 0; i < oldTypeArguments.size(); i++) {
            final TypeReference oldTypeArgument = oldTypeArguments.get(i);
            final TypeReference newTypeArgument = visit(oldTypeArgument, map);

            if (newTypeArguments != null) {
                newTypeArguments[i] = newTypeArgument;
            }
            else if (oldTypeArgument != newTypeArgument) {
                // Copy-on-write: allocate and back-fill on the first change.
                newTypeArguments = new TypeReference[oldTypeArguments.size()];
                oldTypeArguments.toArray(newTypeArguments);
                newTypeArguments[i] = newTypeArgument;
                changed = true;
            }
        }

        if (changed) {
            return t.makeGenericType(newTypeArguments);
        }

        return t;
    }

    @Override
    public TypeReference visitPrimitiveType(final PrimitiveType t, final Map<TypeReference, TypeReference> map) {
        return t;
    }

    /**
     * Substitutes within the generic parameters of a class type's generic
     * definition, producing a parameterized instance when anything changed.
     */
    @Override
    @SuppressWarnings("unchecked")
    public TypeReference visitClassType(final TypeReference t, final Map<TypeReference, TypeReference> map) {
        // Resolve to find the generic definition (unless t already is generic).
        final TypeReference resolvedType = t.isGenericType() ? t : t.resolve();

        if (resolvedType == null || !resolvedType.isGenericDefinition()) {
            return t;
        }

        final List<TypeReference> oldTypeArguments = (List<TypeReference>) (Object) resolvedType.getGenericParameters();

        TypeReference[] newTypeArguments = null;
        boolean changed = false;

        for (int i = 0; i < oldTypeArguments.size(); i++) {
            final TypeReference oldTypeArgument = oldTypeArguments.get(i);
            final TypeReference newTypeArgument = visit(oldTypeArgument, map);

            if (newTypeArguments != null) {
                newTypeArguments[i] = newTypeArgument;
            }
            else if (oldTypeArgument != newTypeArgument) {
                // Copy-on-write, same pattern as visitParameterizedType.
                newTypeArguments = new TypeReference[oldTypeArguments.size()];
                oldTypeArguments.toArray(newTypeArguments);
                newTypeArguments[i] = newTypeArgument;
                changed = true;
            }
        }

        if (changed) {
            return t.makeGenericType(newTypeArguments);
        }

        return t;
    }

    @Override
    public TypeReference visitNullType(final TypeReference t, final Map<TypeReference, TypeReference> map) {
        return t;
    }

    @Override
    public TypeReference visitBottomType(final TypeReference t, final Map<TypeReference, TypeReference> map) {
        return t;
    }

    @Override
    public TypeReference visitRawType(final RawType t, final Map<TypeReference, TypeReference> map) {
        return t;
    }

    @Override
    public MethodReference visitParameterizedMethod(final MethodReference m, final Map<TypeReference, TypeReference> map) {
        return visitMethod(m, map);
    }

    /**
     * Substitutes within a method's type arguments, return type, and parameter
     * types, producing a GenericMethodInstance only when something changed.
     */
    @Override
    @SuppressWarnings("unchecked")
    public MethodReference visitMethod(final MethodReference m, final Map<TypeReference, TypeReference> map) {
        final MethodDefinition resolvedMethod = m.resolve();
        final List<TypeReference> oldTypeArguments;
        final List<TypeReference> newTypeArguments;

        // A generic instance carries explicit arguments; a generic definition
        // contributes its own parameters; otherwise there is nothing to map.
        if (m instanceof IGenericInstance) {
            oldTypeArguments = ((IGenericInstance) m).getTypeArguments();
        }
        else if (m.isGenericDefinition()) {
            oldTypeArguments = (List<TypeReference>) (Object) m.getGenericParameters();
        }
        else {
            oldTypeArguments = Collections.emptyList();
        }

        newTypeArguments = visitTypes(oldTypeArguments, map);

        final TypeReference oldReturnType = m.getReturnType();
        final TypeReference newReturnType = visit(oldReturnType, map);
        final List<ParameterDefinition> oldParameters = m.getParameters();
        final List<ParameterDefinition> newParameters = visitParameters(oldParameters, map);

        if (newTypeArguments != oldTypeArguments ||
            newReturnType != oldReturnType ||
            newParameters != oldParameters) {

            return new GenericMethodInstance(
                visit(m.getDeclaringType(), map),
                resolvedMethod != null ? resolvedMethod : m,
                newReturnType,
                // Even unchanged parameters are copied, since the new method
                // instance must own its own parameter definitions.
                newParameters == oldParameters ? MetadataHelper.copyParameters(oldParameters)
                                               : newParameters,
                newTypeArguments
            );
        }

        return m;
    }

    /**
     * Substitutes within a captured type's bounds and wildcard; rebuilds the
     * capture only when any of the three components changed.
     */
    @Override
    public TypeReference visitCapturedType(
        final CapturedType t,
        final Map<TypeReference, TypeReference> map) {

        final TypeReference oldExtendsBound = t.getExtendsBound();
        final TypeReference oldSuperBound = t.getSuperBound();
        final TypeReference oldWildcard = t.getWildcard();

        final TypeReference newExtendsBound = visit(oldExtendsBound, map);
        final TypeReference newSuperBound = visit(oldSuperBound, map);
        final TypeReference newWildcard = visitWildcard((WildcardType) oldWildcard, map);

        if (newExtendsBound != oldExtendsBound ||
            newSuperBound != oldSuperBound ||
            newWildcard != oldWildcard) {

            // NOTE(review): argument order here is (superBound, extendsBound,
            // wildcard) — confirm against the CapturedType constructor.
            return new CapturedType(newSuperBound, newExtendsBound, (WildcardType) newWildcard);
        }

        return t;
    }

    /**
     * Substitutes within a list of types; returns the original list unchanged
     * (same reference) when no element changed, otherwise an unmodifiable copy.
     */
    protected List<TypeReference> visitTypes(
        final List<TypeReference> types,
        final Map<TypeReference, TypeReference> map) {

        TypeReference[] newTypes = null;
        boolean changed = false;

        for (int i = 0; i < types.size(); i++) {
            final TypeReference oldTypeArgument = types.get(i);
            final TypeReference newTypeArgument = visit(oldTypeArgument, map);

            if (newTypes != null) {
                newTypes[i] = newTypeArgument;
            }
            else if (oldTypeArgument != newTypeArgument) {
                // Copy-on-write: allocate and back-fill on the first change.
                newTypes = new TypeReference[types.size()];
                types.toArray(newTypes);
                newTypes[i] = newTypeArgument;
                changed = true;
            }
        }

        return changed ? ArrayUtilities.asUnmodifiableList(newTypes)
                       : types;
    }

    /**
     * Substitutes within parameter types; callers rely on getting back the SAME
     * list reference when nothing changed (see visitMethod's identity check).
     */
    protected List<ParameterDefinition> visitParameters(
        final List<ParameterDefinition> parameters,
        final Map<TypeReference, TypeReference> map) {

        if (parameters.isEmpty()) {
            return parameters;
        }

        ParameterDefinition[] newParameters = null;
        boolean changed = false;

        for (int i = 0; i < parameters.size(); i++) {
            final ParameterDefinition oldParameter = parameters.get(i);
            final TypeReference oldType = oldParameter.getParameterType();
            final TypeReference newType = visit(oldType, map);
            final ParameterDefinition newParameter;

            // Keep the original slot; only the type is replaced.
            newParameter = oldType != newType ? new ParameterDefinition(oldParameter.getSlot(), newType)
                                              : oldParameter;

            if (newParameters != null) {
                newParameters[i] = newParameter;
            }
            else if (oldType != newType) {
                newParameters = new ParameterDefinition[parameters.size()];
                parameters.toArray(newParameters);
                newParameters[i] = newParameter;
                changed = true;
            }
        }

        return changed ? ArrayUtilities.asUnmodifiableList(newParameters)
                       : parameters;
    }

    /**
     * Substitutes within a field's type. When it changes, an anonymous
     * FieldReference is returned that preserves the original name and declaring
     * type but reports the substituted field type.
     */
    @Override
    public FieldReference visitField(final FieldReference f, final Map<TypeReference, TypeReference> map) {
        final TypeReference oldFieldType = f.getFieldType();
        final TypeReference newFieldType = visit(oldFieldType, map);

        if (newFieldType != oldFieldType) {
            final TypeReference declaringType = f.getDeclaringType();

            return new FieldReference() {
                // Captured eagerly so the wrapper is independent of 'f' later.
                private final String _name = f.getName();
                private final TypeReference _type = newFieldType;

                @Override
                public TypeReference getFieldType() {
                    return _type;
                }

                @Override
                public TypeReference getDeclaringType() {
                    return declaringType;
                }

                @Override
                public String getName() {
                    return _name;
                }

                @Override
                protected StringBuilder appendName(final StringBuilder sb, final boolean fullName, final boolean dottedName) {
                    // Fully qualified form: DeclaringType.fieldName
                    if (fullName) {
                        final TypeReference declaringType = getDeclaringType();

                        if (declaringType != null) {
                            return declaringType.appendName(sb, true, false).append('.').append(getName());
                        }
                    }

                    return sb.append(_name);
                }
            };
        }

        return f;
    }
}
| |
/* $Id$ */
package ibis.io.rewriter;
import java.util.Arrays;
import org.apache.bcel.Constants;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import org.apache.bcel.generic.AALOAD;
import org.apache.bcel.generic.AASTORE;
import org.apache.bcel.generic.ACONST_NULL;
import org.apache.bcel.generic.ALOAD;
import org.apache.bcel.generic.ARETURN;
import org.apache.bcel.generic.ARRAYLENGTH;
import org.apache.bcel.generic.ASTORE;
import org.apache.bcel.generic.ATHROW;
import org.apache.bcel.generic.ArrayType;
import org.apache.bcel.generic.BasicType;
import org.apache.bcel.generic.ClassGen;
import org.apache.bcel.generic.ConstantPoolGen;
import org.apache.bcel.generic.DUP;
import org.apache.bcel.generic.FieldGen;
import org.apache.bcel.generic.GOTO;
import org.apache.bcel.generic.IAND;
import org.apache.bcel.generic.ICONST;
import org.apache.bcel.generic.IFEQ;
import org.apache.bcel.generic.IF_ACMPEQ;
import org.apache.bcel.generic.IF_ICMPEQ;
import org.apache.bcel.generic.IF_ICMPGT;
import org.apache.bcel.generic.IF_ICMPLT;
import org.apache.bcel.generic.IF_ICMPNE;
import org.apache.bcel.generic.IINC;
import org.apache.bcel.generic.ILOAD;
import org.apache.bcel.generic.ISTORE;
import org.apache.bcel.generic.Instruction;
import org.apache.bcel.generic.InstructionFactory;
import org.apache.bcel.generic.InstructionHandle;
import org.apache.bcel.generic.InstructionList;
import org.apache.bcel.generic.LDC;
import org.apache.bcel.generic.MethodGen;
import org.apache.bcel.generic.NOP;
import org.apache.bcel.generic.ObjectType;
import org.apache.bcel.generic.PUSH;
import org.apache.bcel.generic.RETURN;
import org.apache.bcel.generic.ReferenceType;
import org.apache.bcel.generic.SIPUSH;
import org.apache.bcel.generic.SWITCH;
import org.apache.bcel.generic.Type;
/**
* The CodeGenerator is responsible for generation of the actual bytecode
* used at runtime to do serialization.
*
* @author Nick Palmer (npr200@few.vu.nl)
*
*/
class CodeGenerator implements RewriterConstants {
    /** The class currently being rewritten. */
    protected JavaClass clazz;
    /** Mutable BCEL generator wrapping {@link #clazz}. */
    protected ClassGen gen;
    /** Fully-qualified name of {@link #clazz}. */
    protected String classname;
    /** Fully-qualified name of the superclass of {@link #clazz}. */
    protected String super_classname;
    /** Resolved superclass; may be null when {@link #lookupClass} fails. */
    protected JavaClass super_class;
    /** Result of SerializationInfo.isSerializable(super_class). */
    protected boolean super_is_serializable;
    /** Result of SerializationInfo.isIbisSerializable(super_class). */
    protected boolean super_is_ibis_serializable;
    /** Result of SerializationInfo.hasIbisConstructor(super_class). */
    protected boolean super_has_ibis_constructor;
    /** Result of SerializationInfo.isExternalizable(clazz). */
    protected boolean is_externalizable;
    /** Result of SerializationInfo.hasSerialPersistentFields(fields). */
    protected boolean has_serial_persistent_fields;
    /** Result of SerializationInfo.hasFinalFields(fields). */
    protected boolean has_final_fields;
    /** Fields of the class, sorted alphabetically by the constructor. */
    protected Field[] fields;
    /** Methods of the class as obtained from the class generator. */
    protected Method[] methods;
    /** Factory used to emit instructions for the rewritten class. */
    protected InstructionFactory factory;
    /** Constant pool of the class being rewritten. */
    protected ConstantPoolGen constantpool;
    /** The driving IOGenerator (verbosity, options, class replacement). */
    protected IOGenerator generator;
    /** Whether the class being rewritten is abstract. */
    private boolean is_abstract;
public static JavaClass lookupClass(String name) {
try {
return Repository.lookupClass(name);
} catch(ClassNotFoundException e) {
System.err.println("Warning: class " + name + " not found");
return null;
}
}
    /**
     * Prepares a code generator for the given class: caches the BCEL views
     * (class generator, fields, methods, constant pool, instruction factory),
     * ensures a serialVersionUID is present, sorts the fields, and computes
     * the serialization-related properties of the class and its superclass.
     *
     * @param generator the driving IOGenerator (options, class replacement)
     * @param cl the class to rewrite
     */
    CodeGenerator(IOGenerator generator, JavaClass cl) {
        this.generator = generator;
        clazz = cl;
        gen = new ClassGen(clazz);
        classname = clazz.getClassName();
        super_classname = clazz.getSuperclassName();
        super_class = lookupClass(super_classname);
        fields = gen.getFields();
        methods = gen.getMethods();
        factory = new InstructionFactory(gen);
        constantpool = gen.getConstantPool();
        // May add a serialVersionUID field and refreshes this.fields, so it
        // must run before the sort below.
        versionUID();
        /* getFields() does not specify or guarantee a specific order.
         * Therefore, we sort the fields alphabetically, and the
         * serialization code in ibis.io should do the same.
         */
        Arrays.sort(fields, SerializationInfo.fieldComparator);
        super_is_serializable = SerializationInfo.isSerializable(super_class);
        is_externalizable = SerializationInfo.isExternalizable(cl);
        is_abstract = cl.isAbstract();
        super_is_ibis_serializable = SerializationInfo.isIbisSerializable(super_class);
        super_has_ibis_constructor = SerializationInfo.hasIbisConstructor(super_class);
        has_serial_persistent_fields = SerializationInfo.hasSerialPersistentFields(fields);
        has_final_fields = SerializationInfo.hasFinalFields(fields);
    }
/**
* Get the serialversionuid of a class that is about to be
* rewritten. If necessary, a serialVersionUID field is added.
*/
private void versionUID() {
for (int i = 0; i < fields.length; i++) {
Field f = fields[i];
if (f.getName().equals(FIELD_SERIAL_VERSION_UID) && f.isFinal()
&& f.isStatic()) {
/* Already present. Just return. */
return;
}
}
long uid = 0;
uid = SerializationInfo.getSerialVersionUID(classname, clazz);
if (uid != 0) {
FieldGen f = new FieldGen(Constants.ACC_PRIVATE
| Constants.ACC_FINAL | Constants.ACC_STATIC,
Type.LONG, FIELD_SERIAL_VERSION_UID, constantpool);
f.setInitValue(uid);
gen.addField(f.getField());
fields = gen.getFields();
}
}
private Instruction createGeneratedWriteObjectInvocation(String name,
short invmode) {
return factory.createInvoke(name, METHOD_GENERATED_WRITE_OBJECT,
Type.VOID, ibis_output_stream_arrtp, invmode);
}
private Instruction createGeneratedDefaultReadObjectInvocation(
String name, InstructionFactory fac, short invmode) {
return fac.createInvoke(name, METHOD_GENERATED_DEFAULT_READ_OBJECT,
Type.VOID, new Type[] { ibis_input_stream, Type.INT },
invmode);
}
private Instruction createInitInvocation(String name,
InstructionFactory f) {
return f.createInvoke(name, METHOD_INIT, Type.VOID,
ibis_input_stream_arrtp, Constants.INVOKESPECIAL);
}
private Instruction createGeneratedDefaultWriteObjectInvocation(
String name) {
return factory.createInvoke(name, METHOD_GENERATED_DEFAULT_WRITE_OBJECT,
Type.VOID, new Type[] { ibis_output_stream, Type.INT },
Constants.INVOKESPECIAL);
}
private Instruction createWriteObjectInvocation() {
return factory.createInvoke(classname, METHOD_WRITE_OBJECT, Type.VOID,
new Type[] { sun_output_stream }, Constants.INVOKESPECIAL);
}
private int getClassDepth(JavaClass cl) {
if (!SerializationInfo.isSerializable(cl)) {
return 0;
}
return 1 + getClassDepth(lookupClass(
cl.getSuperclassName()));
}
    /**
     * Adds the ibis.io.Serializable interface and empty placeholder methods
     * (generated_WriteObject, generated_DefaultWriteObject,
     * generated_DefaultReadObject, and — when usable — a read-of-the-stream
     * constructor or a readObject wrapper) to the class, then registers the
     * new class structure with the generator. The bodies emitted here are a
     * bare RETURN; presumably they are filled in by a later rewriting pass —
     * confirm in IOGenerator.
     */
    void generateEmptyMethods() {
        /* Generate the necessary (empty) methods. */
        if (generator.isVerbose()) {
            System.out.println("  Generating empty methods for class : "
                    + classname);
            System.out.println("    " + classname
                    + " implements java.io.Serializable -> adding "
                    + TYPE_IBIS_IO_SERIALIZABLE);
        }
        /* add the ibis.io.Serializable interface to the class */
        gen.addInterface(TYPE_IBIS_IO_SERIALIZABLE);
        /* Construct a write method */
        InstructionList il = new InstructionList();
        il.append(new RETURN());
        // Mirror the class's finality on the generated methods.
        int flags = Constants.ACC_PUBLIC
                | (gen.isFinal() ? Constants.ACC_FINAL : 0);
        MethodGen write_method = new MethodGen(flags, Type.VOID,
                ibis_output_stream_arrtp, new String[] { VARIABLE_OUTPUT_STREAM },
                METHOD_GENERATED_WRITE_OBJECT, classname, il, constantpool);
        write_method.addException(TYPE_JAVA_IO_IOEXCEPTION);
        gen.addMethod(write_method.getMethod());
        /* ... and a default_write_method */
        il = new InstructionList();
        il.append(new RETURN());
        MethodGen default_write_method = new MethodGen(flags, Type.VOID,
                new Type[] { ibis_output_stream, Type.INT }, new String[] {
                        VARIABLE_OUTPUT_STREAM, VARIABLE_LEVEL },
                METHOD_GENERATED_DEFAULT_WRITE_OBJECT,
                classname, il, constantpool);
        default_write_method.addException(TYPE_JAVA_IO_IOEXCEPTION);
        gen.addMethod(default_write_method.getMethod());
        /* ... and a default_read_method */
        il = new InstructionList();
        il.append(new RETURN());
        // NOTE(review): the parameter here is named VARIABLE_OUTPUT_STREAM
        // although this is a *read* method — looks like a copy-paste slip,
        // but it only affects the debug parameter name; confirm before
        // changing.
        MethodGen default_read_method = new MethodGen(flags, Type.VOID,
                new Type[] { ibis_input_stream, Type.INT }, new String[] {
                        VARIABLE_OUTPUT_STREAM, VARIABLE_LEVEL },
                METHOD_GENERATED_DEFAULT_READ_OBJECT,
                classname, il, constantpool);
        default_read_method.addException(TYPE_JAVA_IO_IOEXCEPTION);
        default_read_method.addException(
                TYPE_JAVA_LANG_CLASS_NOT_FOUND_EXCEPTION);
        gen.addMethod(default_read_method.getMethod());
        /* Construct a read-of-the-stream constructor, but only when we
         * can actually use it.
         */
        if (is_externalizable || !super_is_serializable
                || generator.forceGeneratedCalls() || super_has_ibis_constructor) {
            il = new InstructionList();
            il.append(new RETURN());
            MethodGen read_cons = new MethodGen(Constants.ACC_PUBLIC,
                    Type.VOID, ibis_input_stream_arrtp,
                    new String[] { VARIABLE_INPUT_STREAM }, METHOD_INIT, classname, il,
                    constantpool);
            read_cons.addException(TYPE_JAVA_IO_IOEXCEPTION);
            read_cons.addException(TYPE_JAVA_LANG_CLASS_NOT_FOUND_EXCEPTION);
            gen.addMethod(read_cons.getMethod());
        } else if (SerializationInfo.hasReadObject(methods)) {
            // No usable ibis constructor: wrap the user's readObject instead.
            il = new InstructionList();
            il.append(new RETURN());
            MethodGen readobjectWrapper = new MethodGen(
                    Constants.ACC_PUBLIC, Type.VOID,
                    ibis_input_stream_arrtp, new String[] { VARIABLE_INPUT_STREAM },
                    METHOD_$READ_OBJECT_WRAPPER$, classname, il, constantpool);
            readobjectWrapper.addException(TYPE_JAVA_IO_IOEXCEPTION);
            readobjectWrapper.addException(
                    TYPE_JAVA_LANG_CLASS_NOT_FOUND_EXCEPTION);
            gen.addMethod(readobjectWrapper.getMethod());
        }
        /* Now, create a new class structure, which has these methods. */
        JavaClass newclazz = gen.getJavaClass();
        generator.replace(clazz, newclazz);
        clazz = newclazz;
    }
private InstructionList writeInstructions(Field field) {
String field_sig = field.getSignature();
Type field_type = Type.getType(field_sig);
SerializationInfo info = SerializationInfo.getSerializationInfo(field_type);
Type t = info.tp;
InstructionList temp = new InstructionList();
if (!info.primitive) {
t = Type.getType(field_sig);
}
temp.append(new ALOAD(1));
temp.append(new ALOAD(0));
temp.append(factory.createFieldAccess(classname, field.getName(),
t, Constants.GETFIELD));
temp.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
info.write_name, Type.VOID, info.param_tp_arr,
Constants.INVOKEVIRTUAL));
return temp;
}
private InstructionList readInstructions(Field field,
boolean from_constructor) {
String field_sig = field.getSignature();
Type field_type = Type.getType(field_sig);
SerializationInfo info = SerializationInfo.getSerializationInfo(field_type);
Type t = info.tp;
InstructionList temp = new InstructionList();
if (!info.primitive) {
t = Type.getType(field_sig);
}
if (from_constructor || !field.isFinal()) {
temp.append(new ALOAD(0));
temp.append(new ALOAD(1));
temp.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
info.read_name, info.tp, Type.NO_ARGS,
Constants.INVOKEVIRTUAL));
if (!info.primitive) {
temp.append(factory.createCheckCast((ReferenceType) t));
}
temp.append(factory.createFieldAccess(classname,
field.getName(), t, Constants.PUTFIELD));
} else {
temp.append(new ALOAD(1));
temp.append(new ALOAD(0));
int ind = constantpool.addString(field.getName());
temp.append(new LDC(ind));
ind = constantpool.addString(classname);
temp.append(new LDC(ind));
if (!info.primitive) {
int ind2 = constantpool.addString(field_sig);
temp.append(new LDC(ind2));
}
temp.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
info.final_read_name, Type.VOID,
info.primitive ? new Type[] {
Type.OBJECT, Type.STRING,
Type.STRING }
: new Type[] { Type.OBJECT,
Type.STRING, Type.STRING,
Type.STRING },
Constants.INVOKEVIRTUAL));
}
return temp;
}
private String writeCallName(String name) {
return METHOD_WRITE_ARRAY + name.substring(0, 1).toUpperCase()
+ name.substring(1);
}
    /**
     * Generates the bytecode that writes one reference (object or array)
     * field. When the field's exact type is statically known (a primitive
     * array, or a final class with an ibis constructor — or serializable
     * with forced generated calls), a "known type" fast path is emitted that
     * writes a known-type header and, on a fresh object (header result 1),
     * writes the contents directly; otherwise the generic
     * {@link #writeInstructions(Field)} path is used. The fast path is also
     * skipped at runtime when the stream has an object replacer installed.
     * Locals used in the generated code: 0 = this, 1 = output stream,
     * 2 = header result, 3 = array index, 4 = array length.
     */
    private InstructionList writeReferenceField(Field field) {
        Type field_type = Type.getType(field.getSignature());
        InstructionList write_il = new InstructionList();
        boolean isfinal = false;
        boolean isarray = false;
        JavaClass field_class = null;
        String basicname = null;
        if (generator.isVerbose()) {
            System.out.println("    writing reference field "
                    + field.getName() + " of type "
                    + field_type.getSignature());
        }
        // Classify the field: final object type, array of objects, or array
        // of a basic type (basicname != null).
        if (field_type instanceof ObjectType) {
            field_class = lookupClass(
                    ((ObjectType) field_type).getClassName());
            if (field_class != null && field_class.isFinal()) {
                isfinal = true;
            }
        } else if (field_type instanceof ArrayType) {
            isarray = true;
            Type el_type = ((ArrayType) field_type).getElementType();
            if (el_type instanceof ObjectType) {
                field_class = lookupClass(
                        ((ObjectType) el_type).getClassName());
                if (field_class != null && field_class.isFinal()) {
                    isfinal = true;
                }
            } else if (el_type instanceof BasicType) {
                basicname = el_type.toString();
            }
        }
        if ((basicname != null)
                || (isfinal
                        && (SerializationInfo.hasIbisConstructor(field_class)
                                || (SerializationInfo.isSerializable(field_class)
                                        && generator.forceGeneratedCalls())))) {
            // If there is an object replacer, we cannot do the
            // "fast" code.
            write_il.append(new ACONST_NULL());
            write_il.append(new ALOAD(1));
            write_il.append(factory.createFieldAccess(
                    TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, VARIABLE_REPLACER,
                    new ObjectType(TYPE_IBIS_IO_REPLACER),
                    Constants.GETFIELD));
            IF_ACMPEQ replacertest = new IF_ACMPEQ(null);
            write_il.append(replacertest);
            // Replacer installed: fall back to the generic write.
            write_il.append(writeInstructions(field));
            GOTO toEnd = new GOTO(null);
            write_il.append(toEnd);
            // "fast" code.
            replacertest.setTarget(write_il.append(new ALOAD(1)));
            write_il.append(new ALOAD(0));
            write_il.append(factory.createFieldAccess(classname,
                    field.getName(), field_type, Constants.GETFIELD));
            if (basicname != null) {
                write_il.append(factory.createFieldAccess(
                        TYPE_IBIS_IO_CONSTANTS,
                        "TYPE_" + basicname.toUpperCase(), Type.INT,
                        Constants.GETSTATIC));
                write_il.append(factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_KNOWN_ARRAY_HEADER,
                        Type.INT, new Type[] { Type.OBJECT, Type.INT },
                        Constants.INVOKEVIRTUAL));
            } else {
                write_il.append(factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_KNOWN_OBJECT_HEADER,
                        Type.INT, new Type[] { Type.OBJECT },
                        Constants.INVOKEVIRTUAL));
            }
            // Header result in local 2; only write contents when it is 1.
            write_il.append(new ISTORE(2));
            write_il.append(new ILOAD(2));
            write_il.append(new ICONST(1));
            IF_ICMPNE ifcmp = new IF_ICMPNE(null);
            write_il.append(ifcmp);
            if (isarray) {
                // Write the array length first.
                write_il.append(new ALOAD(1));
                write_il.append(new ALOAD(0));
                write_il.append(factory.createFieldAccess(classname,
                        field.getName(), field_type, Constants.GETFIELD));
                write_il.append(new ARRAYLENGTH());
                write_il.append(new DUP());
                write_il.append(new ISTORE(4));
                write_il.append(
                        factory.createInvoke(
                                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                                METHOD_WRITE_INT,
                                Type.VOID, new Type[] { Type.INT },
                                Constants.INVOKEVIRTUAL));
                if (basicname != null) {
                    // Basic-type array: one bulk writeArrayXxx(array, 0, len).
                    write_il.append(new ALOAD(1));
                    write_il.append(new ALOAD(0));
                    write_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(),
                                    field_type, Constants.GETFIELD));
                    write_il.append(new ICONST(0));
                    write_il.append(new ILOAD(4));
                    write_il.append(
                            factory.createInvoke(
                                    TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                                    writeCallName(basicname), Type.VOID,
                                    new Type[] { field_type, Type.INT,
                                            Type.INT },
                                    Constants.INVOKEVIRTUAL));
                } else {
                    // Object array: per-element loop over index in local 3,
                    // writing a known-object header and, for fresh objects,
                    // the element itself.
                    write_il.append(new ICONST(0));
                    write_il.append(new ISTORE(3));
                    GOTO gto = new GOTO(null);
                    write_il.append(gto);
                    InstructionHandle loop_body_start
                            = write_il.append(new ALOAD(1));
                    write_il.append(new ALOAD(0));
                    write_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(), field_type,
                                    Constants.GETFIELD));
                    write_il.append(new ILOAD(3));
                    write_il.append(new AALOAD());
                    write_il.append(
                            factory.createInvoke(
                                    TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                                    METHOD_WRITE_KNOWN_OBJECT_HEADER, Type.INT,
                                    new Type[] { Type.OBJECT },
                                    Constants.INVOKEVIRTUAL));
                    write_il.append(new ISTORE(2));
                    write_il.append(new ILOAD(2));
                    write_il.append(new ICONST(1));
                    IF_ICMPNE ifcmp1 = new IF_ICMPNE(null);
                    write_il.append(ifcmp1);
                    write_il.append(new ALOAD(0));
                    write_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(),
                                    field_type, Constants.GETFIELD));
                    write_il.append(new ILOAD(3));
                    write_il.append(new AALOAD());
                    write_il.append(new ALOAD(1));
                    write_il.append(
                            createGeneratedWriteObjectInvocation(
                                    field_class.getClassName(),
                                    Constants.INVOKEVIRTUAL));
                    ifcmp1.setTarget(write_il.append(new IINC(3, 1)));
                    gto.setTarget(write_il.append(new ILOAD(4)));
                    write_il.append(new ILOAD(3));
                    write_il.append(new IF_ICMPGT(loop_body_start));
                }
            } else {
                // Plain object field: call its generated_WriteObject.
                write_il.append(new ALOAD(0));
                write_il.append(
                        factory.createFieldAccess(classname,
                                field.getName(), field_type,
                                Constants.GETFIELD));
                write_il.append(new ALOAD(1));
                write_il.append(
                        createGeneratedWriteObjectInvocation(
                                field_class.getClassName(),
                                Constants.INVOKEVIRTUAL));
            }
            // Common join point for the skip branch and the replacer branch.
            InstructionHandle target = write_il.append(new NOP());
            ifcmp.setTarget(target);
            toEnd.setTarget(target);
        } else {
            write_il.append(writeInstructions(field));
        }
        return write_il;
    }
    /**
     * Generates the write loop for classes that declare a
     * serialPersistentFields array: iterate over the array (index in local 2),
     * look up each declared field reflectively by name (name in local 3,
     * java.lang.reflect.Field in local 4), and switch on the field's type
     * code to invoke the matching stream write method. The reflective part is
     * wrapped in an exception handler that rethrows any Exception as an
     * IOException ("Could not write field &lt;name&gt;"). The handler range is
     * registered on the supplied MethodGen.
     */
    private InstructionList serialPersistentWrites(MethodGen write_gen) {
        Instruction persistent_field_access = factory.createFieldAccess(
                classname, FIELD_SERIAL_PERSISTENT_FIELDS, new ArrayType(
                        new ObjectType(TYPE_JAVA_IO_OBJECT_STREAM_FIELD), 1),
                Constants.GETSTATIC);
        InstructionList write_il = new InstructionList();
        int[] case_values = new int[] { CASE_BOOLEAN, CASE_CHAR, CASE_DOUBLE, CASE_FLOAT, CASE_INT, CASE_LONG, CASE_SHORT,
                CASE_OBJECT };
        InstructionHandle[] case_handles
                = new InstructionHandle[case_values.length];
        // One GOTO per case plus one for the default case; all jump past the
        // exception-throwing code to the loop increment.
        GOTO[] gotos = new GOTO[case_values.length + 1];
        for (int i = 0; i < gotos.length; i++) {
            gotos[i] = new GOTO(null);
        }
        // index (local 2) = 0; jump to the loop test at the bottom.
        write_il.append(new SIPUSH((short) 0));
        write_il.append(new ISTORE(2));
        GOTO gto = new GOTO(null);
        write_il.append(gto);
        // Loop body: name = serialPersistentFields[index].getName().
        InstructionHandle loop_body_start
                = write_il.append(persistent_field_access);
        write_il.append(new ILOAD(2));
        write_il.append(new AALOAD());
        write_il.append(
                factory.createInvoke(TYPE_JAVA_IO_OBJECT_STREAM_FIELD,
                        METHOD_GET_NAME, Type.STRING, Type.NO_ARGS,
                        Constants.INVOKEVIRTUAL));
        write_il.append(new ASTORE(3));
        // Protected region: Class.forName(classname).getField(name) -> local 4.
        InstructionHandle begin_try = write_il.append(new PUSH(
                constantpool, classname));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_CLASS, METHOD_FOR_NAME,
                java_lang_class_type, new Type[] { Type.STRING },
                Constants.INVOKESTATIC));
        write_il.append(new ALOAD(3));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_CLASS, METHOD_GET_FIELD,
                new ObjectType(TYPE_JAVA_LANG_REFLECT_FIELD),
                new Type[] { Type.STRING }, Constants.INVOKEVIRTUAL));
        write_il.append(new ASTORE(4));
        // Dispatch on serialPersistentFields[index].getTypeCode().
        write_il.append(persistent_field_access);
        write_il.append(new ILOAD(2));
        write_il.append(new AALOAD());
        write_il.append(factory.createInvoke(TYPE_JAVA_IO_OBJECT_STREAM_FIELD,
                METHOD_GET_TYPE_CODE, Type.CHAR, Type.NO_ARGS,
                Constants.INVOKEVIRTUAL));
        // case CASE_BOOLEAN: stream.writeBoolean(field.getBoolean(this))
        case_handles[0] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_BOOLEAN, Type.BOOLEAN, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_BOOLEAN,
                Type.VOID, new Type[] { Type.BOOLEAN },
                Constants.INVOKEVIRTUAL));
        write_il.append(gotos[0]);
        // case CASE_CHAR
        case_handles[1] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_CHAR, Type.CHAR, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(
                factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                        METHOD_WRITE_CHAR, Type.VOID, new Type[] { Type.INT },
                        Constants.INVOKEVIRTUAL));
        write_il.append(gotos[1]);
        // case CASE_DOUBLE
        case_handles[2] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_DOUBLE, Type.DOUBLE, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_DOUBLE,
                Type.VOID, new Type[] { Type.DOUBLE },
                Constants.INVOKEVIRTUAL));
        write_il.append(gotos[2]);
        // case CASE_FLOAT
        case_handles[3] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_FLOAT, Type.FLOAT, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_FLOAT,
                Type.VOID, new Type[] { Type.FLOAT },
                Constants.INVOKEVIRTUAL));
        write_il.append(gotos[3]);
        // case CASE_INT
        case_handles[4] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_INT, Type.INT, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(
                factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                        METHOD_WRITE_INT, Type.VOID, new Type[] { Type.INT },
                        Constants.INVOKEVIRTUAL));
        write_il.append(gotos[4]);
        // case CASE_LONG
        case_handles[5] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_LONG, Type.LONG, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_LONG,
                Type.VOID, new Type[] { Type.LONG },
                Constants.INVOKEVIRTUAL));
        write_il.append(gotos[5]);
        // case CASE_SHORT
        case_handles[6] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_SHORT, Type.SHORT, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(
                factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_SHORT,
                        Type.VOID, new Type[] { Type.INT },
                        Constants.INVOKEVIRTUAL));
        write_il.append(gotos[6]);
        // case CASE_OBJECT.
        // NOTE(review): this case uses METHOD_GET_BOOLEAN/METHOD_WRITE_BOOLEAN
        // although the case value is CASE_OBJECT — looks suspicious; confirm
        // against the CASE_* constants before changing.
        case_handles[7] = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET_BOOLEAN, Type.BOOLEAN, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM, METHOD_WRITE_BOOLEAN,
                Type.VOID, new Type[] { Type.BOOLEAN },
                Constants.INVOKEVIRTUAL));
        write_il.append(gotos[7]);
        // default: stream.writeObject(field.get(this))
        InstructionHandle default_handle = write_il.append(new ALOAD(1));
        write_il.append(new ALOAD(4));
        write_il.append(new ALOAD(0));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_GET, Type.OBJECT, new Type[] { Type.OBJECT },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(
                TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                METHOD_WRITE_OBJECT, Type.VOID,
                new Type[] { Type.OBJECT }, Constants.INVOKEVIRTUAL));
        InstructionHandle end_try = write_il.append(gotos[8]);
        // Insert the dispatch SWITCH in front of the first case.
        write_il.insert(case_handles[0], new SWITCH(case_values,
                case_handles, default_handle));
        // Exception handler: wrap the cause-less message in an IOException.
        InstructionHandle handler = write_il.append(new ASTORE(6));
        write_il.append(factory.createNew(TYPE_JAVA_IO_IOEXCEPTION));
        write_il.append(new DUP());
        write_il.append(factory.createNew(TYPE_JAVA_LANG_STRING_BUFFER));
        write_il.append(new DUP());
        write_il.append(
                factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                        METHOD_INIT, Type.VOID, Type.NO_ARGS,
                        Constants.INVOKESPECIAL));
        write_il.append(new PUSH(constantpool, "Could not write field "));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_APPEND, Type.STRINGBUFFER, new Type[] { Type.STRING },
                Constants.INVOKEVIRTUAL));
        write_il.append(new ALOAD(3));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_APPEND, Type.STRINGBUFFER, new Type[] { Type.STRING },
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_TO_STRING, Type.STRING, Type.NO_ARGS,
                Constants.INVOKEVIRTUAL));
        write_il.append(factory.createInvoke(TYPE_JAVA_IO_IOEXCEPTION,
                METHOD_INIT, Type.VOID, new Type[] { Type.STRING },
                Constants.INVOKESPECIAL));
        write_il.append(new ATHROW());
        // Loop increment and test: while (index < serialPersistentFields.length).
        InstructionHandle gotos_target = write_il.append(new IINC(2, 1));
        for (int i = 0; i < gotos.length; i++) {
            gotos[i].setTarget(gotos_target);
        }
        InstructionHandle loop_test = write_il.append(new ILOAD(2));
        write_il.append(persistent_field_access);
        gto.setTarget(loop_test);
        write_il.append(new ARRAYLENGTH());
        write_il.append(new IF_ICMPLT(loop_body_start));
        write_gen.addExceptionHandler(begin_try, end_try, handler,
                new ObjectType(TYPE_JAVA_LANG_EXCEPTION));
        return write_il;
    }
private InstructionList generateDefaultWrites(MethodGen write_gen) {
InstructionList write_il = new InstructionList();
if (has_serial_persistent_fields) {
return serialPersistentWrites(write_gen);
}
/* handle the primitive fields */
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
/* Don't send fields that are STATIC, or TRANSIENT */
if (!(field.isStatic() || field.isTransient())) {
Type field_type = Type.getType(field.getSignature());
if (field_type instanceof BasicType) {
if (generator.isVerbose()) {
System.out.println(" writing basic field "
+ field.getName() + " of type "
+ field.getSignature());
}
write_il.append(writeInstructions(field));
}
}
}
/* handle the reference fields */
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
/* Don't send fields that are STATIC or TRANSIENT */
if (!(field.isStatic() || field.isTransient())) {
Type field_type = Type.getType(field.getSignature());
if (field_type instanceof ReferenceType) {
if (generator.isVerbose()) {
System.out.println(" writing field "
+ field.getName() + " of type "
+ field.getSignature());
}
if (!field_type.equals(Type.STRING)
&& !field_type.equals(java_lang_class_type)) {
write_il.append(writeReferenceField(field));
} else {
write_il.append(writeInstructions(field));
}
}
}
}
return write_il;
}
private String readCallName(String name) {
return METHOD_READ_ARRAY + name.substring(0, 1).toUpperCase()
+ name.substring(1);
}
    /**
     * Generates the bytecode that reads one reference (object or array)
     * field — the mirror of {@link #writeReferenceField(Field)}. When the
     * field's exact type is statically known, a "known type" fast path reads
     * the type header: -1 means a fresh object/array that is read in place
     * (basic arrays via one bulk readArrayXxx call, object arrays via a
     * per-element loop, plain objects via the ibis constructor); a nonzero
     * non-(-1) header is a cycle-check handle resolved via
     * getObjectFromCycleCheck; 0 leaves the field untouched. Otherwise the
     * generic {@link #readInstructions} path is used. Locals in the generated
     * code: 0 = this, 1 = input stream, 2 = header, 3 = array length,
     * 4 = array index.
     */
    private InstructionList readReferenceField(Field field,
            boolean from_constructor) {
        Type field_type = Type.getType(field.getSignature());
        InstructionList read_il = new InstructionList();
        boolean isfinal = false;
        boolean isarray = false;
        JavaClass field_class = null;
        String basicname = null;
        if (generator.isVerbose()) {
            System.out.println("    reading reference field "
                    + field.getName() + " of type "
                    + field_type.getSignature());
        }
        // Classify the field, exactly as on the write side.
        if (field_type instanceof ObjectType) {
            field_class = lookupClass(
                    ((ObjectType) field_type).getClassName());
            if (field_class != null && field_class.isFinal()) {
                isfinal = true;
            }
        } else if (field_type instanceof ArrayType) {
            isarray = true;
            Type el_type = ((ArrayType) field_type).getElementType();
            if (el_type instanceof ObjectType) {
                field_class = lookupClass(
                        ((ObjectType) el_type).getClassName());
                if (field_class != null && field_class.isFinal()) {
                    isfinal = true;
                }
            } else if (el_type instanceof BasicType) {
                basicname = el_type.toString();
            }
        }
        if ((basicname != null)
                || (isfinal
                        && (SerializationInfo.hasIbisConstructor(field_class)
                                || (SerializationInfo.isSerializable(field_class)
                                        && generator.forceGeneratedCalls())))) {
            // header (local 2) = stream.readKnownTypeHeader()
            read_il.append(new ALOAD(1));
            read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_READ_KNOWN_TYPE_HEADER, Type.INT, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
            read_il.append(new ISTORE(2));
            read_il.append(new ILOAD(2));
            read_il.append(new ICONST(-1));
            IF_ICMPNE ifcmp = new IF_ICMPNE(null);
            read_il.append(ifcmp);
            // header == -1: a fresh value follows on the stream.
            if (isarray) {
                if (basicname != null) {
                    // Basic-type array: this.<field> = stream.readArrayXxx()
                    String callname = readCallName(basicname);
                    read_il.append(new ALOAD(0));
                    read_il.append(new ALOAD(1));
                    read_il.append(factory.createInvoke(
                            TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM, callname, field_type,
                            Type.NO_ARGS, Constants.INVOKEVIRTUAL));
                    read_il.append(
                            factory.createFieldAccess(
                                    classname, field.getName(),
                                    field_type, Constants.PUTFIELD));
                } else {
                    // Object array: allocate (length from stream, local 3),
                    // register it for cycle checking, then read each element.
                    Type el_type
                            = ((ArrayType) field_type).getElementType();
                    read_il.append(new ALOAD(0));
                    read_il.append(new ALOAD(1));
                    read_il.append(factory.createInvoke(
                            TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM, METHOD_READ_INT, Type.INT,
                            Type.NO_ARGS, Constants.INVOKEVIRTUAL));
                    read_il.append(new DUP());
                    read_il.append(new ISTORE(3));
                    read_il.append(factory.createNewArray(el_type,
                            (short) 1));
                    read_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(),
                                    field_type, Constants.PUTFIELD));
                    read_il.append(new ALOAD(1));
                    read_il.append(new ALOAD(0));
                    read_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(),
                                    field_type, Constants.GETFIELD));
                    read_il.append(factory.createInvoke(
                            TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                            METHOD_ADD_OBJECT_TO_CYCLE_CHECK, Type.VOID,
                            new Type[] { Type.OBJECT },
                            Constants.INVOKEVIRTUAL));
                    // Element loop, index in local 4.
                    read_il.append(new ICONST(0));
                    read_il.append(new ISTORE(4));
                    GOTO gto1 = new GOTO(null);
                    read_il.append(gto1);
                    InstructionHandle loop_body_start
                            = read_il.append(new ALOAD(1));
                    read_il.append(
                            factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                                    METHOD_READ_KNOWN_TYPE_HEADER, Type.INT,
                                    Type.NO_ARGS, Constants.INVOKEVIRTUAL));
                    read_il.append(new ISTORE(2));
                    read_il.append(new ILOAD(2));
                    read_il.append(new ICONST(-1));
                    IF_ICMPNE ifcmp1 = new IF_ICMPNE(null);
                    read_il.append(ifcmp1);
                    // Element header -1: construct it via the ibis constructor.
                    read_il.append(new ALOAD(0));
                    read_il.append(
                            factory.createFieldAccess(classname,
                                    field.getName(),
                                    field_type, Constants.GETFIELD));
                    read_il.append(new ILOAD(4));
                    read_il.append(factory.createNew((ObjectType) el_type));
                    read_il.append(new DUP());
                    read_il.append(new ALOAD(1));
                    read_il.append(createInitInvocation(
                            field_class.getClassName(), factory));
                    read_il.append(new AASTORE());
                    GOTO gto2 = new GOTO(null);
                    read_il.append(gto2);
                    // Element header != -1 and != 0: cycle-check lookup.
                    InstructionHandle cmp_goto2 = read_il.append(new ILOAD(
                            2));
                    ifcmp1.setTarget(cmp_goto2);
                    read_il.append(new ICONST(0));
                    IF_ICMPEQ ifcmpeq2 = new IF_ICMPEQ(null);
                    read_il.append(ifcmpeq2);
                    read_il.append(new ALOAD(0));
                    read_il.append(factory.createFieldAccess(classname,
                            field.getName(),
                            field_type, Constants.GETFIELD));
                    read_il.append(new ILOAD(4));
                    read_il.append(new ALOAD(1));
                    read_il.append(new ILOAD(2));
                    read_il.append(factory.createInvoke(
                            TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                            METHOD_GET_OBJECT_FROM_CYCLE_CHECK, Type.OBJECT,
                            new Type[] { Type.INT },
                            Constants.INVOKEVIRTUAL));
                    read_il.append(
                            factory.createCheckCast(
                                    (ReferenceType) el_type));
                    read_il.append(new AASTORE());
                    InstructionHandle target2 = read_il.append(new NOP());
                    ifcmpeq2.setTarget(target2);
                    gto2.setTarget(target2);
                    read_il.append(new IINC(4, 1));
                    gto1.setTarget(read_il.append(new ILOAD(3)));
                    read_il.append(new ILOAD(4));
                    read_il.append(new IF_ICMPGT(loop_body_start));
                }
            } else {
                // Plain object: this.<field> = new <type>(stream)
                read_il.append(new ALOAD(0));
                read_il.append(factory.createNew((ObjectType) field_type));
                read_il.append(new DUP());
                read_il.append(new ALOAD(1));
                read_il.append(
                        createInitInvocation(field_class.getClassName(),
                                factory));
                read_il.append(
                        factory.createFieldAccess(classname,
                                field.getName(), field_type,
                                Constants.PUTFIELD));
            }
            GOTO gto = new GOTO(null);
            read_il.append(gto);
            // header != -1: either 0 (leave field) or a cycle-check handle.
            InstructionHandle cmp_goto = read_il.append(new ILOAD(2));
            ifcmp.setTarget(cmp_goto);
            read_il.append(new ICONST(0));
            IF_ICMPEQ ifcmpeq = new IF_ICMPEQ(null);
            read_il.append(ifcmpeq);
            read_il.append(new ALOAD(0));
            read_il.append(new ALOAD(1));
            read_il.append(new ILOAD(2));
            read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_GET_OBJECT_FROM_CYCLE_CHECK, Type.OBJECT,
                    new Type[] { Type.INT }, Constants.INVOKEVIRTUAL));
            read_il.append(
                    factory.createCheckCast((ReferenceType) field_type));
            read_il.append(
                    factory.createFieldAccess(classname, field.getName(),
                            field_type, Constants.PUTFIELD));
            InstructionHandle target = read_il.append(new NOP());
            ifcmpeq.setTarget(target);
            gto.setTarget(target);
        } else {
            read_il.append(readInstructions(field, from_constructor));
        }
        return read_il;
    }
    /**
     * Emits bytecode that reads one serial-persistent field from an
     * IbisSerializationInputStream and assigns it via java.lang.reflect.Field.
     *
     * Generated-code locals (as used below): 0 = target object, 1 = the
     * IbisSerializationInputStream, 3 = field name (String), 4 = the
     * java.lang.reflect.Field, 5 = field modifier bits (only present when
     * final fields are handled outside a constructor).
     *
     * @param tpname capitalized primitive-type name used as a method-name
     *        suffix (e.g. read&lt;tpname&gt;/set&lt;tpname&gt;), or "" for Object fields
     * @param tp BCEL type corresponding to tpname
     * @param read_il instruction list to append to
     * @param gto jump to the shared loop-continuation point; emitted at the
     *        end of the generated fragment
     * @param from_constructor true when generating into the ibis read
     *        constructor, where final fields may be assigned normally
     * @return handle of the first appended instruction; the caller uses it
     *         as a switch case target
     */
    private InstructionHandle generateReadField(String tpname, Type tp,
            InstructionList read_il, GOTO gto, boolean from_constructor) {
        InstructionHandle h;
        if (from_constructor || !has_final_fields) {
            // Simple case: value = in.read<tpname>(); field.set<tpname>(obj, value);
            h = read_il.append(new ALOAD(4));
            read_il.append(new ALOAD(0));
            read_il.append(new ALOAD(1));
            if (tpname.equals("")) {
                // Object-typed field: generic readObject.
                read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_READ_OBJECT, Type.OBJECT, Type.NO_ARGS,
                        Constants.INVOKEVIRTUAL));
            } else {
                // Primitive field: type-specific read method.
                read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_READ + tpname, tp, Type.NO_ARGS,
                        Constants.INVOKEVIRTUAL));
            }
            read_il.append(
                    factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                            METHOD_SET + tpname, Type.VOID,
                            new Type[] { Type.OBJECT, tp },
                            Constants.INVOKEVIRTUAL));
            read_il.append(gto);
            return h;
        }
        // Final fields may be present: test the ACC_FINAL bit of the cached
        // modifiers (local 5) and branch.
        h = read_il.append(new ILOAD(5));
        read_il.append(new PUSH(constantpool, Constants.ACC_FINAL));
        read_il.append(new IAND());
        IFEQ eq = new IFEQ(null);
        read_il.append(eq);
        // Final field: delegate to the stream's readField* helpers, which
        // assign by (object, fieldName) rather than through Field.set.
        read_il.append(new ALOAD(1));
        read_il.append(new ALOAD(0));
        read_il.append(new ALOAD(3));
        if (tpname.equals("")) {
            // Object field additionally needs the declared type name:
            // in.readFieldObject(obj, name, field.getType().getName()).
            read_il.append(new ALOAD(4));
            read_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                    METHOD_GET_TYPE, new ObjectType(TYPE_JAVA_LANG_CLASS),
                    Type.NO_ARGS, Constants.INVOKEVIRTUAL));
            read_il.append(factory.createInvoke(TYPE_JAVA_LANG_CLASS,
                    METHOD_GET_NAME, Type.STRING, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
            read_il.append(factory.createInvoke(
                    TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_READ_FIELD_OBJECT, Type.VOID, new Type[] { Type.OBJECT,
                            Type.STRING, Type.STRING },
                    Constants.INVOKEVIRTUAL));
        } else {
            read_il.append(factory.createInvoke(
                    TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM, METHOD_READ_FIELD
                            + tpname, Type.VOID, new Type[] { Type.OBJECT,
                            Type.STRING }, Constants.INVOKEVIRTUAL));
        }
        GOTO gto2 = new GOTO(null);
        read_il.append(gto2);
        // Non-final field: same read-and-set sequence as the simple case.
        eq.setTarget(read_il.append(new ALOAD(4)));
        read_il.append(new ALOAD(0));
        read_il.append(new ALOAD(1));
        if (tpname.equals("")) {
            read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_READ_OBJECT, tp, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
        } else {
            read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_READ + tpname, tp, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
        }
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                METHOD_SET + tpname, Type.VOID, new Type[] { Type.OBJECT, tp },
                Constants.INVOKEVIRTUAL));
        // Both branches rejoin on the shared continuation jump.
        gto2.setTarget(read_il.append(gto));
        return h;
    }
    /**
     * Generates the read loop for classes that declare
     * serialPersistentFields: for each entry of that array, look up the
     * named java.lang.reflect.Field, switch on the entry's type code, and
     * read/assign the value via generateReadField(). Reflection failures are
     * caught by an exception handler that rethrows as java.io.IOException
     * with the field name in the message.
     *
     * Generated-code locals: 2 = loop index, 3 = field name, 4 = the
     * java.lang.reflect.Field, 5 = field modifiers (only when final fields
     * are handled outside a constructor), 6 = caught exception.
     *
     * @param from_constructor true when generating into the ibis read
     *        constructor
     * @param read_gen method being generated; receives the exception handler
     * @return the generated instruction list
     */
    private InstructionList serialPersistentReads(boolean from_constructor,
            MethodGen read_gen) {
        // GETSTATIC of the serialPersistentFields array; emitted repeatedly.
        Instruction persistent_field_access = factory.createFieldAccess(
                classname, FIELD_SERIAL_PERSISTENT_FIELDS, new ArrayType(
                        new ObjectType(TYPE_JAVA_IO_OBJECT_STREAM_FIELD), 1),
                Constants.GETSTATIC);
        InstructionList read_il = new InstructionList();
        // NOTE(review): case_values[0] is CASE_BOOLEAN while case_handles[0]
        // is the Byte fragment (and handles[7] is Boolean). Verify that the
        // CASE_* constants are defined so values and handles line up with
        // ObjectStreamField.getTypeCode() -- TODO confirm.
        int[] case_values = new int[] { CASE_BOOLEAN, CASE_CHAR, CASE_DOUBLE, CASE_FLOAT, CASE_INT, CASE_LONG, CASE_SHORT,
                CASE_OBJECT };
        InstructionHandle[] case_handles
                = new InstructionHandle[case_values.length];
        // One continuation GOTO per case plus one for the default.
        GOTO[] gotos = new GOTO[case_values.length + 1];
        for (int i = 0; i < gotos.length; i++) {
            gotos[i] = new GOTO(null);
        }
        // i = 0 (loop counter in local 2), then jump to the loop test.
        read_il.append(new SIPUSH((short) 0));
        read_il.append(new ISTORE(2));
        GOTO gto = new GOTO(null);
        read_il.append(gto);
        // Loop body: name = serialPersistentFields[i].getName();
        InstructionHandle loop_body_start
                = read_il.append(persistent_field_access);
        read_il.append(new ILOAD(2));
        read_il.append(new AALOAD());
        read_il.append(factory.createInvoke(TYPE_JAVA_IO_OBJECT_STREAM_FIELD,
                METHOD_GET_NAME, Type.STRING, Type.NO_ARGS,
                Constants.INVOKEVIRTUAL));
        read_il.append(new ASTORE(3));
        // try { field = Class.forName(classname).getField(name); ...
        InstructionHandle begin_try = read_il.append(new PUSH(constantpool,
                classname));
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_CLASS, METHOD_FOR_NAME,
                java_lang_class_type, new Type[] { Type.STRING },
                Constants.INVOKESTATIC));
        read_il.append(new ALOAD(3));
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_CLASS, METHOD_GET_FIELD,
                new ObjectType(TYPE_JAVA_LANG_REFLECT_FIELD),
                new Type[] { Type.STRING }, Constants.INVOKEVIRTUAL));
        read_il.append(new ASTORE(4));
        if (!from_constructor && has_final_fields) {
            // Cache field.getModifiers() in local 5 for the ACC_FINAL tests
            // inside generateReadField().
            read_il.append(new ALOAD(4));
            read_il.append(factory.createInvoke(TYPE_JAVA_LANG_REFLECT_FIELD,
                    METHOD_GET_MODIFIERS, Type.INT, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
            read_il.append(new ISTORE(5));
        }
        // switch (serialPersistentFields[i].getTypeCode()) { ... }
        read_il.append(persistent_field_access);
        read_il.append(new ILOAD(2));
        read_il.append(new AALOAD());
        read_il.append(factory.createInvoke(TYPE_JAVA_IO_OBJECT_STREAM_FIELD,
                METHOD_GET_TYPE_CODE, Type.CHAR, Type.NO_ARGS,
                Constants.INVOKEVIRTUAL));
        // Per-type case fragments; each returns its entry handle.
        case_handles[0] = generateReadField(TYPE_BYTE, Type.BYTE, read_il,
                gotos[0], from_constructor);
        case_handles[1] = generateReadField(TYPE_CHAR, Type.CHAR, read_il,
                gotos[1], from_constructor);
        case_handles[2] = generateReadField(TYPE_DOUBLE, Type.DOUBLE, read_il,
                gotos[2], from_constructor);
        case_handles[3] = generateReadField(TYPE_FLOAT, Type.FLOAT, read_il,
                gotos[3], from_constructor);
        case_handles[4] = generateReadField(TYPE_INT, Type.INT, read_il,
                gotos[4], from_constructor);
        case_handles[5] = generateReadField(TYPE_LONG, Type.LONG, read_il,
                gotos[5], from_constructor);
        case_handles[6] = generateReadField(TYPE_SHORT, Type.SHORT, read_il,
                gotos[6], from_constructor);
        case_handles[7] = generateReadField(TYPE_BOOLEAN, Type.BOOLEAN,
                read_il, gotos[7], from_constructor);
        // Default case handles Object-typed fields ("" suffix).
        InstructionHandle default_handle = generateReadField("",
                Type.OBJECT, read_il, gotos[8], from_constructor);
        InstructionHandle end_try = read_il.getEnd();
        // The SWITCH instruction must sit before the first case fragment.
        read_il.insert(case_handles[0], new SWITCH(case_values,
                case_handles, default_handle));
        // } catch (Exception e) {
        //     throw new IOException("Could not read field " + name); }
        InstructionHandle handler = read_il.append(new ASTORE(6));
        read_il.append(factory.createNew(TYPE_JAVA_IO_IOEXCEPTION));
        read_il.append(new DUP());
        read_il.append(factory.createNew(TYPE_JAVA_LANG_STRING_BUFFER));
        read_il.append(new DUP());
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_INIT, Type.VOID, Type.NO_ARGS,
                Constants.INVOKESPECIAL));
        read_il.append(new PUSH(constantpool, "Could not read field "));
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_APPEND, Type.STRINGBUFFER, new Type[] { Type.STRING },
                Constants.INVOKEVIRTUAL));
        read_il.append(new ALOAD(3));
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_APPEND, Type.STRINGBUFFER, new Type[] { Type.STRING },
                Constants.INVOKEVIRTUAL));
        read_il.append(factory.createInvoke(TYPE_JAVA_LANG_STRING_BUFFER,
                METHOD_TO_STRING, Type.STRING, Type.NO_ARGS,
                Constants.INVOKEVIRTUAL));
        read_il.append(factory.createInvoke(TYPE_JAVA_IO_IOEXCEPTION,
                METHOD_INIT, Type.VOID, new Type[] { Type.STRING },
                Constants.INVOKESPECIAL));
        read_il.append(new ATHROW());
        // i++; all case continuations jump here.
        InstructionHandle gotos_target = read_il.append(new IINC(2, 1));
        for (int i = 0; i < gotos.length; i++) {
            gotos[i].setTarget(gotos_target);
        }
        // Loop test: while (i < serialPersistentFields.length).
        InstructionHandle loop_test = read_il.append(new ILOAD(2));
        read_il.append(persistent_field_access);
        gto.setTarget(loop_test);
        read_il.append(new ARRAYLENGTH());
        read_il.append(new IF_ICMPLT(loop_body_start));
        // Register the reflection try/catch with the enclosing method.
        read_gen.addExceptionHandler(begin_try, end_try, handler,
                new ObjectType(TYPE_JAVA_LANG_EXCEPTION));
        return read_il;
    }
private InstructionList generateDefaultReads(boolean from_constructor,
MethodGen read_gen) {
InstructionList read_il = new InstructionList();
if (has_serial_persistent_fields) {
return serialPersistentReads(from_constructor, read_gen);
}
/* handle the primitive fields */
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
/* Don't send fields that are STATIC, or TRANSIENT */
if (!(field.isStatic() || field.isTransient())) {
Type field_type = Type.getType(field.getSignature());
if (field_type instanceof BasicType) {
if (generator.isVerbose()) {
System.out.println(" writing basic field "
+ field.getName() + " of type "
+ field_type.getSignature());
}
read_il.append(readInstructions(field,
from_constructor));
}
}
}
/* handle the reference fields. */
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
/* Don't send fields that are STATIC or TRANSIENT */
if (!(field.isStatic() || field.isTransient())) {
Type field_type = Type.getType(field.getSignature());
if (generator.isVerbose()) {
System.out.println(" writing field "
+ field.getName() + " of type "
+ field.getSignature());
}
if (field_type instanceof ReferenceType) {
if (!field_type.equals(Type.STRING)
&& !field_type.equals(java_lang_class_type)) {
read_il.append(readReferenceField(field,
from_constructor));
} else {
read_il.append(readInstructions(field,
from_constructor));
}
}
}
}
return read_il;
}
    /**
     * Builds the companion &lt;classname&gt;_ibis_io_Generator class, whose
     * generated_newInstance(IbisSerializationInputStream) method creates and
     * initializes an instance of the target class for the deserializer.
     * Returns the finished class file as a BCEL JavaClass.
     */
    private JavaClass generateInstanceGenerator() {
        /* Here we create a 'generator' object. We need this extra object
         * for three reasons:
         * 1) Because the object is created from the 'ibis.io' package
         *    (the Serialization code), we may not be allowed to create a
         *    new instance of the object (due to inter-package access
         *    restrictions, e.g. the object may not be public). Because
         *    the generator is in the same package as the target object,
         *    it can create a new object for us.
         *
         *    ?? How about totally private objects ??
         *    can sun serialization handle this ??
         *
         * 2) Using this generator object, we can do a normal 'new' of the
         *    target type. This is important, because using 'newInstance' is
         *    6 times more expensive than 'new'.
         * 3) We do not want to invoke a default constructor, but a special
         *    constructor that immediately reads the object state from the
         *    stream. This cannot be done (efficiently) with newInstance.
         */
        if (generator.isVerbose()) {
            System.out.println("  Generating InstanceGenerator class for "
                    + classname);
        }
        String name = classname + METHOD_IBIS_IO_GENERATOR;
        ObjectType class_type = new ObjectType(classname);
        // Class file name is the simple (unqualified) class name + ".class".
        String classfilename = name.substring(name.lastIndexOf('.') + 1)
                + ".class";
        ClassGen iogenGen = new ClassGen(name, TYPE_IBIS_IO_GENERATOR,
                classfilename, Constants.ACC_FINAL | Constants.ACC_PUBLIC
                        | Constants.ACC_SUPER, null);
        InstructionFactory iogenFactory = new InstructionFactory(iogenGen);
        InstructionList il = new InstructionList();
        if (!is_externalizable && super_is_serializable
                && !super_has_ibis_constructor && !generator.forceGeneratedCalls()) {
            /* This is a difficult case. We cannot call a constructor,
             * because this constructor would be obliged to call a
             * constructor for the super-class.
             * So, we do it differently: generate calls to
             * IbisSerializationInputStream methods which call native
             * methods ... I don't know another solution to this problem.
             */
            /* First, create the object. Through a native call, because
             * otherwise the object would be marked uninitialized, and the
             * code would not pass bytecode verification. This native call
             * also takes care of calling the constructor of the first
             * non-serializable superclass.
             */
            il.append(new ALOAD(1));
            int ind = iogenGen.getConstantPool().addString(classname);
            il.append(new LDC(ind));
            il.append(iogenFactory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_CREATE_UNINITIALIZED_OBJECT, Type.OBJECT,
                    new Type[] { Type.STRING }, Constants.INVOKEVIRTUAL));
            il.append(iogenFactory.createCheckCast(class_type));
            il.append(new ASTORE(2));
            /* Now read the superclass. */
            il.append(new ALOAD(1));
            il.append(new ALOAD(2));
            ind = iogenGen.getConstantPool().addString(super_classname);
            il.append(new LDC(ind));
            il.append(iogenFactory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                    METHOD_READ_SERIALIZABLE_OBJECT, Type.VOID, new Type[] {
                            Type.OBJECT, Type.STRING },
                    Constants.INVOKEVIRTUAL));
            /* Now, if the class has a readObject, call it. Otherwise,
             * read its fields, by calling generated_DefaultReadObject.
             */
            if (SerializationInfo.hasReadObject(methods)) {
                il.append(new ALOAD(2));
                il.append(new ALOAD(1));
                il.append(iogenFactory.createInvoke(classname,
                        METHOD_$READ_OBJECT_WRAPPER$, Type.VOID,
                        ibis_input_stream_arrtp, Constants.INVOKEVIRTUAL));
            } else {
                int dpth = getClassDepth(clazz);
                il.append(new ALOAD(2));
                il.append(new ALOAD(1));
                il.append(new SIPUSH((short) dpth));
                il.append(createGeneratedDefaultReadObjectInvocation(
                        classname, iogenFactory, Constants.INVOKEVIRTUAL));
            }
            il.append(new ALOAD(2));
        } else {
            // Easy case: new <classname>(stream) via the ibis constructor.
            il.append(iogenFactory.createNew(class_type));
            il.append(new DUP());
            il.append(new ALOAD(1));
            il.append(createInitInvocation(classname, iogenFactory));
        }
        il.append(new ARETURN());
        /*
           0 new DITree
           3 dup
           4 aload_1
           5 invokespecial DITree(ibis.io.IbisSerializationInputStream)
           8 areturn
         */
        // generated_newInstance(IbisSerializationInputStream): Object.
        MethodGen method = new MethodGen(
                Constants.ACC_FINAL | Constants.ACC_PUBLIC, Type.OBJECT,
                ibis_input_stream_arrtp, new String[] { VARIABLE_INPUT_STREAM },
                METHOD_GENERATED_NEW_INSTANCE, name, il,
                iogenGen.getConstantPool());
        method.setMaxStack(3);
        method.setMaxLocals();
        method.addException(TYPE_JAVA_IO_IOEXCEPTION);
        method.addException(TYPE_JAVA_LANG_CLASS_NOT_FOUND_EXCEPTION);
        iogenGen.addMethod(method.getMethod());
        // Default constructor: super();
        il = new InstructionList();
        il.append(new ALOAD(0));
        il.append(iogenFactory.createInvoke(TYPE_IBIS_IO_GENERATOR, METHOD_INIT,
                Type.VOID, Type.NO_ARGS, Constants.INVOKESPECIAL));
        il.append(new RETURN());
        method = new MethodGen(Constants.ACC_PUBLIC, Type.VOID,
                Type.NO_ARGS, null, METHOD_INIT, name, il,
                iogenGen.getConstantPool());
        method.setMaxStack(1);
        method.setMaxLocals();
        iogenGen.addMethod(method.getMethod());
        return iogenGen.getJavaClass();
    }
void generateCode() {
/* Generate code inside the methods */
if (generator.isVerbose()) {
System.out.println(" Generating method code class for class : "
+ classname);
System.out.println(" Number of fields " + fields.length);
}
int dpth = getClassDepth(clazz);
fillInGeneratedDefaultWriteObjectMethod(dpth);
fillInGeneratedDefaultReadObjectMethod(dpth);
fillInGeneratedWriteObjectMethod(dpth);
fillInGeneratedReadObjectMethod(dpth);
clazz = gen.getJavaClass();
Repository.removeClass(classname);
Repository.addClass(clazz);
JavaClass instgen = null;
if (! is_abstract) {
instgen = generateInstanceGenerator();
Repository.addClass(instgen);
}
generator.markRewritten(clazz, instgen);
}
    /**
     * Fills in the read side: either the ibis read constructor
     * &lt;init&gt;(IbisSerializationInputStream) or, when only a readObject exists,
     * the $readObjectWrapper$ method. The generated prologue handles the
     * superclass (ibis constructor call, plain &lt;init&gt;, or nothing for
     * externalizable), optional cycle-check registration, and then either a
     * readObject/readExternal invocation bracketed by push/pop of the
     * stream's current-object bookkeeping, or the default field reads.
     *
     * @param dpth class depth of this class, passed to pushCurrentObject
     */
    private void fillInGeneratedReadObjectMethod(int dpth) {
        /* Now, produce the read constructor. It only exists if the
         * superclass is not serializable, or if the superclass has an
         * ibis constructor, or is assumed to have one (-force option).
         */
        /* Now, do the same for the reading side. */
        MethodGen mgen = null;
        int index = -1;
        InstructionList read_il = null;
        if (is_externalizable || super_has_ibis_constructor
                || !super_is_serializable || generator.forceGeneratedCalls()) {
            read_il = new InstructionList();
            // Superclass handling: externalizable calls this class's own
            // no-arg <init>; a non-serializable super gets its plain <init>;
            // otherwise the super's ibis constructor is invoked.
            if (is_externalizable) {
                read_il.append(new ALOAD(0));
                read_il.append(factory.createInvoke(classname, METHOD_INIT,
                        Type.VOID, Type.NO_ARGS, Constants.INVOKESPECIAL));
            } else if (!super_is_serializable) {
                read_il.append(new ALOAD(0));
                read_il.append(factory.createInvoke(super_classname,
                        METHOD_INIT, Type.VOID, Type.NO_ARGS,
                        Constants.INVOKESPECIAL));
            } else {
                read_il.append(new ALOAD(0));
                read_il.append(new ALOAD(1));
                read_il.append(createInitInvocation(super_classname,
                        factory));
            }
            // When the super chain did not already do so, register this
            // object with the stream's cycle check.
            if (is_externalizable || !super_is_serializable) {
                read_il.append(new ALOAD(1));
                read_il.append(new ALOAD(0));
                read_il.append(
                        factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                                METHOD_ADD_OBJECT_TO_CYCLE_CHECK, Type.VOID,
                                new Type[] { Type.OBJECT },
                                Constants.INVOKEVIRTUAL));
            }
            int read_cons_index = SerializationInfo.findMethod(methods,
                    METHOD_INIT,
                    SIGNATURE_LIBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM_V);
            mgen = new MethodGen(methods[read_cons_index], classname,
                    constantpool);
            index = read_cons_index;
        } else if (SerializationInfo.hasReadObject(methods)) {
            // No ibis constructor possible: fill in the wrapper method
            // around the class's own readObject instead.
            int read_wrapper_index = SerializationInfo.findMethod(methods,
                    METHOD_$READ_OBJECT_WRAPPER$,
                    SIGNATURE_LIBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM_V);
            mgen = new MethodGen(methods[read_wrapper_index], classname,
                    constantpool);
            read_il = new InstructionList();
            index = read_wrapper_index;
        }
        /* TODO: Shouldn't there be an else clause here that fills in a method?
         * Even an exception throwing method might be better?
         * There is almost certainly a problem here in some corner case.
         */
        if (read_il != null) {
            if (is_externalizable || SerializationInfo.hasReadObject(methods)) {
                /* First, get and set IbisSerializationInputStream's idea of the current object. */
                read_il.append(new ALOAD(1));
                read_il.append(new ALOAD(0));
                read_il.append(new SIPUSH((short) dpth));
                read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_PUSH_CURRENT_OBJECT, Type.VOID, new Type[] {
                                Type.OBJECT, Type.INT },
                        Constants.INVOKEVIRTUAL));
                read_il.append(new ALOAD(0));
                read_il.append(new ALOAD(1));
                read_il.append(factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_GET_JAVA_OBJECT_INPUT_STREAM,
                        sun_input_stream,
                        Type.NO_ARGS,
                        Constants.INVOKEVIRTUAL));
                if (is_externalizable) {
                    /* Invoke readExternal */
                    read_il.append(factory.createInvoke(classname,
                            METHOD_READ_EXTERNAL, Type.VOID,
                            new Type[] { new ObjectType(
                                    TYPE_JAVA_IO_OBJECT_INPUT) },
                            Constants.INVOKEVIRTUAL));
                } else {
                    /* Invoke readObject. */
                    read_il.append(factory.createInvoke(classname,
                            METHOD_READ_OBJECT, Type.VOID,
                            new Type[] { sun_input_stream },
                            Constants.INVOKESPECIAL));
                }
                /* And then, restore IbisSerializationOutputStream's idea of the current object. */
                read_il.append(new ALOAD(1));
                read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_POP_CURRENT_OBJECT, Type.VOID, Type.NO_ARGS,
                        Constants.INVOKEVIRTUAL));
            } else {
                // No readObject: emit default field reads directly.
                read_il.append(generateDefaultReads(true, mgen));
            }
            // Prepend the generated prologue to the method's existing body,
            // then recompute stack/local limits and install the method.
            read_il.append(mgen.getInstructionList());
            mgen.setInstructionList(read_il);
            mgen.setMaxStack(MethodGen.getMaxStack(constantpool, read_il,
                    mgen.getExceptionHandlers()));
            mgen.setMaxLocals();
            gen.setMethodAt(mgen.getMethod(), index);
        }
    }
    /**
     * Fills in generated_WriteObject: writes the superclass state (via the
     * super's generated method or the stream's writeSerializableObject
     * fallback), then either invokes the class's writeObject/writeExternal
     * bracketed by push/pop of the stream's current-object bookkeeping, or
     * emits the default field writes.
     *
     * @param dpth class depth of this class, passed to pushCurrentObject
     */
    private void fillInGeneratedWriteObjectMethod(int dpth) {
        /* Now, produce generated_WriteObject. */
        int write_method_index = SerializationInfo.findMethod(methods,
                METHOD_GENERATED_WRITE_OBJECT,
                SIGNATURE_LIBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM_V);
        InstructionList write_il = new InstructionList();
        MethodGen write_gen = new MethodGen(methods[write_method_index], classname,
                constantpool);
        /* write the superclass if necessary */
        if (is_externalizable) {
            /* Nothing to be done for the superclass. */
        } else if (super_is_ibis_serializable
                || (generator.forceGeneratedCalls() && super_is_serializable)) {
            // Rewritten super: call super.generated_WriteObject directly.
            write_il.append(new ALOAD(0));
            write_il.append(new ALOAD(1));
            write_il.append(createGeneratedWriteObjectInvocation(
                    super_classname, Constants.INVOKESPECIAL));
        } else if (super_is_serializable) {
            // Non-rewritten serializable super: let the stream handle it.
            int ind = constantpool.addString(super_classname);
            write_il.append(new ALOAD(1));
            write_il.append(new ALOAD(0));
            write_il.append(new LDC(ind));
            write_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                    METHOD_WRITE_SERIALIZABLE_OBJECT, Type.VOID, new Type[] {
                            Type.OBJECT, Type.STRING },
                    Constants.INVOKEVIRTUAL));
        }
        /* and now ... generated_WriteObject should either call the classes
         * writeObject, if it has one, or call generated_DefaultWriteObject.
         * The read constructor should either call readObject, or call
         * generated_DefaultReadObject.
         */
        if (is_externalizable || SerializationInfo.hasWriteObject(methods)) {
            /* First, get and set IbisSerializationOutputStream's idea of
             * the current object.
             */
            write_il.append(new ALOAD(1));
            write_il.append(new ALOAD(0));
            write_il.append(new SIPUSH((short) dpth));
            write_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                    METHOD_PUSH_CURRENT_OBJECT, Type.VOID, new Type[] {
                            Type.OBJECT, Type.INT },
                    Constants.INVOKEVIRTUAL));
            write_il.append(new ALOAD(0));
            write_il.append(new ALOAD(1));
            write_il.append(factory.createInvoke(
                    TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                    METHOD_GET_JAVA_OBJECT_OUTPUT_STREAM,
                    sun_output_stream,
                    Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
            if (is_externalizable) {
                /* Invoke writeExternal */
                write_il.append(
                        factory.createInvoke(classname, METHOD_WRITE_EXTERNAL,
                                Type.VOID, new Type[] { new ObjectType(
                                        TYPE_JAVA_IO_OBJECT_OUTPUT) },
                                Constants.INVOKEVIRTUAL));
            } else {
                /* Invoke writeObject. */
                write_il.append(createWriteObjectInvocation());
            }
            /* And then, restore IbisSerializationOutputStream's idea of the current object. */
            write_il.append(new ALOAD(1));
            write_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                    METHOD_POP_CURRENT_OBJECT, Type.VOID, Type.NO_ARGS,
                    Constants.INVOKEVIRTUAL));
        } else {
            write_il.append(generateDefaultWrites(write_gen));
        }
        // NOTE(review): write_gen is re-created here, discarding the
        // instance created above (which was only passed to
        // generateDefaultWrites). Confirm this re-creation is intentional
        // and not a leftover.
        write_gen = new MethodGen(methods[write_method_index], classname,
                constantpool);
        write_il.append(write_gen.getInstructionList());
        write_gen.setInstructionList(write_il);
        write_gen.setMaxStack(MethodGen.getMaxStack(constantpool, write_il,
                write_gen.getExceptionHandlers()));
        write_gen.setMaxLocals();
        gen.setMethodAt(write_gen.getMethod(), write_method_index);
    }
    /**
     * Fills in generated_DefaultWriteObject(out, level): at level == dpth it
     * writes this class's own fields; at lower levels it delegates to the
     * superclass (either its generated method or the stream's
     * defaultWriteSerializableObject fallback). See the pseudo-code below.
     *
     * @param dpth class depth of this class
     */
    private void fillInGeneratedDefaultWriteObjectMethod(int dpth) {
        /* void generated_DefaultWriteObject(
         *         IbisSerializationOutputStream out, int level) {
         *     if (level == dpth) {
         *         ... write fields ... (the code resulting from the
         *         generateDefaultWrites() call).
         *     } else if (level < dpth) {
         *         super.generated_DefaultWriteObject(out, level);
         *     }
         * }
         */
        int default_write_method_index = SerializationInfo.findMethod(
                methods,
                METHOD_GENERATED_DEFAULT_WRITE_OBJECT,
                SIGNATURE_LIBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM_I_V);
        MethodGen write_gen = new MethodGen(
                methods[default_write_method_index], classname,
                constantpool);
        InstructionList write_il = new InstructionList();
        // "end" is the start of the pre-existing body (the method return);
        // all generated branches eventually jump there.
        InstructionHandle end = write_gen.getInstructionList().getStart();
        // if (level != dpth) skip the field writes.
        write_il.append(new ILOAD(2));
        write_il.append(new SIPUSH((short) dpth));
        IF_ICMPNE ifcmpne = new IF_ICMPNE(null);
        write_il.append(ifcmpne);
        write_il.append(generateDefaultWrites(write_gen));
        write_il.append(new GOTO(end));
        if (super_is_ibis_serializable || super_is_serializable) {
            // else if (level <= dpth) delegate to the superclass.
            InstructionHandle i = write_il.append(new ILOAD(2));
            ifcmpne.setTarget(i);
            write_il.append(new SIPUSH((short) dpth));
            write_il.append(new IF_ICMPGT(end));
            if (super_is_ibis_serializable || generator.forceGeneratedCalls()) {
                write_il.append(new ALOAD(0));
                write_il.append(new ALOAD(1));
                write_il.append(new ILOAD(2));
                write_il.append(
                        createGeneratedDefaultWriteObjectInvocation(
                                super_classname));
            } else {
                /* Superclass is not rewritten.
                 */
                write_il.append(new ALOAD(1));
                write_il.append(new ALOAD(0));
                write_il.append(new ILOAD(2));
                write_il.append(factory.createInvoke(
                        TYPE_IBIS_IO_IBIS_SERIALIZATION_OUTPUT_STREAM,
                        METHOD_DEFAULT_WRITE_SERIALIZABLE_OBJECT, Type.VOID,
                        new Type[] { Type.OBJECT, Type.INT },
                        Constants.INVOKEVIRTUAL));
            }
        } else {
            // No serializable superclass: a level mismatch just falls
            // through to the end.
            ifcmpne.setTarget(end);
        }
        // Prepend the generated code, recompute limits, install the method.
        write_il.append(write_gen.getInstructionList());
        write_gen.setInstructionList(write_il);
        write_gen.setMaxStack(MethodGen.getMaxStack(constantpool, write_il,
                write_gen.getExceptionHandlers()));
        write_gen.setMaxLocals();
        gen.setMethodAt(write_gen.getMethod(), default_write_method_index);
    }
    /**
     * Fills in generated_DefaultReadObject(in, level): the read-side mirror
     * of fillInGeneratedDefaultWriteObjectMethod(). At level == dpth it
     * reads this class's own fields; at lower levels it delegates to the
     * superclass (its generated method, or the stream's
     * defaultReadSerializableObject fallback when the super is not
     * rewritten).
     *
     * @param dpth class depth of this class
     */
    private void fillInGeneratedDefaultReadObjectMethod(int dpth) {
        InstructionHandle end;
        IF_ICMPNE ifcmpne;
        int default_read_method_index = SerializationInfo.findMethod(
                methods,
                METHOD_GENERATED_DEFAULT_READ_OBJECT,
                SIGNATURE_LIBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM_I_V);
        MethodGen read_gen = new MethodGen(
                methods[default_read_method_index], classname,
                constantpool);
        InstructionList read_il = new InstructionList();
        // "end" is the start of the pre-existing body (the method return).
        end = read_gen.getInstructionList().getStart();
        // if (level != dpth) skip the field reads.
        read_il.append(new ILOAD(2));
        read_il.append(new SIPUSH((short) dpth));
        ifcmpne = new IF_ICMPNE(null);
        read_il.append(ifcmpne);
        read_il.append(generateDefaultReads(false, read_gen));
        read_il.append(new GOTO(end));
        if (super_is_ibis_serializable || super_is_serializable) {
            // else if (level <= dpth) delegate to the superclass.
            InstructionHandle i = read_il.append(new ILOAD(2));
            ifcmpne.setTarget(i);
            read_il.append(new SIPUSH((short) dpth));
            read_il.append(new IF_ICMPGT(end));
            if (super_is_ibis_serializable || generator.forceGeneratedCalls()) {
                read_il.append(new ALOAD(0));
                read_il.append(new ALOAD(1));
                read_il.append(new ILOAD(2));
                read_il.append(createGeneratedDefaultReadObjectInvocation(
                        super_classname, factory, Constants.INVOKESPECIAL));
            } else {
                /* Superclass is not rewritten.
                 */
                read_il.append(new ALOAD(1));
                read_il.append(new ALOAD(0));
                read_il.append(new ILOAD(2));
                read_il.append(factory.createInvoke(TYPE_IBIS_IO_IBIS_SERIALIZATION_INPUT_STREAM,
                        METHOD_DEFAULT_READ_SERIALIZABLE_OBJECT, Type.VOID,
                        new Type[] { Type.OBJECT, Type.INT },
                        Constants.INVOKEVIRTUAL));
            }
        } else {
            // No serializable superclass: a level mismatch falls through.
            ifcmpne.setTarget(end);
        }
        // Prepend the generated code, recompute limits, install the method.
        read_il.append(read_gen.getInstructionList());
        read_gen.setInstructionList(read_il);
        read_gen.setMaxStack(MethodGen.getMaxStack(constantpool, read_il,
                read_gen.getExceptionHandlers()));
        read_gen.setMaxLocals();
        gen.setMethodAt(read_gen.getMethod(), default_read_method_index);
    }
}
| |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.autofill;
import com.google.common.annotations.VisibleForTesting;
import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;
import org.chromium.ui.base.WindowAndroid;
/**
* Java-side AutofillDialog and AutofillDialogFactory interfaces, and
* JNI glue for C++ AutofillDialogControllerAndroid.
*/
@JNINamespace("autofill")
public class AutofillDialogControllerAndroid {
    // Factory installed via setDialogFactory(); when null, every create()
    // request is immediately cancelled.
    private static AutofillDialogFactory sDialogFactory;
    private long mNativeDelegate; // could be 0 after onDestroy().
    private AutofillDialog mDialog;
    /**
     * An interface to the two possible continuations for the dialog.
     * The dialog is expected to be dismissed when either of the calls is made.
     */
    public interface AutofillDialogDelegate {
        /**
         * Cancels the requestAutocomplete.
         */
        void dialogCancel();
        /**
         * Submits the data to the web-page and persists the last account/card/address choices.
         * @param fullWallet Resulting billing/shipping information obtained from the user
         * @param lastUsedChoiceIsAutofill Whether the last selected data source is Autofill
         * @param lastUsedAccountName The last selected account name, or null
         * @param guidLastUsedBilling GUID of the last selected Autofill billing address, or null
         * @param guidLastUsedShipping GUID of the last selected Autofill shipping address, or null
         * @param guidLastUsedCard GUID of the last selected Autofill credit card, or null
         */
        void dialogContinue(
                AutofillDialogResult.ResultWallet fullWallet,
                boolean lastUsedChoiceIsAutofill, String lastUsedAccountName,
                String guidLastUsedBilling, String guidLastUsedShipping, String guidLastUsedCard);
    }
    /**
     * An interface that exposes the necessary functionality for an Autofill dialog.
     * Note that all information necessary to construct the dialog is passed to the factory.
     */
    public interface AutofillDialog {
        /**
         * Notifies the dialog that the C++ side is gone.
         * The dialog needs to clear its reference to the no longer valid AutofillDialogDelegate.
         */
        void onDestroy();
    }
    /**
     * An interface to the factory that creates Autofill dialogs.
     */
    public interface AutofillDialogFactory {
        /**
         * Creates the dialog.
         * Reasonable attempts should be made to respect "initial choices",
         * Initial choices don't have to be self-consistent or valid.
         *
         * @param delegate Continuations for the dialog
         * @param windowAndroid Context in which the dialog should be shown
         * @param requestFullBillingAddress Whether the full billing address is required
         * @param requestShippingAddress Whether the shipping address is required
         * @param requestPhoneNumbers Whether the phone numbers are required in addresses
         * @param incognitoMode True if the dialog started from an incognito tab
         * @param initialChoiceIsAutofill Whether the selected data source should be Autofill
         * @param initialAccountName Account to be selected, or null
         * @param initialBillingGuid GUID of the initial billing address selection in Autofill
         * @param initialShippingGuid GUID of the initial shipping address selection in Autofill
         * @param initialCardGuid GUID of the initial credit card selection in Autofill
         * @param merchantDomain Scheme+origin for the originating web page, or null
         * @param shippingCountries A list of allowed shipping countries, or null
         * @param creditCardTypes A list of allowed credit card types (e.g. "VISA"), or null
         * @return The Autofill dialog that would later call into the delegate, or null
         */
        AutofillDialog createDialog(
                final AutofillDialogDelegate delegate,
                final WindowAndroid windowAndroid,
                final boolean requestFullBillingAddress, final boolean requestShippingAddress,
                final boolean requestPhoneNumbers,
                final boolean incognitoMode,
                final boolean initialChoiceIsAutofill, final String initialAccountName,
                final String initialBillingGuid, final String initialShippingGuid,
                final String initialCardGuid,
                final String merchantDomain,
                final String[] shippingCountries,
                final String[] creditCardTypes);
    }
    /**
     * Sets the factory to be used.
     * @param factory An instance of the AutofillDialogFactory that will handle requests.
     */
    public static void setDialogFactory(AutofillDialogFactory factory) {
        sDialogFactory = factory;
    }
    /**
     * Creates the dialog through the installed factory. If no factory is
     * installed, or the factory declines to create a dialog, the native side
     * is notified via nativeDialogCancel.
     */
    @VisibleForTesting
    private AutofillDialogControllerAndroid(
            final long nativeAutofillDialogControllerAndroid,
            final WindowAndroid windowAndroid,
            final boolean requestFullBillingAddress, final boolean requestShippingAddress,
            final boolean requestPhoneNumbers,
            final boolean incognitoMode,
            final boolean initialChoiceIsAutofill, final String initialWalletAccountName,
            final String initialBillingGuid, final String initialShippingGuid,
            final String initialCardGuid,
            final String merchantDomain,
            final String[] shippingCountries,
            final String[] creditCardTypes) {
        mNativeDelegate = nativeAutofillDialogControllerAndroid;
        if (sDialogFactory == null) {
            nativeDialogCancel(mNativeDelegate);
            return;
        }
        // Forward both continuations straight to the native controller.
        AutofillDialogDelegate delegate = new AutofillDialogDelegate() {
            @Override
            public void dialogCancel() {
                nativeDialogCancel(mNativeDelegate);
            }
            @Override
            public void dialogContinue(
                    AutofillDialogResult.ResultWallet fullWallet,
                    boolean lastUsedChoiceIsAutofill, String lastUsedAccountName,
                    String guidLastUsedBilling, String guidLastUsedShipping,
                    String guidLastUsedCard) {
                nativeDialogContinue(mNativeDelegate, fullWallet,
                        lastUsedChoiceIsAutofill, lastUsedAccountName,
                        guidLastUsedBilling, guidLastUsedShipping, guidLastUsedCard);
            }
        };
        mDialog = sDialogFactory.createDialog(
                delegate,
                windowAndroid,
                requestFullBillingAddress, requestShippingAddress,
                requestPhoneNumbers,
                incognitoMode,
                initialChoiceIsAutofill, initialWalletAccountName,
                initialBillingGuid, initialShippingGuid, initialCardGuid,
                merchantDomain,
                shippingCountries,
                creditCardTypes);
        if (mDialog == null) {
            nativeDialogCancel(mNativeDelegate);
            return;
        }
    }
    // Entry point from C++: constructs the controller (which shows or
    // cancels the dialog as a side effect of its constructor).
    @CalledByNative
    private static AutofillDialogControllerAndroid create(
            final long nativeAutofillDialogControllerAndroid,
            final WindowAndroid windowAndroid,
            final boolean requestFullBillingAddress, final boolean requestShippingAddress,
            final boolean requestPhoneNumbers,
            final boolean incognitoMode,
            final boolean initialChoiceIsAutofill, final String initialWalletAccountName,
            final String initialBillingGuid, final String initialShippingGuid,
            final String initialCreditCardGuid,
            final String merchantDomain,
            final String[] shippingCountries,
            final String[] creditCardTypes) {
        return new AutofillDialogControllerAndroid(
                nativeAutofillDialogControllerAndroid, windowAndroid,
                requestFullBillingAddress, requestShippingAddress, requestPhoneNumbers,
                incognitoMode,
                initialChoiceIsAutofill, initialWalletAccountName,
                initialBillingGuid, initialShippingGuid,
                initialCreditCardGuid,
                merchantDomain,
                shippingCountries,
                creditCardTypes);
    }
    @CalledByNative
    private static boolean isDialogAllowed(boolean isInvokedFromTheSameOrigin) {
        // TODO(aruslan): cross-origin invocations should be allowed with a
        // warning message.
        return isInvokedFromTheSameOrigin;
    }
    // Called when the native controller goes away; drops both references so
    // no further native calls are made.
    @CalledByNative
    private void onDestroy() {
        if (mNativeDelegate == 0) return;
        if (mDialog != null) mDialog.onDestroy();
        mDialog = null;
        mNativeDelegate = 0;
    }
    // Calls from Java to C++ AutofillDialogControllerAndroid:
    private native void nativeDialogCancel(long nativeAutofillDialogControllerAndroid);
    private native void nativeDialogContinue(long nativeAutofillDialogControllerAndroid,
            Object fullWallet,
            boolean lastUsedChoiceIsAutofill, String lastUsedAccountName,
            String guidLastUsedBilling, String guidLastUsedShipping, String guidLastUsedCard);
}
| |
/**
* Copyright (C) 2013 cherimojava (http://github.com/cherimojava/cherimodata) Licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.cherimojava.data.mongo.entity;
import static com.github.cherimojava.data.mongo.entity.EntityUtils.getMongoNameFromMethod;
import static com.google.common.base.Preconditions.checkArgument;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.validation.Validator;
import org.bson.types.ObjectId;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
 * Contains information about an Entity class, like its collection name, properties etc.
 *
 * @author philnate
 * @since 1.0.0
 */
public final class EntityProperties
{
    /**
     * the class this EntityProperties reflect
     */
    private final Class<? extends Entity> clazz;

    /**
     * name of the collection which contains this entity
     */
    private final String collectionName;

    /**
     * Stores ParameterProperties linked by their pojo name
     */
    private final Map<String, ParameterProperty> pojoNames;

    /**
     * Stores ParameterProperties linked by their mongo name
     */
    private final Map<String, ParameterProperty> mongoNames;

    /**
     * list of properties containing validation annotation
     */
    private final List<ParameterProperty> validationProperties;

    /**
     * list of all properties
     */
    private final List<ParameterProperty> properties;

    /**
     * tells if we have an explicitly defined Id or not
     */
    private final boolean explicitId;

    /**
     * property holding the document id; either the explicitly declared one or the
     * implicit ObjectId-typed property created in the constructor
     */
    private final ParameterProperty id;

    /**
     * Builds the immutable property indexes from the collected getter methods.
     * If no property maps to {@link Entity#ID}, an implicit ObjectId-typed id
     * property is created and registered under both name indexes.
     */
    private EntityProperties( Builder builder )
    {
        this.clazz = builder.clazz;
        this.collectionName = builder.collectionName;
        boolean explicitId = false;
        ImmutableMap.Builder<String, ParameterProperty> pojo = new ImmutableMap.Builder<>();
        ImmutableMap.Builder<String, ParameterProperty> mongo = new ImmutableMap.Builder<>();
        ImmutableList.Builder<ParameterProperty> valProps = new ImmutableList.Builder<>();
        ImmutableList.Builder<ParameterProperty> props = new ImmutableList.Builder<>();
        ParameterProperty idP = null;
        for ( Method m : builder.properties )
        {
            ParameterProperty pp = ParameterProperty.Builder.buildFrom( m, builder.validator );
            // index every property under both its pojo and its mongo name
            pojo.put( pp.getPojoName(), pp );
            mongo.put( pp.getMongoName(), pp );
            props.add( pp );
            if ( Entity.ID.equals( pp.getMongoName() ) )
            {
                explicitId = true;
                idP = pp;
            }
            if ( pp.hasConstraints() )
            {
                valProps.add( pp );
            }
        }
        // check if we have an explicit id, if we don't, create a property for it
        // TODO add to the validator or so, the capability to validate implicit id too
        if ( !explicitId )
        {
            idP = new ParameterProperty.Builder().setMongoName( Entity.ID ).setPojoName( Entity.ID )
                .setType( ObjectId.class ).setTransient( false ).hasConstraints( false )
                .setValidator( builder.validator ).build();
            pojo.put( Entity.ID, idP );
            mongo.put( Entity.ID, idP );
            props.add( idP );
        }
        this.pojoNames = pojo.build();
        this.mongoNames = mongo.build();
        this.validationProperties = valProps.build();
        this.properties = props.build();
        this.explicitId = explicitId;
        id = idP;
    }

    /**
     * retrieves the corresponding ParameterProperty from a given method or null if not found
     *
     * @param m method to retrieve parameter properties from.
     * @return ParameterProperty if found or null otherwise
     */
    public ParameterProperty getProperty( Method m )
    {
        return pojoNames.get( EntityUtils.getPojoNameFromMethod( m ) );
    }

    /**
     * retrieves the corresponding ParameterProperty from the given MongoName or null if no such property exists
     *
     * @param name to load mongo properties from
     * @return ParameterProperty if found or null otherwise
     */
    public ParameterProperty getProperty( String name )
    {
        return mongoNames.get( name );
    }

    /**
     * returns all properties belonging to this Entity
     *
     * @return list of all ParameterProperties for this entity
     */
    public List<ParameterProperty> getProperties()
    {
        return properties;
    }

    /**
     * returns the entity class which this entity property represents
     *
     * @return the Entity subclass these properties describe
     */
    public Class<? extends Entity> getEntityClass()
    {
        return clazz;
    }

    /**
     * returns only those properties that carry validation constraints
     *
     * @return list of constrained ParameterProperties
     */
    public List<ParameterProperty> getValidationProperties()
    {
        return validationProperties;
    }

    /**
     * returns the name of the collection this Entity will be saved to
     *
     * @return the MongoDB collection name for this entity
     */
    public String getCollectionName()
    {
        return collectionName;
    }

    /**
     * returns if for this entity an explicit id was defined or not
     *
     * @return true if an explicit Id was defined, either through @Id or @Named("_id")
     */
    public boolean hasExplicitId()
    {
        return explicitId;
    }

    /**
     * returns the id property (explicit, or the implicitly created ObjectId property)
     *
     * @return the ParameterProperty backing the document id
     */
    public ParameterProperty getIdProperty()
    {
        return id;
    }

    /**
     * Collects the class, collection name, getter methods and validator
     * before constructing an immutable {@link EntityProperties}.
     */
    static class Builder
    {
        private Class<? extends Entity> clazz;

        private String collectionName;

        /**
         * List of Properties to add later
         */
        private List<Method> properties;

        // mongo names seen so far; used to reject duplicate property names early
        private Set<String> mongoNames;

        private Validator validator;

        Builder()
        {
            properties = Lists.newArrayList();
            mongoNames = Sets.newHashSet();
        }

        /** sets the collection name the entity is stored in */
        Builder setCollectionName( String name )
        {
            this.collectionName = name;
            return this;
        }

        /** sets the entity class these properties describe */
        Builder setEntityClass( Class<? extends Entity> clazz )
        {
            this.clazz = clazz;
            return this;
        }

        /**
         * registers a property getter; rejects a method whose mongo name
         * was already registered for this entity
         */
        Builder addParameter( Method m )
        {
            String mongoName = getMongoNameFromMethod( m );
            checkArgument( mongoNames.add( mongoName ), "Entity contains already a property whose name is %s",
                mongoName );
            properties.add( m );
            return this;
        }

        /** creates the immutable EntityProperties from the collected state */
        EntityProperties build()
        {
            return new EntityProperties( this );
        }

        /** sets the validator handed to each ParameterProperty */
        Builder setValidator( Validator validator )
        {
            this.validator = validator;
            return this;
        }
    }
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.joy.core;
import jodd.db.DbManager;
import jodd.db.DbSessionProvider;
import jodd.db.connection.ConnectionProvider;
import jodd.db.oom.DbOomManager;
import jodd.db.oom.config.AutomagicDbOomConfigurator;
import jodd.db.pool.CoreConnectionPool;
import jodd.joy.exception.AppException;
import jodd.joy.jtx.meta.ReadWriteTransaction;
import jodd.jtx.JtxTransactionManager;
import jodd.db.jtx.DbJtxSessionProvider;
import jodd.db.jtx.DbJtxTransactionManager;
import jodd.jtx.meta.Transaction;
import jodd.jtx.proxy.AnnotationTxAdvice;
import jodd.jtx.proxy.AnnotationTxAdviceManager;
import jodd.jtx.proxy.AnnotationTxAdviceSupport;
import jodd.petite.PetiteContainer;
import jodd.petite.config.AutomagicPetiteConfigurator;
import jodd.petite.proxetta.ProxettaAwarePetiteContainer;
import jodd.petite.scope.SessionScope;
import jodd.petite.scope.SingletonScope;
import jodd.props.Props;
import jodd.props.PropsUtil;
import jodd.proxetta.MethodInfo;
import jodd.proxetta.ProxyAspect;
import jodd.proxetta.impl.ProxyProxetta;
import jodd.proxetta.pointcuts.MethodAnnotationPointcut;
import jodd.util.ClassLoaderUtil;
import jodd.util.SystemUtil;
import jodd.log.Logger;
import jodd.log.LoggerFactory;
import java.lang.annotation.Annotation;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
/**
 * Default application core. Contains init points to
 * all application frameworks and layers.
 */
public abstract class DefaultAppCore {

    /**
     * Application system property - application folder.
     */
    public static final String APP_DIR = "app.dir";

    /**
     * Application system property - flag if web application is detected.
     */
    public static final String APP_WEB = "app.web";

    /**
     * Petite bean name for AppCore (this instance).
     */
    public static final String PETITE_CORE = "core";

    /**
     * Petite bean name for database pool.
     */
    public static final String PETITE_DBPOOL = "dbpool";

    /**
     * Petite bean name for <code>DbManager</code> instance.
     */
    public static final String PETITE_DB = "db";

    /**
     * Petite bean name for <code>DbOomManager</code> instance.
     */
    public static final String PETITE_DBOOM = "dboom";

    /**
     * Petite bean name for {@link AppInit} bean.
     */
    public static final String PETITE_INIT = "init";

    /**
     * Petite bean name for application props.
     */
    public static final String PETITE_PROPS = "props";

    /**
     * Petite bean name for {@link AppScanner} bean.
     */
    public static final String PETITE_SCAN = "scan";

    /**
     * Logger. Resolved during {@link #initLogger() initialization}.
     */
    protected static Logger log;

    /**
     * App dir. Resolved during initialization.
     */
    protected String appDir;

    /**
     * Is web application. Resolved during initialization.
     */
    protected boolean isWebApplication;

    /**
     * Props profiles. If set, overrides any default profile set
     * in the props files.
     */
    protected String[] appPropsProfiles;

    /**
     * Default constructor.
     */
    protected DefaultAppCore() {
    }

    // ---------------------------------------------------------------- init

    /**
     * Returns <code>true</code> if application is started as a part of web application.
     */
    public boolean isWebApplication() {
        return isWebApplication;
    }

    /**
     * Returns application directory.
     */
    public String getAppDir() {
        return appDir;
    }

    /**
     * Initializes application core, invoked very first!
     * Fills in defaults for props name/pattern, JTX annotations and scope
     * pattern, resolves the app dir and publishes it as system properties.
     * Important: logging is not yet available in this method!
     */
    @SuppressWarnings("unchecked")
    public void initCore() {
        if (appPropsName == null) {
            appPropsName = "app.props";
        }
        if (appPropsNamePattern == null) {
            int index = appPropsName.lastIndexOf('.');
            appPropsNamePattern = '/' + appPropsName.substring(0, index) + "*.prop*";
        }
        if (jtxAnnotations == null) {
            jtxAnnotations = new Class[] {Transaction.class, ReadWriteTransaction.class};
        }
        if (jtxScopePattern == null) {
            jtxScopePattern = "$class";
        }
        if (appDir == null) {
            resolveAppDir(appPropsName); // app directory is resolved from location of 'app.props'.
        }
        System.setProperty(APP_DIR, appDir);
        System.setProperty(APP_WEB, Boolean.toString(isWebApplication));
    }

    /**
     * Initializes the logger, after the log path is {@link #init() defined}.
     */
    protected void initLogger() {
        if (log != null) {
            return;
        }
        log = LoggerFactory.getLogger(DefaultAppCore.class);
        log.info("app dir: " + appDir);
    }

    /**
     * Resolves application root folders.
     * <p>
     * If application is started as web application, app folder is one below the WEB-INF folder.
     * Otherwise, the root folder is equal to the working folder.
     */
    protected void resolveAppDir(String classPathFileName) {
        URL url = ClassLoaderUtil.getResourceUrl(classPathFileName);
        if (url == null) {
            throw new AppException("Failed to resolve app dir, missing: " + classPathFileName);
        }
        String protocol = url.getProtocol();
        if (!protocol.equals("file")) {
            try {
                // e.g. a 'jar:' URL nests a 'file:' URL in its file part
                url = new URL(url.getFile());
            } catch (MalformedURLException ignore) {
                // best effort: keep the original URL and use its file part below
            }
        }
        appDir = url.getFile();
        int ndx = appDir.indexOf("WEB-INF");
        isWebApplication = (ndx != -1);
        appDir = isWebApplication ? appDir.substring(0, ndx) : SystemUtil.workingFolder();
    }

    // ---------------------------------------------------------------- ready

    /**
     * Called after the {@link #init() core initialization},
     * during the {@link #start() application startup}.
     */
    protected void ready() {
    }

    // ---------------------------------------------------------------- start

    protected boolean initialized;

    /**
     * Initializes application.
     * May be called several times, but the core
     * will be initialized just once.
     * Usually called manually when core needs to
     * be created before server is started
     * (e.g. in embedded environments)
     */
    public void init() {
        if (initialized) {
            return;
        }
        initCore();
        initLogger();
        initProps();
        initScanner();
        initialized = true;
    }

    /**
     * Starts the application and performs all initialization.
     * On any runtime failure the core is stopped and the exception rethrown.
     */
    public void start() {
        init();
        ready();
        try {
            startProxetta();
            startPetite();
            startDb();
            startApp();
            log.info("app started");
        } catch (RuntimeException rex) {
            if (log != null) {
                log.error(rex.toString(), rex);
            } else {
                System.out.println(rex.toString());
                rex.printStackTrace();
            }
            try {
                stop();
            } catch (Exception ignore) {
                // the original failure is more relevant than any shutdown error
            }
            throw rex;
        }
    }

    /**
     * Stops the application.
     */
    public void stop() {
        if (log != null) {
            log.info("shutting down...");
        }
        stopApp();
        stopDb();
        stopPetite();
        if (log != null) {
            log.info("app stopped");
        }
    }

    // ---------------------------------------------------------------- props

    /**
     * Main application props file name, must exist in class path.
     */
    protected String appPropsName;

    /**
     * Application props file name pattern.
     */
    protected String appPropsNamePattern;

    /**
     * Application props.
     */
    protected Props appProps;

    /**
     * Returns applications properties loaded from props files.
     */
    public Props getAppProps() {
        return appProps;
    }

    /**
     * Creates and loads application props.
     * It first loads system properties (registered as <code>sys.*</code>)
     * and then environment properties (registered as <code>env.*</code>).
     * Finally, props files are read from the classpath using the
     * configured props name pattern.
     * <p>
     * If props have been already loaded, does nothing.
     */
    protected void initProps() {
        if (appProps != null) {
            return;
        }
        appProps = createProps();
        appProps.loadSystemProperties("sys");
        appProps.loadEnvironment("env");
        PropsUtil.loadFromClasspath(appProps, appPropsNamePattern);
        if (appPropsProfiles != null) {
            appProps.setActiveProfiles(appPropsProfiles);
        }
    }

    /**
     * Creates new Props. Empty props will be ignored,
     * and missing macros will be resolved as empty string.
     */
    protected Props createProps() {
        Props props = new Props();
        props.setSkipEmptyProps(true);
        props.setIgnoreMissingMacros(true);
        return props;
    }

    // ---------------------------------------------------------------- scanning

    protected AppScanner appScanner;

    /**
     * Returns scanner.
     */
    public AppScanner getAppScanner() {
        return appScanner;
    }

    /**
     * Initializes {@link AppScanner}. Does nothing if already created.
     */
    protected void initScanner() {
        if (appScanner != null) {
            return;
        }
        appScanner = new AppScanner(this);
    }

    // ---------------------------------------------------------------- proxetta

    protected ProxyProxetta proxetta;

    /**
     * Returns proxetta.
     */
    public ProxyProxetta getProxetta() {
        return proxetta;
    }

    /**
     * Creates Proxetta with all aspects. The following aspects are created:
     * <ul>
     * <li>Transaction proxy - applied on all classes that contains public top-level methods
     * annotated with <code>@Transaction</code> annotation. This is just one way how proxies
     * can be applied - since base configuration is in Java, everything is possible.</li>
     * </ul>
     */
    protected void startProxetta() {
        log.info("proxetta initialization");
        proxetta = ProxyProxetta.withAspects(createAppAspects());
    }

    /**
     * Creates all application aspects. By default it creates just
     * {@link #createTxProxyAspects() transactional aspect}.
     */
    protected ProxyAspect[] createAppAspects() {
        return new ProxyAspect[] {createTxProxyAspects()};
    }

    /**
     * Creates TX aspect that will be applied on all classes
     * having at least one public top-level method annotated
     * with {@link #jtxAnnotations registered JTX annotations}.
     */
    protected ProxyAspect createTxProxyAspects() {
        return new ProxyAspect(
                AnnotationTxAdvice.class,
                new MethodAnnotationPointcut(jtxAnnotations) {
                    @Override
                    public boolean apply(MethodInfo methodInfo) {
                        return
                                isPublic(methodInfo) &&
                                isTopLevelMethod(methodInfo) &&
                                super.apply(methodInfo);
                    }
                });
    }

    // ---------------------------------------------------------------- petite

    protected PetiteContainer petite;

    /**
     * Returns application container (Petite).
     */
    public PetiteContainer getPetite() {
        return petite;
    }

    /**
     * Creates and initializes Petite container.
     * It will be auto-magically configured by scanning the classpath.
     * Also, all 'app*.prop*' will be loaded and values will
     * be injected in the matched beans. At the end it registers
     * this instance of core into the container.
     */
    protected void startPetite() {
        log.info("petite initialization");
        petite = createPetiteContainer();
        log.info("app in web: " + isWebApplication);
        if (!isWebApplication) {
            // make session scope to act as singleton scope
            // if this is not a web application (and http session is not available).
            petite.registerScope(SessionScope.class, new SingletonScope());
        }

        // load parameters from properties files
        petite.defineParameters(appProps);

        // adds a scanner bean, so it can be immediately configured from props
        petite.addBean(PETITE_SCAN, appScanner);

        // automagic configuration
        registerPetiteContainerBeans(petite);

        // add AppCore instance to Petite
        petite.addBean(PETITE_CORE, this);
        petite.addBean(PETITE_PROPS, appProps);
    }

    /**
     * Configures Petite container. By default scans the class path
     * for petite beans and registers them automagically.
     */
    protected void registerPetiteContainerBeans(PetiteContainer petiteContainer) {
        AutomagicPetiteConfigurator pcfg = new AutomagicPetiteConfigurator();
        appScanner.configure(pcfg);
        pcfg.configure(petiteContainer);
    }

    /**
     * Creates Petite container. By default, it creates
     * {@link jodd.petite.proxetta.ProxettaAwarePetiteContainer proxetta aware petite container}.
     */
    protected PetiteContainer createPetiteContainer() {
        return new ProxettaAwarePetiteContainer(proxetta);
    }

    /**
     * Stops Petite container.
     */
    protected void stopPetite() {
        if (petite != null) {
            petite.shutdown();
        }
    }

    // ---------------------------------------------------------------- database

    protected boolean useDatabase = true;

    /**
     * JTX manager.
     */
    protected JtxTransactionManager jtxManager;

    /**
     * Returns JTX transaction manager.
     */
    public JtxTransactionManager getJtxManager() {
        return jtxManager;
    }

    /**
     * Database connection provider.
     */
    protected ConnectionProvider connectionProvider;

    /**
     * JTX annotations.
     */
    protected Class<? extends Annotation>[] jtxAnnotations;

    /**
     * JTX scope pattern.
     * @see AnnotationTxAdviceManager
     */
    protected String jtxScopePattern;

    /**
     * Initializes database. First, creates connection pool.
     * and transaction manager. Then, Jodds DbOomManager is
     * configured. It is also configured automagically, by scanning
     * the class path for entities.
     */
    @SuppressWarnings("unchecked")
    protected void startDb() {
        if (!useDatabase) {
            log.info("database is not used");
            return;
        }
        log.info("database initialization");

        // connection pool
        Class<? extends ConnectionProvider> connectionProviderClass = getConnectionProviderType();
        petite.registerPetiteBean(connectionProviderClass, PETITE_DBPOOL, null, null, false);
        connectionProvider = (ConnectionProvider) petite.getBean(PETITE_DBPOOL);
        connectionProvider.init();

        checkConnectionProvider();

        // transactions manager
        jtxManager = createJtxTransactionManager(connectionProvider);
        jtxManager.setValidateExistingTransaction(true);

        AnnotationTxAdviceManager annTxAdviceManager = new AnnotationTxAdviceManager(jtxManager, jtxScopePattern);
        annTxAdviceManager.registerAnnotations(jtxAnnotations);
        AnnotationTxAdviceSupport.manager = annTxAdviceManager;

        DbSessionProvider sessionProvider = new DbJtxSessionProvider(jtxManager);

        // global settings
        DbManager dbManager = DbManager.getInstance();
        dbManager.setConnectionProvider(connectionProvider);
        dbManager.setSessionProvider(sessionProvider);
        petite.addBean(PETITE_DB, dbManager);

        DbOomManager dbOomManager = DbOomManager.getInstance();
        petite.addBean(PETITE_DBOOM, dbOomManager);

        // automatic configuration
        registerDbEntities(dbOomManager);
    }

    /**
     * Registers DbOom entities. By default, scans the
     * class path and register entities automagically.
     */
    protected void registerDbEntities(DbOomManager dbOomManager) {
        AutomagicDbOomConfigurator dbcfg = new AutomagicDbOomConfigurator();
        appScanner.configure(dbcfg);
        dbcfg.configure(dbOomManager);
    }

    /**
     * Creates JTX transaction manager.
     */
    protected JtxTransactionManager createJtxTransactionManager(ConnectionProvider connectionProvider) {
        return new DbJtxTransactionManager(connectionProvider);
    }

    /**
     * Returns <code>ConnectionProvider</code> implementation.
     */
    protected Class<? extends ConnectionProvider> getConnectionProviderType() {
        return CoreConnectionPool.class;
    }

    /**
     * Checks if connection provider can return a connection.
     * Logs the database product name/version on success; the
     * borrowed connection is always returned to the provider.
     */
    protected void checkConnectionProvider() {
        Connection connection = connectionProvider.getConnection();
        try {
            DatabaseMetaData databaseMetaData = connection.getMetaData();
            String name = databaseMetaData.getDatabaseProductName();
            String version = databaseMetaData.getDatabaseProductVersion();

            if (log.isInfoEnabled()) {
                log.info("Connected to database: " + name + " v" + version);
            }
        } catch (SQLException sex) {
            log.error("DB connection failed: ", sex);
        } finally {
            connectionProvider.closeConnection(connection);
        }
    }

    /**
     * Closes database resources at the end.
     */
    protected void stopDb() {
        if (!useDatabase) {
            return;
        }
        if (log != null) {
            log.info("database shutdown");
        }

        if (jtxManager != null) {
            jtxManager.close();
        }

        if (connectionProvider != null) {
            connectionProvider.close();
        }
    }

    // ---------------------------------------------------------------- init

    protected AppInit appInit;

    /**
     * Initializes business part of the application.
     * Simply delegates to {@link AppInit#init()}.
     */
    protected void startApp() {
        appInit = (AppInit) petite.getBean(PETITE_INIT);
        if (appInit != null) {
            appInit.init();
        }
    }

    /**
     * Stops business part of the application.
     * Simply delegates to {@link AppInit#stop()}.
     */
    protected void stopApp() {
        if (appInit != null) {
            appInit.stop();
        }
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing.impl.storage;
import com.intellij.concurrency.ConcurrentCollectionFactory;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ConcurrentIntObjectMap;
import com.intellij.util.indexing.IdFilter;
import com.intellij.util.indexing.StorageException;
import com.intellij.util.io.DataOutputStream;
import com.intellij.util.io.*;
import com.intellij.util.io.keyStorage.AppendableObjectStorage;
import com.intellij.util.io.keyStorage.AppendableStorageBackedByResizableMappedFile;
import it.unimi.dsi.fastutil.ints.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.VisibleForTesting;
import java.io.*;
import java.nio.file.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A data structure to store key hashes to virtual file id mappings.
*/
class KeyHashLog<Key> implements Closeable {
  private static final Logger LOG = Logger.getInstance(KeyHashLog.class);
  // Kill switch for the per-project cache of suitable key hashes (see getSuitableKeyHashes).
  private static final boolean ENABLE_CACHED_HASH_IDS = SystemProperties.getBooleanProperty("idea.index.cashed.hashids", true);

  @NotNull
  private final KeyDescriptor<Key> myKeyDescriptor;
  @NotNull
  private final Path myBaseStorageFile;
  // Append-only log of {keyHash, inputId} pairs; negative inputId means removal.
  @NotNull
  private final AppendableObjectStorage<int[]> myKeyHashToVirtualFileMapping;
  // Storage-length ids whose on-disk project cache file is stale and must not be reused.
  @NotNull
  private final ConcurrentIntObjectMap<Boolean> myInvalidatedSessionIds = ConcurrentCollectionFactory.createConcurrentIntObjectMap();
  // Storage length at the time the last project cache file was written; 0 = no valid cache.
  private volatile int myLastScannedId;

  KeyHashLog(@NotNull KeyDescriptor<Key> descriptor, @NotNull Path baseStorageFile) throws IOException {
    this(descriptor, baseStorageFile, true);
  }

  /**
   * Opens the log; when {@code compact} is set and a compaction marker file exists,
   * the log is rewritten (dropping removed/duplicate records) before being opened.
   */
  private KeyHashLog(@NotNull KeyDescriptor<Key> descriptor, @NotNull Path baseStorageFile, boolean compact) throws IOException {
    myKeyDescriptor = descriptor;
    myBaseStorageFile = baseStorageFile;
    if (compact && isRequiresCompaction()) {
      performCompaction();
    }
    myKeyHashToVirtualFileMapping =
      openMapping(getDataFile(), 4096);
  }
  /** Opens the backing appendable storage of int-pair records at the given path. */
  @NotNull
  private static AppendableStorageBackedByResizableMappedFile<int[]> openMapping(@NotNull Path dataFile, int size) throws IOException {
    return new AppendableStorageBackedByResizableMappedFile<>(dataFile,
                                                              size,
                                                              null,
                                                              PagedFileStorage.MB,
                                                              true,
                                                              IntPairInArrayKeyDescriptor.INSTANCE);
  }

  /** Records that {@code key} occurs in file {@code inputId} (appended as a positive id). */
  void addKeyHashToVirtualFileMapping(Key key, int inputId) throws StorageException {
    appendKeyHashToVirtualFileMappingToLog(key, inputId);
  }

  /** Records that {@code key} no longer occurs in file {@code inputId} (appended as a negative id). */
  void removeKeyHashToVirtualFileMapping(Key key, int inputId) throws StorageException {
    appendKeyHashToVirtualFileMappingToLog(key, -inputId);
  }
  /**
   * Returns key hashes visible through {@code filter}, using an on-disk per-project cache
   * keyed by the current storage length: if the log has not grown since the cache was
   * written (and the cache was not invalidated), the cached set is loaded instead of
   * rescanning the whole log.
   */
  @Nullable
  IntSet getSuitableKeyHashes(@NotNull IdFilter filter, @NotNull Project project) throws StorageException {
    IdFilter.FilterScopeType filteringScopeType = filter.getFilteringScopeType();
    if (filteringScopeType == IdFilter.FilterScopeType.OTHER) {
      filteringScopeType = IdFilter.FilterScopeType.PROJECT_AND_LIBRARIES;
    }
    IntSet hashMaskSet = null;
    long l = System.currentTimeMillis();

    // NOTE(review): the cache path is built from myLastScannedId *before* the current
    // length is read below; relies on the two matching when the cache is hit — confirm.
    @NotNull Path sessionProjectCacheFile = getSavedProjectFileValueIds(myLastScannedId, filteringScopeType, project);
    int id = myKeyHashToVirtualFileMapping.getCurrentLength();
    final boolean useCachedHashIds = ENABLE_CACHED_HASH_IDS;

    if (useCachedHashIds && id == myLastScannedId) {
      if (myInvalidatedSessionIds.remove(id) == null) {
        try {
          hashMaskSet = loadProjectHashes(sessionProjectCacheFile);
        }
        catch (IOException ignored) {
          // unreadable/missing cache — fall through to a full rescan below
        }
      }
    }

    if (hashMaskSet == null) {
      if (useCachedHashIds && myLastScannedId != 0) {
        // drop the stale cache file before rebuilding
        try {
          Files.delete(sessionProjectCacheFile);
        }
        catch (NoSuchFileException ignored) {
        }
        catch (IOException e) {
          LOG.error(e);
        }
      }

      hashMaskSet = getSuitableKeyHashes(filter);

      if (useCachedHashIds) {
        saveHashedIds(hashMaskSet, id, filteringScopeType, project);
      }
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("Scanned keyHashToVirtualFileMapping of " + myBaseStorageFile + " for " + (System.currentTimeMillis() - l));
    }

    return hashMaskSet;
  }

  /**
   * Appends one {keyHash, inputId} record under the write lock and invalidates
   * the cached-hash session, since the log contents changed. inputId 0 is a no-op.
   */
  private void appendKeyHashToVirtualFileMappingToLog(Key key, int inputId) throws StorageException {
    if (inputId == 0) return;
    try {
      withLock(() -> myKeyHashToVirtualFileMapping.append(new int[]{myKeyDescriptor.getHashCode(key), inputId}), false);
    }
    catch (IOException e) {
      throw new StorageException(e);
    }
    invalidateKeyHashToVirtualFileMappingCache();
  }
  /**
   * Rebuilds the set of key hashes whose files pass {@code idFilter} by replaying
   * the whole append-only log: positive ids add a file to a hash's set, negative
   * ids remove it (and empty sets are dropped). Records that cancel out or
   * duplicate are counted; when they dominate, a compaction is scheduled.
   */
  @NotNull
  IntSet getSuitableKeyHashes(@NotNull IdFilter idFilter) throws StorageException {
    try {
      doForce();
      Int2ObjectMap<IntSet> hash2inputIds = new Int2ObjectOpenHashMap<>(1000);
      AtomicInteger uselessRecords = new AtomicInteger();
      withLock(() -> {
        ProgressManager.checkCanceled();
        myKeyHashToVirtualFileMapping.processAll((offset, key) -> {
          ProgressManager.checkCanceled();
          int inputId = key[1];
          int absInputId = Math.abs(inputId);
          if (!idFilter.containsFileId(absInputId)) return true;
          int keyHash = key[0];
          if (inputId > 0) {
            if (!hash2inputIds.computeIfAbsent(keyHash, __ -> new IntOpenHashSet()).add(inputId)) {
              // duplicate add — carries no information
              uselessRecords.incrementAndGet();
            }
          }
          else {
            IntSet inputIds = hash2inputIds.get(keyHash);
            if (inputIds != null) {
              inputIds.remove(absInputId);
              if (inputIds.isEmpty()) {
                hash2inputIds.remove(keyHash);
              }
            }
            // a removal plus the add it cancels are both dead weight after compaction
            uselessRecords.incrementAndGet();
          }
          return true;
        });
      }, true);
      // NOTE(review): for an empty log this is 0 >= 0 and still sets the
      // compaction marker — verify that is intended.
      if (uselessRecords.get() >= hash2inputIds.size()) {
        setRequiresCompaction();
      }
      return hash2inputIds.keySet();
    }
    catch (IOException e) {
      throw new StorageException(e);
    }
  }

  /** Flushes pending appends to disk, but only if there is anything dirty. */
  void force() throws IOException {
    if (myKeyHashToVirtualFileMapping.isDirty()) {
      doForce();
    }
  }

  /** Flushes the underlying storage under the write lock. */
  private void doForce() throws IOException {
    withLock(() -> myKeyHashToVirtualFileMapping.force(), false);
  }

  @Override
  public void close() throws IOException {
    withLock(() -> {
      myKeyHashToVirtualFileMapping.close();
    }, false);
  }
  /**
   * Rewrites the log without dead records: replays the old file into an in-memory
   * hash -> file-id map (removals applied), writes the surviving pairs to a fresh
   * "new.<name>" file, deletes the old file family, renames the new files into
   * place, and finally removes the compaction marker.
   */
  private void performCompaction() throws IOException {
    try {
      Int2ObjectMap<IntSet> data = new Int2ObjectOpenHashMap<>();
      Path oldDataFile = getDataFile();
      AppendableStorageBackedByResizableMappedFile<int[]> oldMapping = openMapping(oldDataFile, 0);

      // Phase 1: replay the old log under the read lock.
      oldMapping.lockRead();
      try {
        oldMapping.processAll((offset, key) -> {
          int inputId = key[1];
          int keyHash = key[0];
          int absInputId = Math.abs(inputId);
          if (inputId > 0) {
            data.computeIfAbsent(keyHash, __ -> new IntOpenHashSet()).add(absInputId);
          }
          else {
            IntSet associatedInputIds = data.get(keyHash);
            if (associatedInputIds != null) {
              associatedInputIds.remove(absInputId);
            }
          }
          return true;
        });
        oldMapping.close();
      }
      finally {
        oldMapping.unlockRead();
      }

      String dataFileName = oldDataFile.getFileName().toString();
      String newDataFileName = "new." + dataFileName;
      Path newDataFile = oldDataFile.resolveSibling(newDataFileName);
      // 32 * 2 * size: capacity hint for the surviving records — generous upper bound.
      AppendableStorageBackedByResizableMappedFile<int[]> newMapping = openMapping(newDataFile, 32 * 2 * data.size());

      // Phase 2: write surviving pairs to the new file under the write lock.
      newMapping.lockWrite();
      try {
        for (Int2ObjectMap.Entry<IntSet> entry : data.int2ObjectEntrySet()) {
          int keyHash = entry.getIntKey();
          IntIterator inputIdIterator = entry.getValue().iterator();
          while (inputIdIterator.hasNext()) {
            int inputId = inputIdIterator.nextInt();
            newMapping.append(new int[]{keyHash, inputId});
          }
        }
        newMapping.close();
      }
      finally {
        newMapping.unlockWrite();
      }

      // Phase 3: swap the new file family in place of the old one.
      IOUtil.deleteAllFilesStartingWith(oldDataFile.toFile());
      try (DirectoryStream<Path> paths = Files.newDirectoryStream(newDataFile.getParent())) {
        for (Path path : paths) {
          String name = path.getFileName().toString();
          if (name.startsWith(newDataFileName)) {
            FileUtil.rename(path.toFile(), dataFileName + name.substring(newDataFileName.length()));
          }
        }
      }
      try {
        Files.delete(getCompactionMarker());
      }
      catch (IOException ignored) {}
    } catch (ProcessCanceledException e) {
      // cancellation here would leave a half-compacted log; record it loudly
      LOG.error(e);
      throw e;
    }
  }
@NotNull
private static IntSet loadProjectHashes(@NotNull Path fileWithCaches) throws IOException {
try (DataInputStream inputStream = new DataInputStream(new BufferedInputStream(Files.newInputStream(fileWithCaches)))) {
int capacity = DataInputOutputUtil.readINT(inputStream);
IntSet hashMaskSet = new IntOpenHashSet(capacity);
while (capacity > 0) {
hashMaskSet.add(DataInputOutputUtil.readINT(inputStream));
--capacity;
}
return hashMaskSet;
}
}
private void saveHashedIds(@NotNull IntSet hashMaskSet, int largestId, @NotNull IdFilter.FilterScopeType scopeType, @NotNull Project project) {
@NotNull Path newFileWithCaches = getSavedProjectFileValueIds(largestId, scopeType, project);
boolean savedSuccessfully = true;
try (com.intellij.util.io.DataOutputStream stream = new DataOutputStream(new BufferedOutputStream(Files.newOutputStream(newFileWithCaches)))) {
DataInputOutputUtil.writeINT(stream, hashMaskSet.size());
IntIterator iterator = hashMaskSet.iterator();
while (iterator.hasNext()) {
DataInputOutputUtil.writeINT(stream, iterator.nextInt());
}
}
catch (IOException ignored) {
savedSuccessfully = false;
}
if (savedSuccessfully) {
myLastScannedId = largestId;
}
}
// Lazily created per-process temp directory holding all session cache files.
// volatile is required for safe publication under the double-checked locking
// in getSessionDir().
private static volatile Path mySessionDirectory;
private static final Object mySessionDirectoryLock = new Object();

/**
 * Returns the process-wide session temp directory, creating it on first use.
 * Uses double-checked locking: the unsynchronized fast path reads the volatile
 * field once; the slow path re-checks under the lock before creating.
 *
 * @throws RuntimeException if the temp directory cannot be created
 */
private static Path getSessionDir() {
  Path sessionDirectory = mySessionDirectory;
  if (sessionDirectory == null) {
    synchronized (mySessionDirectoryLock) {
      // Re-check: another thread may have created it while we waited.
      sessionDirectory = mySessionDirectory;
      if (sessionDirectory == null) {
        try {
          // Directory name is the current time in millis, unique enough per process.
          mySessionDirectory = sessionDirectory = FileUtil
            .createTempDirectory(new File(PathManager.getTempPath()), Long.toString(System.currentTimeMillis()), "", true).toPath();
        } catch (IOException ex) {
          throw new RuntimeException("Can not create temp directory", ex);
        }
      }
    }
  }
  return sessionDirectory;
}
/**
 * Computes the session-cache file path for the given scan id, scope and project.
 * The name encodes data-file name, project identity hash, scan id and scope so
 * different projects/scopes never collide.
 */
@NotNull
private Path getSavedProjectFileValueIds(int id, @NotNull IdFilter.FilterScopeType scopeType, @NotNull Project project) {
  String cacheFileName = getDataFile().getFileName().toString()
                         + "." + project.hashCode()
                         + "." + id
                         + "." + scopeType.getId();
  return getSessionDir().resolve(cacheFileName);
}
/**
 * Drops the in-session cache of the key-hash-to-file mapping: remembers the
 * currently cached scan id as invalidated and resets {@code myLastScannedId}.
 * No-op when nothing has been cached yet.
 */
private void invalidateKeyHashToVirtualFileMappingCache() {
  int cachedId = myLastScannedId;
  if (cachedId == 0) {
    return; // nothing cached, nothing to invalidate
  }
  // we have write lock
  myInvalidatedSessionIds.putIfAbsent(cachedId, Boolean.TRUE);
  myLastScannedId = 0;
}
/**
 * Runs {@code r} while holding the mapping's read or write lock, guaranteeing
 * the matching unlock even if {@code r} throws.
 *
 * @param r    the action to run under the lock
 * @param read {@code true} to take the read lock, {@code false} for the write lock
 * @throws T whatever {@code r} throws
 */
private <T extends Throwable> void withLock(ThrowableRunnable<T> r, boolean read) throws T {
  if (read) {
    myKeyHashToVirtualFileMapping.lockRead();
    try {
      r.run();
    }
    finally {
      myKeyHashToVirtualFileMapping.unlockRead();
    }
  }
  else {
    myKeyHashToVirtualFileMapping.lockWrite();
    try {
      r.run();
    }
    finally {
      myKeyHashToVirtualFileMapping.unlockWrite();
    }
  }
}
/**
 * Flags this storage as requiring compaction by creating the marker file on
 * disk (see {@link #getCompactionMarker()}). Safe to call concurrently: a race
 * that creates the marker first is harmless and is swallowed.
 */
private void setRequiresCompaction() {
  Path marker = getCompactionMarker();
  if (!Files.exists(marker)) {
    try {
      Files.createDirectories(marker.getParent());
      Files.createFile(marker);
    }
    catch (FileAlreadyExistsException ignored) {
      // Another thread/process won the race — the marker exists, which is all we need.
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }
}
// A pending compaction request is signalled purely by the marker file's
// presence on disk, so it survives process restarts.
@VisibleForTesting
boolean isRequiresCompaction() {
  return Files.exists(getCompactionMarker());
}
/**
 * Returns the path of the on-disk marker file whose existence signals that
 * this storage needs compaction. The marker lives next to the data file.
 */
@NotNull
private Path getCompactionMarker() {
  Path dataFile = getDataFile();
  String markerName = dataFile.getFileName().toString() + ".require.compaction";
  return dataFile.resolveSibling(markerName);
}
/**
 * Returns the path of this log's data file: the base storage file name with a
 * ".project" suffix, in the same directory as the base storage file.
 */
@NotNull
private Path getDataFile() {
  String dataName = myBaseStorageFile.getFileName() + ".project";
  return myBaseStorageFile.resolveSibling(dataName);
}
/**
 * Externalizer for a pair of ints stored as a two-element array; both values
 * are written and read as varints in index order.
 */
private static class IntPairInArrayKeyDescriptor implements DataExternalizer<int[]> {
  private static final IntPairInArrayKeyDescriptor INSTANCE = new IntPairInArrayKeyDescriptor();

  @Override
  public void save(@NotNull DataOutput out, int[] value) throws IOException {
    DataInputOutputUtil.writeINT(out, value[0]);
    DataInputOutputUtil.writeINT(out, value[1]);
  }

  @Override
  public int[] read(@NotNull DataInput in) throws IOException {
    // Read into locals so the two readINT calls are unambiguously ordered.
    int first = DataInputOutputUtil.readINT(in);
    int second = DataInputOutputUtil.readINT(in);
    return new int[]{first, second};
  }
}
/**
 * Diagnostic entry point: opens the key-hash log at the path given as the first
 * argument and prints every key hash recorded in it.
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public static void main(String[] args) throws Exception {
  String indexPath = args[0];
  try (KeyHashLog<String> keyHashLog =
         new KeyHashLog<>(EnumeratorStringDescriptor.INSTANCE, Path.of(indexPath), false)) {
    // Accept-everything filter: we want all hashes, regardless of file id.
    IdFilter acceptAll = new IdFilter() {
      @Override
      public boolean containsFileId(int id) {
        return true;
      }
    };
    IntSet allHashes = keyHashLog.getSuitableKeyHashes(acceptAll);
    IntIterator it = allHashes.iterator();
    while (it.hasNext()) {
      System.out.println("key hash = " + it.nextInt());
    }
  }
}
}
| |
/*
* ice4j, the OpenSource Java Solution for NAT and Firewall Traversal.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ice4j.ice.harvest;
import java.io.*;
import java.lang.ref.*;
import java.net.*;
import java.nio.*;
import java.nio.channels.*;
import java.util.*;
import java.util.logging.*;
import org.ice4j.*;
import org.ice4j.attribute.*;
import org.ice4j.ice.*;
import org.ice4j.message.*;
import org.ice4j.socket.*;
/**
* A <tt>CandidateHarvester</tt> implementation, which listens on a specified
* list of TCP server sockets. On {@link #harvest(org.ice4j.ice.Component)}, a
* TCP candidate with type "passive" is added for each of the server sockets.
*
* This instance runs two threads: {@link #acceptThread} and
* {@link #readThread}. The 'accept' thread just accepts new <tt>Socket</tt>s
* and passes them over to the 'read' thread. The 'read' thread reads a STUN
* message from an accepted socket and, based on the STUN username, passes it
* to the appropriate <tt>Component</tt>.
*
* @author Boris Grozev
* @author Lyubomir Marinov
*/
public class TcpHarvester
extends CandidateHarvester
{
/**
* Our class logger.
*/
private static final Logger logger
= Logger.getLogger(TcpHarvester.class.getName());
/**
* The constant which specifies how often to perform purging on
* {@link #components}.
*/
private static final int PURGE_INTERVAL = 20;
/**
 * Closes a {@code Channel} and swallows any {@link IOException}.
 * Thin delegation to {@link MuxServerSocketChannelFactory#closeNoExceptions}.
 *
 * @param channel the {@code Channel} to close
 */
static void closeNoExceptions(Channel channel)
{
    MuxServerSocketChannelFactory.closeNoExceptions(channel);
}
/**
 * Returns a list of all addresses on the interfaces in <tt>interfaces</tt>
 * which are found suitable for candidate allocations (are not loopback, are
 * up, and are allowed by the configuration).
 *
 * @param port the port to use.
 * @param interfaces the list of interfaces to use.
 * @return the suitable addresses, each wrapped as a TCP
 * <tt>TransportAddress</tt> with the given port.
 * @throws IOException declared for API compatibility.
 */
private static List<TransportAddress> getLocalAddresses(
        int port,
        List<NetworkInterface> interfaces)
    throws IOException
{
    List<TransportAddress> addresses = new LinkedList<TransportAddress>();

    for (NetworkInterface iface : interfaces)
    {
        boolean usable
            = !NetworkUtils.isInterfaceLoopback(iface)
                && NetworkUtils.isInterfaceUp(iface)
                && HostCandidateHarvester.isInterfaceAllowed(iface);
        if (!usable)
        {
            // Loopback, down, or disallowed by configuration.
            continue;
        }

        for (InetAddress addr : Collections.list(iface.getInetAddresses()))
        {
            addresses.add(new TransportAddress(addr, port, Transport.TCP));
        }
    }

    return addresses;
}
/**
 * The thread which <tt>accept</tt>s TCP connections from the sockets in
 * {@link #serverSocketChannels}.
 */
private AcceptThread acceptThread;

/**
 * Triggers the termination of the threads of this instance.
 *
 * Declared <tt>volatile</tt> because it is written by {@link #close()} from
 * an arbitrary thread and polled by the accept thread's loop without any
 * other synchronization; without <tt>volatile</tt> the loop is not
 * guaranteed to ever observe the update.
 */
private volatile boolean close = false;

/**
 * Maps a local "ufrag" to the single <tt>Component</tt> instance with that
 * "ufrag".
 *
 * We only keep weak references, because we do not want to prevent
 * <tt>Component</tt>s from being freed. Access is synchronized on the map
 * itself.
 */
private final Map<String, WeakReference<Component>> components
    = new HashMap<String, WeakReference<Component>>();

/**
 * The list of transport addresses which we have found to be listening on,
 * and which we will advertise as candidates in
 * {@link #harvest(org.ice4j.ice.Component)}
 */
private final List<TransportAddress> localAddresses
    = new LinkedList<TransportAddress>();

/**
 * Maps a public address to a local address.
 */
private final Map<InetAddress, InetAddress> mappedAddresses
    = new HashMap<InetAddress, InetAddress>();

/**
 * Sets of additional ports, for which server reflexive candidates will be
 * added.
 */
private final Set<Integer> mappedPorts = new HashSet<Integer>();

/**
 * Channels pending to be added to the list that {@link #readThread} reads
 * from. Access is synchronized on the list itself.
 */
private final List<SocketChannel> newChannels
    = new LinkedList<SocketChannel>();

/**
 * A counter used to decide when to purge {@link #components}.
 */
private int purgeCounter = 0;

/**
 * The <tt>Selector</tt> used by {@link #readThread}.
 */
private final Selector readSelector = Selector.open();

/**
 * The thread which reads from the already <tt>accept</tt>ed sockets.
 */
private ReadThread readThread;

/**
 * The list of <tt>ServerSocketChannel</tt>s that we will <tt>accept</tt>
 * on.
 */
private final List<ServerSocketChannel> serverSocketChannels
    = new LinkedList<ServerSocketChannel>();

/**
 * Whether or not to use ssltcp.
 */
private final boolean ssltcp;
/**
 * Initializes a new <tt>TcpHarvester</tt>, which is to
 * listen on port number <tt>port</tt> on all IP addresses on all available
 * interfaces.
 *
 * @param port the port to listen on.
 * @throws IOException if the sockets or threads cannot be set up.
 */
public TcpHarvester(int port)
    throws IOException
{
    this(port, /* ssltcp */ false);
}

/**
 * Initializes a new <tt>TcpHarvester</tt>, which is to
 * listen on port number <tt>port</tt> on all IP addresses on all available
 * interfaces.
 *
 * @param port the port to listen on.
 * @param ssltcp <tt>true</tt> to use ssltcp; otherwise, <tt>false</tt>
 * @throws IOException if the sockets or threads cannot be set up.
 */
public TcpHarvester(int port, boolean ssltcp)
    throws IOException
{
    this(port,
         Collections.list(NetworkInterface.getNetworkInterfaces()),
         ssltcp);
}

/**
 * Initializes a new <tt>TcpHarvester</tt>, which is to
 * listen on port number <tt>port</tt> on all the IP addresses on the
 * specified <tt>NetworkInterface</tt>s.
 *
 * @param port the port to listen on.
 * @param interfaces the interfaces to listen on.
 * @param ssltcp <tt>true</tt> to use ssltcp; otherwise, <tt>false</tt>
 * @throws IOException if the sockets or threads cannot be set up.
 */
public TcpHarvester(int port,
                    List<NetworkInterface> interfaces,
                    boolean ssltcp)
    throws IOException
{
    this(getLocalAddresses(port, interfaces), ssltcp);
}

/**
 * Initializes a new <tt>TcpHarvester</tt>, which is to
 * listen on the specified list of <tt>TransportAddress</tt>es.
 *
 * @param transportAddresses the transport addresses to listen on.
 * @throws IOException if the sockets or threads cannot be set up.
 */
public TcpHarvester(
        List<TransportAddress> transportAddresses)
    throws IOException
{
    this(transportAddresses, /* ssltcp */ false);
}

/**
 * Initializes a new <tt>TcpHarvester</tt>, which is to
 * listen on the specified list of <tt>TransportAddress</tt>es.
 * This is the designated constructor: it filters the addresses, binds the
 * server sockets and starts the accept/read threads.
 *
 * @param transportAddresses the transport addresses to listen on.
 * @param ssltcp <tt>true</tt> to use ssltcp; otherwise, <tt>false</tt>
 * @throws IOException if the sockets or threads cannot be set up.
 */
public TcpHarvester(
        List<TransportAddress> transportAddresses,
        boolean ssltcp)
    throws IOException
{
    this.ssltcp = ssltcp;
    addLocalAddresses(transportAddresses);
    init();
}
/**
 * Adds to {@link #localAddresses} those addresses from
 * <tt>transportAddresses</tt> which are found suitable for candidate
 * allocation: loopback addresses are skipped, IPv6 usage honours the stack
 * configuration, and the configured allow/block lists are applied.
 *
 * @param transportAddresses the list of addresses to add.
 * @throws IOException if a configured address cannot be resolved.
 */
private void addLocalAddresses(List<TransportAddress> transportAddresses)
    throws IOException
{
    boolean useIPv6 = !StackProperties.getBoolean(
            StackProperties.DISABLE_IPv6,
            false);
    boolean useIPv6LinkLocal = !StackProperties.getBoolean(
            StackProperties.DISABLE_LINK_LOCAL_ADDRESSES,
            false);

    // White list from the configuration
    InetAddress[] allowedAddresses
        = resolveConfiguredAddresses(StackProperties.ALLOWED_ADDRESSES);
    // Black list from the configuration
    InetAddress[] blockedAddresses
        = resolveConfiguredAddresses(StackProperties.BLOCKED_ADDRESSES);

    for (TransportAddress transportAddress : transportAddresses)
    {
        InetAddress address = transportAddress.getAddress();

        if (address.isLoopbackAddress())
        {
            //loopback again
            continue;
        }

        if (!useIPv6 && (address instanceof Inet6Address))
            continue;

        if (!useIPv6LinkLocal
                && (address instanceof Inet6Address)
                && address.isLinkLocalAddress())
        {
            logger.info("Not using link-local address " + address + " for"
                            + " TCP candidates.");
            continue;
        }

        if (allowedAddresses != null
                && !Arrays.asList(allowedAddresses).contains(address))
        {
            logger.info("Not using " + address + " for TCP candidates, "
                            + "because it is not in the allowed list.");
            continue;
        }

        if (blockedAddresses != null
                && Arrays.asList(blockedAddresses).contains(address))
        {
            logger.info("Not using " + address + " for TCP candidates, "
                            + "because it is in the blocked list.");
            continue;
        }

        // Passed all checks
        localAddresses.add(transportAddress);
    }
}

/**
 * Reads the ";"-separated stack property with the given name and resolves
 * each entry to an <tt>InetAddress</tt>.
 *
 * @param propertyName the name of the stack property to read.
 * @return the resolved addresses, or <tt>null</tt> if the property is not
 * set.
 * @throws UnknownHostException if one of the entries cannot be resolved.
 */
private static InetAddress[] resolveConfiguredAddresses(String propertyName)
    throws UnknownHostException
{
    String[] addressesStr
        = StackProperties.getStringArray(propertyName, ";");
    if (addressesStr == null)
        return null;

    InetAddress[] addresses = new InetAddress[addressesStr.length];
    for (int i = 0; i < addressesStr.length; i++)
        addresses[i] = InetAddress.getByName(addressesStr[i]);
    return addresses;
}
/**
 * Adds a mapping between <tt>publicAddress</tt> and <tt>localAddress</tt>.
 * This means that on harvest, along with any host candidates that have
 * <tt>publicAddress</tt>, a server reflexive candidate will be added (with
 * the same port as the host candidate).
 *
 * NOTE(review): the map is read during {@link #harvest(Component)} without
 * synchronization; mappings are expected to be configured before harvesting
 * starts — confirm with callers.
 *
 * @param publicAddress the public address.
 * @param localAddress the local address.
 */
public void addMappedAddress(InetAddress publicAddress,
                             InetAddress localAddress)
{
    mappedAddresses.put(publicAddress, localAddress);
}
/**
 * Adds port as an additional port. When harvesting, additional server
 * reflexive candidates will be added with this port.
 *
 * NOTE(review): like the mapped addresses, this set is read during harvest
 * without synchronization; configure before harvesting starts.
 *
 * @param port the port to add.
 */
public void addMappedPort(int port)
{
    mappedPorts.add(port);
}
/**
 * Triggers the termination of the threads of this
 * <tt>MultiplexingTcpHarvester</tt>.
 * The accept thread polls this flag between selects, so shutdown is not
 * immediate — it happens on the next loop iteration.
 */
public void close()
{
    close = true;
}
/**
 * Creates and returns the list of <tt>LocalCandidate</tt>s which are to be
 * added by this <tt>TcpHarvester</tt> to a specific
 * <tt>Component</tt>: one passive TCP host candidate per listening address,
 * plus server reflexive candidates derived from the configured mapped
 * addresses and mapped ports.
 *
 * @param component the <tt>Component</tt> for which to create candidates.
 * @return the list of <tt>LocalCandidate</tt>s which are to be added by
 * this <tt>TcpHarvester</tt> to a specific
 * <tt>Component</tt>.
 */
private List<LocalCandidate> createLocalCandidates(Component component)
{
    // Host candidates for the addresses we really listen on.
    List<TcpHostCandidate> hostCandidates
        = new LinkedList<TcpHostCandidate>();
    for (TransportAddress transportAddress : localAddresses)
    {
        TcpHostCandidate candidate
            = new TcpHostCandidate(transportAddress, component);
        candidate.setTcpType(CandidateTcpType.PASSIVE);
        if (ssltcp)
            candidate.setSSL(true);
        hostCandidates.add(candidate);
    }

    // srflx candidates for any statically mapped addresses: one for each
    // host candidate whose address is the local side of a mapping.
    List<LocalCandidate> mappedCandidates
        = new LinkedList<LocalCandidate>();
    for (Map.Entry<InetAddress, InetAddress> mapping
            : mappedAddresses.entrySet())
    {
        InetAddress localAddress = mapping.getValue();
        for (TcpHostCandidate base : hostCandidates)
        {
            TransportAddress baseTransportAddress
                = base.getTransportAddress();
            if (localAddress.equals(baseTransportAddress.getAddress()))
            {
                mappedCandidates.add(
                        createMappedCandidate(
                                base,
                                mapping.getKey(),
                                baseTransportAddress.getPort()));
            }
        }
    }

    // srflx candidates for the additional mapped ports, first for the host
    // candidates...
    List<LocalCandidate> portMappedCandidates
        = new LinkedList<LocalCandidate>();
    for (TcpHostCandidate base : hostCandidates)
    {
        for (Integer port : mappedPorts)
        {
            portMappedCandidates.add(
                    createMappedCandidate(
                            base,
                            base.getTransportAddress().getAddress(),
                            port));
        }
    }
    // ... and then for the mapped-address candidates.
    for (LocalCandidate mappedCandidate : mappedCandidates)
    {
        TcpHostCandidate base
            = (TcpHostCandidate) mappedCandidate.getBase();
        for (Integer port : mappedPorts)
        {
            portMappedCandidates.add(
                    createMappedCandidate(
                            base,
                            mappedCandidate.getTransportAddress()
                                    .getAddress(),
                            port));
        }
    }

    LinkedList<LocalCandidate> allCandidates
        = new LinkedList<LocalCandidate>();
    allCandidates.addAll(hostCandidates);
    allCandidates.addAll(mappedCandidates);
    allCandidates.addAll(portMappedCandidates);
    return allCandidates;
}

/**
 * Builds a passive TCP <tt>ServerReflexiveCandidate</tt> with the given
 * address and port on top of <tt>base</tt>, inheriting <tt>base</tt>'s SSL
 * flag.
 *
 * @param base the host candidate to use as the base.
 * @param address the address of the new candidate.
 * @param port the port of the new candidate.
 * @return the new server reflexive candidate.
 */
private static ServerReflexiveCandidate createMappedCandidate(
        TcpHostCandidate base,
        InetAddress address,
        int port)
{
    ServerReflexiveCandidate candidate
        = new ServerReflexiveCandidate(
                new TransportAddress(address, port, Transport.TCP),
                base,
                base.getStunServerAddress(),
                CandidateExtendedType.STATICALLY_MAPPED_CANDIDATE);
    if (base.isSSL())
        candidate.setSSL(true);
    candidate.setTcpType(CandidateTcpType.PASSIVE);
    return candidate;
}
/**
 * Returns the <tt>Component</tt> instance, if any, for a given local
 * "ufrag". Stale entries whose weak reference has been cleared are removed
 * as a side effect.
 *
 * @param localUfrag the local "ufrag"
 * @return the <tt>Component</tt> instance, if any, for a given local
 * "ufrag".
 */
private Component getComponent(String localUfrag)
{
    synchronized (components)
    {
        WeakReference<Component> ref = components.get(localUfrag);
        if (ref == null)
            return null;

        Component component = ref.get();
        if (component == null)
        {
            // The Component has been garbage-collected; drop the entry.
            components.remove(localUfrag);
        }
        return component;
    }
}
/**
 * {@inheritDoc}
 *
 * Saves a (weak) reference to <tt>Component</tt>, so that it can be
 * notified if/when a socket for one of it <tt>LocalCandidate</tt>s is
 * accepted.
 * <p>
 * The method does not perform any network operations and should return
 * quickly.
 * </p>
 */
@Override
public Collection<LocalCandidate> harvest(Component component)
{
    IceMediaStream stream = component.getParentStream();
    Agent agent = stream.getParentAgent();

    /*
     * TcpHarvester only works with streams with a
     * single component, and agents with a single stream. This is
     * because we use the local "ufrag" to de-multiplex the accept()-ed
     * sockets between the known components.
     */
    boolean supported
        = stream.getComponentCount() == 1 && agent.getStreamCount() == 1;
    if (!supported)
    {
        logger.info(
                "More than one Component for an Agent, cannot harvest.");
        return new LinkedList<LocalCandidate>();
    }

    List<LocalCandidate> candidates = createLocalCandidates(component);
    for (LocalCandidate candidate : candidates)
        component.addLocalCandidate(candidate);

    synchronized (components)
    {
        components.put(agent.getLocalUfrag(),
                       new WeakReference<Component>(component));
        purgeComponents();
    }

    return candidates;
}
/**
 * Initializes {@link #serverSocketChannels}, creates and starts the threads
 * used by this instance. One muxed server socket channel is bound per local
 * address; its filter claims only packets recognized by
 * {@link #isFirstDatagramPacket(DatagramPacket)}.
 *
 * @throws IOException if a channel cannot be bound or a thread's selector
 * cannot be set up.
 */
private void init()
    throws IOException
{
    for (TransportAddress transportAddress : localAddresses)
    {
        ServerSocketChannel channel
            = MuxServerSocketChannelFactory
                .openAndBindMuxServerSocketChannel(
                        /* properties */ null,
                        new InetSocketAddress(
                                transportAddress.getAddress(),
                                transportAddress.getPort()),
                        /* backlog */ 0,
                        new DatagramPacketFilter()
                        {
                            /**
                             * {@inheritDoc}
                             */
                            @Override
                            public boolean accept(DatagramPacket p)
                            {
                                return isFirstDatagramPacket(p);
                            }
                        });

        serverSocketChannels.add(channel);
    }

    acceptThread = new AcceptThread();
    acceptThread.start();

    readThread = new ReadThread();
    readThread.start();
}
/**
 * Determines whether a specific {@link DatagramPacket} is the first
 * expected (i.e. supported) to be received from an accepted
 * {@link SocketChannel} by this {@code TcpHarvester}. If
 * {@link #ssltcp} signals that Google TURN SSLTCP is to be expected, then
 * {@code p} must be
 * {@link GoogleTurnSSLCandidateHarvester#SSL_CLIENT_HANDSHAKE}. Otherwise,
 * it must be a STUN Binding request packetized for TCP.
 *
 * @param p the {@code DatagramPacket} to examine
 * @return {@code true} if {@code p} looks like the first
 * {@code DatagramPacket} expected to be received from an accepted
 * {@code SocketChannel} by this {@code TcpHarvester};
 * otherwise, {@code false}
 */
private boolean isFirstDatagramPacket(DatagramPacket p)
{
    int len = p.getLength();
    if (len <= 0)
        return false;

    byte[] buf = p.getData();
    int off = p.getOffset();

    if (ssltcp)
    {
        // Google TURN SSLTCP: the packet must be exactly the hard-coded
        // pseudo SSL ClientHello.
        byte[] handshake
            = GoogleTurnSSLCandidateHarvester.SSL_CLIENT_HANDSHAKE;
        return len >= handshake.length && regionEquals(handshake, buf, off);
    }

    // RFC4571 framing:
    //   2 bytes  uint16 frame length
    // followed by a STUN Binding request:
    //   2 bits   00 (class=request)
    //   14 bits  STUN Message Type (method=Binding)
    //   2 bytes  Message Length
    //   4 bytes  Magic Cookie
    // RFC 5389: a Binding request has class=0b00 and
    // method=0b000000000001, encoded into the first 16 bits as 0x0001 —
    // hence the 0x00, 0x01 check at offsets 2 and 3.
    return len >= 10
        && buf[off + 2] == 0
        && buf[off + 3] == 1
        && regionEquals(Message.MAGIC_COOKIE, buf, off + 6);
}

/**
 * Checks whether the {@code expected.length} bytes of {@code buf} starting
 * at {@code off} equal {@code expected}. The caller guarantees that
 * {@code buf} is long enough.
 */
private static boolean regionEquals(byte[] expected, byte[] buf, int off)
{
    for (int i = 0; i < expected.length; i++)
    {
        if (expected[i] != buf[off + i])
            return false;
    }
    return true;
}
/**
 * Removes entries from {@link #components} for which the
 * <tt>WeakReference</tt> has been cleared. The scan only runs on every
 * {@link #PURGE_INTERVAL}-th call to keep the common path cheap.
 */
private void purgeComponents()
{
    if (++purgeCounter % PURGE_INTERVAL != 0)
        return;

    synchronized (components)
    {
        Iterator<WeakReference<Component>> it
            = components.values().iterator();
        while (it.hasNext())
        {
            if (it.next().get() == null)
                it.remove();
        }
    }
}
/**
 * {@inheritDoc}
 *
 * Always <tt>true</tt>: the candidates produced by this harvester are host
 * (and statically mapped srflx) candidates on local addresses.
 */
@Override
public boolean isHostHarvester()
{
    return true;
}
/**
 * A <tt>Thread</tt> which will accept new <tt>SocketChannel</tt>s from all
 * <tt>ServerSocketChannel</tt>s in {@link #serverSocketChannels}.
 * Runs until {@link #close} is set or an I/O error occurs, then closes the
 * server channels and its selector.
 */
private class AcceptThread
    extends Thread
{
    /**
     * The <tt>Selector</tt> used to select a specific
     * <tt>ServerSocketChannel</tt> which is ready to <tt>accept</tt>.
     */
    private final Selector selector;

    /**
     * Initializes a new <tt>AcceptThread</tt>: registers every server
     * socket channel with a fresh selector for OP_ACCEPT.
     *
     * @throws IOException if the selector cannot be opened or a channel
     * cannot be registered.
     */
    public AcceptThread()
        throws IOException
    {
        setName("TcpHarvester AcceptThread");
        setDaemon(true);
        selector = Selector.open();
        for (ServerSocketChannel channel : serverSocketChannels)
        {
            channel.configureBlocking(false);
            channel.register(selector, SelectionKey.OP_ACCEPT);
        }
    }

    /**
     * Notifies {@link #readThread} that new channels have been added to
     * {@link #newChannels}, by waking its selector.
     */
    private void notifyReadThread()
    {
        readSelector.wakeup();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void run()
    {
        do
        {
            if (close)
            {
                break;
            }

            IOException exception = null;
            List<SocketChannel> channelsToAdd
                = new LinkedList<SocketChannel>();
            // Allow to go on, so we can quit if closed.
            long selectTimeout = 3000;
            for (SelectionKey key : selector.keys())
            {
                if (key.isValid())
                {
                    SocketChannel channel;
                    boolean acceptable = key.isAcceptable();

                    try
                    {
                        channel
                            = ((ServerSocketChannel) key.channel())
                                .accept();
                    }
                    catch (IOException ioe)
                    {
                        exception = ioe;
                        break;
                    }

                    // Add the accepted channel to newChannels to allow the
                    // 'read' thread to pick it up.
                    if (channel != null)
                    {
                        channelsToAdd.add(channel);
                    }
                    else if (acceptable)
                    {
                        // The SelectionKey reported the channel as
                        // acceptable but channel#accept() did not accept a
                        // non-null SocketChannel. Give the channel a little
                        // time to get its act together.
                        selectTimeout = 100;
                    }
                }
            }
            // We accepted from all serverSocketChannels.
            selector.selectedKeys().clear();

            if (!channelsToAdd.isEmpty())
            {
                synchronized (newChannels)
                {
                    newChannels.addAll(channelsToAdd);
                }
                notifyReadThread();
            }

            if (exception != null)
            {
                logger.info(
                        "Failed to accept a socket, which should have been"
                            + " ready to accept: " + exception);
                break;
            }

            try
            {
                // Allow to go on, so we can quit if closed.
                selector.select(selectTimeout);
            }
            catch (IOException ioe)
            {
                logger.info(
                        "Failed to select an accept-ready socket: " + ioe);
                break;
            }
        }
        while (true);

        //now clean up and exit
        for (ServerSocketChannel serverSocketChannel : serverSocketChannels)
            closeNoExceptions(serverSocketChannel);

        try
        {
            selector.close();
        }
        catch (IOException ioe)
        {}
    }
}
/**
 * Contains a <tt>SocketChannel</tt> that <tt>ReadThread</tt> is reading
 * from, together with the per-channel parsing state (current buffer, SSL
 * handshake progress, RFC4571 frame length, last-activity timestamp).
 */
private static class ChannelDesc
{
    /**
     * The actual <tt>SocketChannel</tt>.
     */
    public final SocketChannel channel;

    /**
     * The time the channel was last found to be active, used by the read
     * thread to detect and close timed-out channels.
     */
    long lastActive = System.currentTimeMillis();

    /**
     * The buffer which stores the data so far read from the channel.
     * <tt>null</tt> between parsing stages; re-allocated with the size
     * expected by the next stage.
     */
    ByteBuffer buffer = null;

    /**
     * Whether or not the initial "pseudo" SSL handshake has been read.
     */
    boolean sslHandshakeRead = false;

    /**
     * The value of the RFC4571 "length" field read from the channel, or
     * -1 if it hasn't been read (yet).
     */
    int length = -1;

    /**
     * Initializes a new <tt>ChannelDesc</tt> with the given channel.
     * @param channel the channel.
     */
    public ChannelDesc(SocketChannel channel)
    {
        this.channel = channel;
    }
}
/**
 * An <tt>IceSocketWrapper</tt> implementation which allows a
 * <tt>DatagramPacket</tt> to be pushed back and received on the first call
 * to {@link #receive(java.net.DatagramPacket)}. All other operations simply
 * delegate to the wrapped <tt>IceSocketWrapper</tt>.
 */
private static class PushBackIceSocketWrapper
    extends IceSocketWrapper
{
    /**
     * The <tt>DatagramPacket</tt> which will be used on the first call to
     * {@link #receive(java.net.DatagramPacket)}; <tt>null</tt> once it has
     * been consumed.
     */
    private DatagramPacket datagramPacket;

    /**
     * The <tt>IceSocketWrapper</tt> that this instance wraps around.
     */
    private final IceSocketWrapper wrapped;

    /**
     * Initializes a new <tt>PushBackIceSocketWrapper</tt> instance that
     * wraps around <tt>wrappedWrapper</tt> and reads from
     * <tt>datagramSocket</tt> on the first call to
     * {@link #receive(java.net.DatagramPacket)}
     *
     * @param wrappedWrapper the <tt>IceSocketWrapper</tt> instance that we
     * wrap around.
     * @param datagramPacket the <tt>DatagramPacket</tt> which will be used
     * on the first call to {@link #receive(java.net.DatagramPacket)}
     */
    public PushBackIceSocketWrapper(IceSocketWrapper wrappedWrapper,
                                    DatagramPacket datagramPacket)
    {
        this.wrapped = wrappedWrapper;
        this.datagramPacket = datagramPacket;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void close()
    {
        wrapped.close();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InetAddress getLocalAddress()
    {
        return wrapped.getLocalAddress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getLocalPort()
    {
        return wrapped.getLocalPort();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SocketAddress getLocalSocketAddress()
    {
        return wrapped.getLocalSocketAddress();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Socket getTCPSocket()
    {
        return wrapped.getTCPSocket();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public DatagramSocket getUDPSocket()
    {
        return wrapped.getUDPSocket();
    }

    /**
     * {@inheritDoc}
     *
     * On the first call to this instance reads from
     * {@link #datagramPacket}, on subsequent calls delegates to
     * {@link #wrapped}.
     */
    @Override
    public void receive(DatagramPacket p) throws IOException
    {
        if (datagramPacket != null)
        {
            int len = Math.min(p.getLength(), datagramPacket.getLength());

            // BUGFIX: honor both packets' offsets and record the number of
            // bytes actually delivered. Previously the copy ignored the
            // offsets and p's length was left at the caller's buffer size,
            // so the caller could read stale bytes past the pushed-back
            // data.
            System.arraycopy(datagramPacket.getData(),
                             datagramPacket.getOffset(),
                             p.getData(),
                             p.getOffset(),
                             len);
            p.setLength(len);
            p.setAddress(datagramPacket.getAddress());
            p.setPort(datagramPacket.getPort());
            datagramPacket = null;
        }
        else
        {
            wrapped.receive(p);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void send(DatagramPacket p) throws IOException
    {
        wrapped.send(p);
    }
}
private class ReadThread
extends Thread
{
/**
 * Initializes a new <tt>ReadThread</tt> as a named daemon thread. The
 * selector it uses is the harvester-level {@link #readSelector}, so nothing
 * is opened here.
 *
 * @throws IOException if the selector to be used fails to open.
 */
public ReadThread()
    throws IOException
{
    setName("TcpHarvester ReadThread");
    setDaemon(true);
}
/**
 * Drains {@link #newChannels}, registering each pending channel with
 * {@link #readSelector} for OP_READ with a fresh {@link ChannelDesc}
 * attachment. Channels that fail to register are closed.
 */
private void checkForNewChannels()
{
    synchronized (newChannels)
    {
        for (Iterator<SocketChannel> it = newChannels.iterator();
                it.hasNext();)
        {
            SocketChannel channel = it.next();
            it.remove();

            try
            {
                channel.configureBlocking(false);
                channel.register(
                        readSelector,
                        SelectionKey.OP_READ,
                        new ChannelDesc(channel));
            }
            catch (IOException ioe)
            {
                logger.info("Failed to register channel: " + ioe);
                closeNoExceptions(channel);
            }
        }
    }
}
/**
 * Scans the channels registered with {@link #readSelector} and closes those
 * which have been inactive for longer than the configured read timeout,
 * canceling their selection keys.
 */
private void cleanup()
{
    long now = System.currentTimeMillis();

    for (SelectionKey key : readSelector.keys())
    {
        // An invalid key specifies that either the channel was closed
        // (in which case we do not have to do anything else to it) or
        // that we no longer control the channel (i.e. we do not want to
        // do anything else to it).
        if (!key.isValid())
            continue;

        ChannelDesc desc = (ChannelDesc) key.attachment();
        if (desc == null)
            continue;

        long lastActive = desc.lastActive;
        boolean timedOut
            = lastActive != -1
                && now - lastActive
                    > MuxServerSocketChannelFactory
                        .SOCKET_CHANNEL_READ_TIMEOUT;
        if (!timedOut)
            continue;

        // De-register from the Selector.
        key.cancel();

        SocketChannel channel = desc.channel;
        logger.info("Read timeout for socket: " + channel.socket());
        closeNoExceptions(channel);
    }
}
/**
 * Searches among the local candidates of <tt>Component</tt> for a
 * <tt>TcpHostCandidate</tt> with the same transport address as the
 * local transport address of <tt>socket</tt>.
 *
 * We expect to find such a candidate, which has been added by this
 * <tt>TcpHarvester</tt> while harvesting.
 *
 * @param component the <tt>Component</tt> to search.
 * @param socket the <tt>Socket</tt> to match the local transport
 * address of.
 * @return a <tt>TcpHostCandidate</tt> among the local candidates of
 * <tt>Component</tt> with the same transport address as the local
 * address of <tt>Socket</tt>, or <tt>null</tt> if no such candidate
 * exists.
 */
private TcpHostCandidate findCandidate(
        Component component,
        Socket socket)
{
    InetAddress localAddress = socket.getLocalAddress();
    int localPort = socket.getLocalPort();

    for (LocalCandidate candidate : component.getLocalCandidates())
    {
        if (!(candidate instanceof TcpHostCandidate))
            continue;

        TransportAddress candidateAddress
            = candidate.getTransportAddress();
        if (Transport.TCP.equals(candidateAddress.getTransport())
                && localPort == candidateAddress.getPort()
                && localAddress.equals(candidateAddress.getAddress()))
        {
            return (TcpHostCandidate) candidate;
        }
    }
    return null;
}
/**
 * Makes <tt>socket</tt> available to <tt>component</tt> and pushes back
 * <tt>datagramPacket</tt> into the STUN socket.
 *
 * The socket is only handed over while the agent is WAITING or RUNNING; in
 * every failure path the socket is closed here, since nobody else will.
 *
 * @param socket the <tt>Socket</tt>.
 * @param component the <tt>Component</tt>.
 * @param datagramPacket the <tt>DatagramPacket</tt> to push back.
 */
private void handSocketToComponent(Socket socket,
                                   Component component,
                                   DatagramPacket datagramPacket)
{
    IceProcessingState state
        = component.getParentStream().getParentAgent().getState();
    if (!IceProcessingState.WAITING.equals(state)
            && !IceProcessingState.RUNNING.equals(state))
    {
        logger.info(
                "Not adding a socket to an ICE agent with state "
                    + state);
        return;
    }

    // Socket to add to the candidate
    IceSocketWrapper candidateSocket;
    // STUN-only filtered socket to add to the StunStack
    IceSocketWrapper stunSocket;
    try
    {
        MultiplexingSocket multiplexing = new MultiplexingSocket(socket);
        candidateSocket = new IceTcpSocketWrapper(multiplexing);

        stunSocket
            = new IceTcpSocketWrapper(
                    multiplexing.getSocket(new StunDatagramPacketFilter()));
        stunSocket
            = new PushBackIceSocketWrapper(stunSocket, datagramPacket);
    }
    catch (IOException ioe)
    {
        logger.info("Failed to create sockets: " + ioe);
        // BUGFIX: previously execution fell through with null wrappers and
        // crashed with an NPE at addSocket(stunSocket) below, leaking the
        // accepted socket. Close it and bail out instead.
        try
        {
            socket.close();
        }
        catch (IOException ignored)
        {}
        return;
    }

    TcpHostCandidate candidate = findCandidate(component, socket);
    if (candidate != null)
    {
        component.getParentStream().getParentAgent().getStunStack()
            .addSocket(stunSocket);
        candidate.addSocket(candidateSocket);

        // the socket is not our responsibility anymore. It is up to
        // the candidate/component to close/free it.
    }
    else
    {
        logger.info("Failed to find the local candidate for socket: "
                        + socket);
        try
        {
            socket.close();
        }
        catch (IOException ioe)
        {}
    }
}
/**
 * Tries to read, without blocking, from <tt>channel</tt> to its
 * buffer. If after reading the buffer is filled, handles the data in
 * the buffer.
 *
 * This works in three stages:
 * 1 (optional, only if ssltcp is enabled): Read a fixed-size message.
 * If it matches the hard-coded pseudo SSL ClientHello, sends the
 * hard-coded ServerHello.
 * 2: Read two bytes as an unsigned int and interpret it as the length
 * to read in the next stage.
 * 3: Read number of bytes indicated in stage2 and try to interpret
 * them as a STUN message.
 *
 * If a STUN message is successfully read, and it contains a USERNAME
 * attribute, the local "ufrag" is extracted from the
 * attribute value and the socket is passed on to the <tt>Component</tt>
 * that this <tt>TcpHarvester</tt> has associated with
 * that "ufrag".
 *
 * @param channel the <tt>SocketChannel</tt> to read from.
 * @param key the <tt>SelectionKey</tt> associated with
 * <tt>channel</tt>, which is to be canceled in case no further
 * reading is required from the channel.
 */
private void readFromChannel(ChannelDesc channel, SelectionKey key)
{
    if (channel.buffer == null)
    {
        // Set up a buffer with a pre-determined size, depending on
        // which of the three stages this channel is currently in.
        if (ssltcp && !channel.sslHandshakeRead)
        {
            // Stage 1: expect the fixed-size pseudo SSL ClientHello.
            channel.buffer
                = ByteBuffer.allocate(
                        GoogleTurnSSLCandidateHarvester
                            .SSL_CLIENT_HANDSHAKE.length);
        }
        else if (channel.length == -1)
        {
            // Stage 2: the two-byte frame length prefix.
            channel.buffer = ByteBuffer.allocate(2);
        }
        else
        {
            // Stage 3: the frame itself, sized by stage 2.
            channel.buffer = ByteBuffer.allocate(channel.length);
        }
    }
    try
    {
        int read = channel.channel.read(channel.buffer);
        if (read == -1)
            throw new IOException("End of stream!");
        else if (read > 0)
            channel.lastActive = System.currentTimeMillis();
        if (!channel.buffer.hasRemaining())
        {
            // We've filled in the buffer.
            if (ssltcp && !channel.sslHandshakeRead)
            {
                byte[] bytesRead
                    = new byte[GoogleTurnSSLCandidateHarvester
                        .SSL_CLIENT_HANDSHAKE.length];
                channel.buffer.flip();
                channel.buffer.get(bytesRead);
                // Set to null, so that we re-allocate it for the next
                // stage
                channel.buffer = null;
                channel.sslHandshakeRead = true;
                if (Arrays.equals(bytesRead,
                                  GoogleTurnSSLCandidateHarvester
                                      .SSL_CLIENT_HANDSHAKE))
                {
                    // Matched; reply with the canned ServerHello.
                    ByteBuffer byteBuffer = ByteBuffer.wrap(
                            GoogleTurnSSLCandidateHarvester
                                .SSL_SERVER_HANDSHAKE);
                    channel.channel.write(byteBuffer);
                }
                else
                {
                    throw new IOException("Expected a pseudo ssl"
                        + " handshake, but received something else.");
                }
            }
            else if (channel.length == -1)
            {
                channel.buffer.flip();
                // Two bytes, big-endian, masked to unsigned.
                int fb = channel.buffer.get();
                int sb = channel.buffer.get();
                channel.length = (((fb & 0xff) << 8) | (sb & 0xff));
                // Set to null, so that we re-allocate it for the next
                // stage
                channel.buffer = null;
            }
            else
            {
                byte[] bytesRead = new byte[channel.length];
                channel.buffer.flip();
                channel.buffer.get(bytesRead);
                // Does this look like a STUN binding request?
                // What's the username?
                Message stunMessage
                    = Message.decode(bytesRead,
                                     (char) 0,
                                     (char) bytesRead.length);
                if (stunMessage.getMessageType()
                        != Message.BINDING_REQUEST)
                {
                    throw new IOException("Not a binding request");
                }
                UsernameAttribute usernameAttribute
                    = (UsernameAttribute)
                        stunMessage.getAttribute(Attribute.USERNAME);
                if (usernameAttribute == null)
                {
                    throw new IOException(
                            "No USERNAME attribute present.");
                }
                // The part of USERNAME before ':' is the local ufrag,
                // which identifies the target Component.
                String usernameString
                    = new String(usernameAttribute.getUsername());
                String localUfrag = usernameString.split(":")[0];
                Component component = getComponent(localUfrag);
                if (component == null)
                    throw new IOException("No component found.");
                // The rest of the stack will read from the socket's
                // InputStream. We cannot change the blocking mode
                // before the channel is removed from the selector (by
                // cancelling the key)
                key.cancel();
                channel.channel.configureBlocking(true);
                // Construct a DatagramPacket from the just-read packet
                // which is to be pushed back
                DatagramPacket p
                    = new DatagramPacket(bytesRead, bytesRead.length);
                Socket socket = channel.channel.socket();
                p.setAddress(socket.getInetAddress());
                p.setPort(socket.getPort());
                handSocketToComponent(socket, component, p);
            }
        }
    }
    catch (IOException ioe)
    {
        // On any protocol or I/O failure, abandon the channel: cancel
        // its key and close it (we still own it at this point).
        logger.info(
                "Failed to handle TCP socket "
                    + channel.channel.socket() + ": " + ioe);
        key.cancel();
        closeNoExceptions(channel.channel);
    }
    catch (StunException se)
    {
        logger.info(
                "Failed to handle TCP socket "
                    + channel.channel.socket() + ": " + se);
        key.cancel();
        closeNoExceptions(channel.channel);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public void run()
{
    while (true)
    {
        // Snapshot the shutdown flag under the harvester's lock.
        boolean stopRequested;
        synchronized (TcpHarvester.this)
        {
            stopRequested = close;
        }
        if (stopRequested)
            break;

        // Drop channels that have been inactive for too long.
        cleanup();
        // Pick up any sockets that were recently accepted.
        checkForNewChannels();

        // Service every registered channel that is still valid.
        for (SelectionKey selKey : readSelector.keys())
        {
            if (!selKey.isValid())
                continue;
            readFromChannel((ChannelDesc) selKey.attachment(), selKey);
        }

        // We read from all SocketChannels.
        readSelector.selectedKeys().clear();

        try
        {
            readSelector.select(
                    MuxServerSocketChannelFactory
                            .SOCKET_CHANNEL_READ_TIMEOUT
                        / 2);
        }
        catch (IOException ioe)
        {
            logger.info("Failed to select a read-ready channel.");
        }
    }

    // Shutting down: release everything still under our control.
    synchronized (newChannels)
    {
        for (SocketChannel pending : newChannels)
            closeNoExceptions(pending);
        newChannels.clear();
    }

    for (SelectionKey selKey : readSelector.keys())
    {
        // An invalid key means the channel was either closed already or
        // handed off to a candidate/component, so it is no longer ours
        // to touch.
        if (!selKey.isValid())
            continue;
        Channel ch = selKey.channel();
        if (ch.isOpen())
            closeNoExceptions(ch);
    }

    try
    {
        readSelector.close();
    }
    catch (IOException ignored)
    {
        // Nothing useful can be done at this point of the shutdown.
    }
}
}
}
| |
/**
* Copyright 2007-2008 University Of Southern California
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.pegasus.planner.code.generator.condor.style;
import edu.isi.pegasus.common.credential.CredentialHandler;
import edu.isi.pegasus.common.credential.CredentialHandlerFactory;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.common.util.ShellCommand;
import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry;
import edu.isi.pegasus.planner.catalog.site.classes.SiteStore;
import edu.isi.pegasus.planner.classes.AggregatedJob;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.code.generator.condor.CondorStyle;
import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException;
import edu.isi.pegasus.planner.code.generator.condor.CondorStyleFactoryException;
import edu.isi.pegasus.planner.common.PegasusProperties;
import java.io.File;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* An abstract implementation of the CondorStyle interface. Implements the initialization method.
*
* @author Karan Vahi
* @version $Revision$
*/
public abstract class Abstract implements CondorStyle {

    /** The object holding all the properties pertaining to Pegasus. */
    protected PegasusProperties mProps;

    /** The handle to the Site Catalog Store. */
    protected SiteStore mSiteStore;

    /** A handle to the logging object. */
    protected LogManager mLogger;

    /** Handle to the Credential Handler Factory */
    protected CredentialHandlerFactory mCredentialFactory;

    /**
     * Absolute paths of the directories listed in the submit host's condor
     * MOUNT_UNDER_SCRATCH configuration. A local credential placed under any
     * of these directories is unusable (the mount shadows it).
     */
    protected List<String> mMountUnderScratchDirs;

    /** The default constructor. */
    public Abstract() {
        // mLogger = LogManager.getInstance();
    }

    /**
     * Initializes the Code Style implementation. Also queries condor on the
     * submit host for its MOUNT_UNDER_SCRATCH setting, caching the listed
     * directories for later credential-path validation.
     *
     * @param bag the bag of initialization objects
     * @param credentialFactory the credential handler factory
     * @throws CondorStyleException that nests any error that might occur during the
     *     instantiation of the implementation.
     */
    public void initialize(PegasusBag bag, CredentialHandlerFactory credentialFactory)
            throws CondorStyleException {

        mProps = bag.getPegasusProperties();
        mSiteStore = bag.getHandleToSiteStore();
        mLogger = bag.getLogger();
        mCredentialFactory = credentialFactory;
        // was a raw LinkedList; use the parameterized type
        mMountUnderScratchDirs = new LinkedList<>();

        ShellCommand c = ShellCommand.getInstance(mLogger);
        if (c.execute("condor_config_val", "MOUNT_UNDER_SCRATCH") == 0) {
            String stdout = c.getSTDOut();
            // remove enclosing quotes if any
            stdout = stdout.replaceAll("^\"|\"$", "");
            String[] dirs = stdout.split(",");
            for (int i = 0; i < dirs.length; i++) {
                try {
                    // normalize to an absolute path so prefix checks in
                    // localCredentialPathValid() are reliable
                    mMountUnderScratchDirs.add(new File(dirs[i]).getAbsolutePath());
                } catch (Exception e) {
                    /* ignore a malformed entry; best-effort parsing */
                }
            }
        }
        mLogger.log(
                "Mount Under Scratch Directories " + mMountUnderScratchDirs,
                LogManager.DEBUG_MESSAGE_LEVEL);
    }

    /**
     * Apply a style to an AggregatedJob: the style is applied to each
     * constituent job first and then to the aggregated job itself.
     *
     * @param job the <code>AggregatedJob</code> object containing the job.
     * @throws CondorStyleException in case of any error occuring code generation.
     */
    public void apply(AggregatedJob job) throws CondorStyleException {
        // apply style to all constituent jobs
        for (Iterator it = job.constituentJobsIterator(); it.hasNext(); ) {
            Job j = (Job) it.next();
            this.apply(j);
        }
        // also apply style to the aggregated job itself
        this.apply((Job) job);
    }

    /**
     * Empty implementation.
     *
     * @param site the site catalog entry object
     * @throws CondorStyleException in case of any error occuring code generation.
     */
    public void apply(SiteCatalogEntry site) throws CondorStyleException {
        // do nothing
    }

    /**
     * Examines the credential requirements for a job and adds appropiate transfer and environment
     * directives for the credentials to be staged and picked up by the job.
     *
     * @param job the job to associate credentials with
     * @throws CondorStyleException if a required credential cannot be located
     *     or the credential type is unknown
     */
    protected void applyCredentialsForRemoteExec(Job job) throws CondorStyleException {
        // sanity check
        if (!job.requiresCredentials()) {
            return;
        }

        applyCredentialsForJobSubmission(job);

        // was a raw HashSet; use the diamond operator
        Set<String> credentialsForCondorFileTransfer = new HashSet<>();

        // jobs can have multiple credential requirements
        // and may need credentials associated with different sites PM-731
        for (Map.Entry<String, Set<CredentialHandler.TYPE>> entry :
                job.getCredentialTypes().entrySet()) {
            String siteHandle = entry.getKey();
            for (CredentialHandler.TYPE credType : entry.getValue()) {
                CredentialHandler handler = mCredentialFactory.loadInstance(credType);

                // if the credential is listed in the remote sites environment, don't do anything
                SiteCatalogEntry site = mSiteStore.lookup(job.getSiteHandle());
                // null-guard added: lookup can come back empty for a site
                // missing from the catalog; fall through and stage instead
                if (site != null
                        && site.getEnvironmentVariable(handler.getProfileKey()) != null
                        && siteHandle.equals(job.getSiteHandle())) {
                    // the user has the enviornment variable specified in the site
                    // catalog pointing to an existing credential on the remote
                    // site and the job is going to run on the site for which we
                    // need the credential
                    continue;
                }

                // make sure we can have a path to credential
                String credentialPath = handler.getPath(siteHandle);
                if (credentialPath == null) {
                    // throws
                    this.complainForCredential(job, handler.getProfileKey(), siteHandle);
                }

                // PM-1150 verify credential
                handler.verify(job, credType, credentialPath);

                switch (credType) {
                    case x509:
                        // check if x509userproxy not already set. can be set
                        // as part of credentials for job submission
                        if (job.condorVariables.containsKey(Condor.X509USERPROXY_KEY)) {
                            // we can transfer the credential only via condor file io
                            // sanity check to to make sure not same as already set
                            String existing =
                                    (String) job.condorVariables.get(Condor.X509USERPROXY_KEY);
                            if (!existing.equals(credentialPath)) {
                                credentialsForCondorFileTransfer.add(credentialPath);
                                job.envVariables.construct(
                                        handler.getEnvironmentVariable(siteHandle),
                                        handler.getBaseName(siteHandle));
                            }
                        } else {
                            // PM-1099 set the x509userproxy key directly
                            // we don's set the environment variable based on site name
                            // as for GRAM submissions, the proxy is renmaed by GRAM on the
                            // remote end tp the x509_user_proxy when placed in ~/.globus/job
                            // directory. GRAM then sets X509_USER_PROXY env variable to reflect
                            // the path to the proxy.
                            job.condorVariables.construct(Condor.X509USERPROXY_KEY, credentialPath);
                        }
                        break;

                    case credentials:
                    case irods:
                    case s3:
                    case boto:
                    case googlep12:
                    case ssh:
                        // transfer using condor file transfer, and advertise in env
                        // but first make sure it is specified in our environment
                        credentialsForCondorFileTransfer.add(credentialPath);
                        job.envVariables.construct(
                                handler.getEnvironmentVariable(siteHandle),
                                handler.getBaseName(siteHandle));
                        break;

                    default:
                        throw new CondorStyleException(
                                "Job has been tagged with unknown credential type");
                }
            } // for each credential for each site
        }

        // PM-1489 add credentials for job at end ensuring no duplicates
        job.condorVariables.addIPFileForTransfer(credentialsForCondorFileTransfer);
    }

    /**
     * Examines the credential requirements for a job and adds appropiate transfer and environment
     * directives for the credentials to be picked up for the local job
     *
     * @param job the locally executing job
     * @throws CondorStyleException if a required credential cannot be located,
     *     lies under a MOUNT_UNDER_SCRATCH directory, or has an unknown type
     */
    protected void applyCredentialsForLocalExec(Job job) throws CondorStyleException {
        // sanity check
        if (!job.requiresCredentials()) {
            return;
        }

        // associate any credentials if reqd for job submission.
        this.applyCredentialsForJobSubmission(job, true);

        // jobs can have multiple credential requirements
        // and may need credentials associated with different sites PM-731
        for (Map.Entry<String, Set<CredentialHandler.TYPE>> entry :
                job.getCredentialTypes().entrySet()) {
            // jobs can have multiple credential requirements
            String siteHandle = entry.getKey();
            for (CredentialHandler.TYPE credType : entry.getValue()) {
                CredentialHandler handler = mCredentialFactory.loadInstance(credType);
                switch (credType) {
                    case credentials:
                    case x509:
                    case irods:
                    case s3:
                    case boto:
                    case googlep12:
                    case ssh:
                        // for local exec, just set envionment variables to full path
                        String path = handler.getPath(siteHandle);
                        if (path == null) {
                            // throws
                            this.complainForCredential(job, handler.getProfileKey(), siteHandle);
                        }

                        // PM-1150 verify credential
                        handler.verify(job, credType, path);

                        // PM-1358 check if local credential path is valid or not
                        if (this.localCredentialPathValid(path)) {
                            job.envVariables.construct(
                                    handler.getEnvironmentVariable(siteHandle), path);
                        } else {
                            // flag an error
                            this.complainForMountUnderScratch(job, path);
                        }
                        break;

                    default:
                        throw new CondorStyleException(
                                "Job has been tagged with unknown credential type");
                }
            }
        }
    }

    /**
     * Associates credentials required for job submission.
     *
     * @param job the job to update
     * @throws CondorStyleException if the submission credential is missing or
     *     of an unsupported type
     */
    protected void applyCredentialsForJobSubmission(Job job) throws CondorStyleException {
        this.applyCredentialsForJobSubmission(job, false);
    }

    /**
     * Associates credentials required for job submission.
     *
     * @param job the job to update
     * @param isLocal boolean indicating whether it is a local job or not
     * @throws CondorStyleException if the submission credential is missing,
     *     invalid for a local job, or of an unsupported type
     */
    protected void applyCredentialsForJobSubmission(Job job, boolean isLocal)
            throws CondorStyleException {

        // handle credential for job submission if set
        if (job.getSubmissionCredential() == null) {
            return;
        }

        // set the proxy for job submission
        CredentialHandler.TYPE cred = job.getSubmissionCredential();
        CredentialHandler handler = mCredentialFactory.loadInstance(cred);
        String path = handler.getPath(job.getSiteHandle());
        if (path == null) {
            // throws
            this.complainForCredential(job, handler.getProfileKey(), job.getSiteHandle());
        }

        switch (cred) {
            case x509:
                if (isLocal) {
                    // PM-1358 for validity
                    if (!this.localCredentialPathValid(path)) {
                        // flag an error
                        this.complainForMountUnderScratch(job, path);
                    }
                    handler.verify(job, cred, path);
                }
                job.condorVariables.construct(Condor.X509USERPROXY_KEY, path);
                break;

            default:
                // only job submission via x509 is explicitly supported
                throw new CondorStyleException(
                        "Invalid credential type for job submission "
                                + cred
                                + " for job "
                                + job.getName());
        }
        return;
    }

    /**
     * Complain if a particular credential key is not found for a site
     *
     * @param job the job requiring the credential
     * @param key the Pegasus profile key that should point to the credential
     * @param site the site for which the credential is missing
     * @throws CondorStyleException always — this method only constructs and
     *     throws the error
     */
    protected void complainForCredential(Job job, String key, String site)
            throws CondorStyleException {
        StringBuilder error = new StringBuilder();
        error.append("Unable to find required credential for file transfers for job ")
                .append(job.getName())
                .append(" . Please make sure that the key ")
                .append(key)
                .append(" is set as a Pegasus profile in the site catalog for site ")
                .append(site)
                .append(" or in your environment.");
        throw new CondorStyleException(error.toString());
    }

    /**
     * Complain if a particular credential is mounted under scratch in condor configuration
     *
     * @param job the affected job
     * @param credential the offending local credential path
     * @throws CondorStyleException always — this method only constructs and
     *     throws the error
     */
    protected void complainForMountUnderScratch(Job job, String credential)
            throws CondorStyleException {
        StringBuilder error = new StringBuilder();
        error.append("Local path to credential ")
                .append(credential)
                .append(" for job ")
                .append(job.getID())
                .append(
                        " is specified under MOUNT_UNDER_SCRATCH variable in condor configuration on the submit host")
                .append(this.mMountUnderScratchDirs);
        throw new CondorStyleException(error.toString());
    }

    /**
     * Constructs an error message in case of style mismatch.
     *
     * @param job the job object.
     * @param style the name of the style.
     * @param universe the universe associated with the job.
     * @return the formatted mismatch message
     */
    protected String errorMessage(Job job, String style, String universe) {
        StringBuilder sb = new StringBuilder();
        sb.append("(style,universe,site) mismatch for job ")
                .append(job.getName())
                .append(" ")
                .append(". Found (")
                .append(style)
                .append(",")
                .append(universe)
                .append(",")
                .append(job.getSiteHandle())
                .append(")");
        return sb.toString();
    }

    /**
     * Returns whether local credential being used is valid w.r.t Condor MOUNT_UNDER_SCRATCH
     * settings
     *
     * @param credential the local credential path to check
     * @return false if the credential lies under any MOUNT_UNDER_SCRATCH
     *     directory, true otherwise
     */
    protected boolean localCredentialPathValid(String credential) {
        boolean valid = true;
        if (this.mMountUnderScratchDirs.isEmpty()) {
            // no directories mentioned for mounting under scratch
            return valid;
        }
        for (String dir : this.mMountUnderScratchDirs) {
            if (new File(credential).getAbsolutePath().startsWith(dir)) {
                // local proxy path points to a value that is mounted under scratch
                valid = false;
                break;
            }
        }
        return valid;
    }
}
| |
/* Generated By:JavaCC: Do not edit this line. QueryParserTokenManager.java */
package org.apache.lucene.queryparser.classic;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.TermQuery;
/** Token Manager. */
@SuppressWarnings("cast")
public class QueryParserTokenManager implements QueryParserConstants
{
/**
 * Generated by JavaCC. Determines the NFA state to fall back to when
 * string-literal matching in lexical state 2 fails at position
 * {@code pos}; always -1 here (no fallback state is ever needed).
 */
private final int jjStopStringLiteralDfa_2(int pos, long active0)
{
   switch (pos)
   {
      default :
         return -1;
   }
}
/**
 * Generated by JavaCC. Starts the NFA for lexical state 2 at the
 * fallback state chosen by {@link #jjStopStringLiteralDfa_2}.
 */
private final int jjStartNfa_2(int pos, long active0)
{
   return jjMoveNfa_2(jjStopStringLiteralDfa_2(pos, active0), pos + 1);
}
/**
 * Generated by JavaCC. Records a definitive token match of the given
 * {@code kind} ending at {@code pos} and reports the next position.
 */
private int jjStopAtPos(int pos, int kind)
{
   // The two fields are independent; record position first, then kind.
   jjmatchedPos = pos;
   jjmatchedKind = kind;
   return 1 + pos;
}
/**
 * Generated by JavaCC. First (and only) step of literal matching for
 * lexical state 2: dispatches on the current character for the
 * single-character punctuation tokens, otherwise falls through to the
 * NFA.
 */
private int jjMoveStringLiteralDfa0_2()
{
   switch(curChar)
   {
      case 40:
         // '('
         return jjStopAtPos(0, 14);
      case 41:
         // ')'
         return jjStopAtPos(0, 15);
      case 42:
         // '*' — may also begin a longer token; continue in NFA state 49
         return jjStartNfaWithStates_2(0, 17, 49);
      case 43:
         // '+'
         return jjStartNfaWithStates_2(0, 11, 15);
      case 45:
         // '-'
         return jjStartNfaWithStates_2(0, 12, 15);
      case 58:
         // ':'
         return jjStopAtPos(0, 16);
      case 91:
         // '['
         return jjStopAtPos(0, 25);
      case 94:
         // '^'
         return jjStopAtPos(0, 18);
      case 123:
         // '{'
         return jjStopAtPos(0, 26);
      default :
         return jjMoveNfa_2(0, 0);
   }
}
/**
 * Generated by JavaCC. Records a provisional match of {@code kind} at
 * {@code pos}, reads the next character, and continues the NFA of
 * lexical state 2 from {@code state}. On EOF, returns the position
 * after the provisional match.
 */
private int jjStartNfaWithStates_2(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) { return pos + 1; }
   return jjMoveNfa_2(state, pos + 1);
}
// Generated by JavaCC. 256-bit membership tables (4 longs each) used by
// the jjCanMove_* predicates to classify characters above 0x7f by their
// high/low byte. jjbitVec0 covers only code point 0 of its range;
// jjbitVec1/jjbitVec4 are all-ones except for the lowest bit(s);
// jjbitVec3 covers the upper half of its range.
static final long[] jjbitVec0 = {
   0x1L, 0x0L, 0x0L, 0x0L
};
static final long[] jjbitVec1 = {
   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec3 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec4 = {
   0xfffefffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
/**
 * Generated by JavaCC — do not hand-edit; the bit masks are
 * machine-produced character-class tables and the state transitions are
 * order-sensitive. Runs the NFA of lexical state 2 (49 states) until no
 * state survives, recording the longest match in jjmatchedKind /
 * jjmatchedPos. The character dispatch is split into three ranges:
 * ASCII &lt; 64, ASCII 64..127, and everything else (via jjCanMove_*).
 */
private int jjMoveNfa_2(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 49;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         // Characters 0..63: 'l' has a single bit set for curChar, so
         // (mask & l) tests class membership directly.
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 49:
               case 33:
                  if ((0xfbff7cf8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 0:
                  // Initial state: fan out to all token alternatives.
                  if ((0xfbff54f8ffffd9ffL & l) != 0L)
                  {
                     if (kind > 23)
                        kind = 23;
                     jjCheckNAddTwoStates(33, 34);
                  }
                  else if ((0x100002600L & l) != 0L)
                  {
                     if (kind > 7)
                        kind = 7;
                  }
                  else if ((0x280200000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 15;
                  else if (curChar == 47)
                     jjCheckNAddStates(0, 2);
                  else if (curChar == 34)
                     jjCheckNAddStates(3, 5);
                  if ((0x7bff50f8ffffd9ffL & l) != 0L)
                  {
                     if (kind > 20)
                        kind = 20;
                     jjCheckNAddStates(6, 10);
                  }
                  else if (curChar == 42)
                  {
                     if (kind > 22)
                        kind = 22;
                  }
                  else if (curChar == 33)
                  {
                     if (kind > 10)
                        kind = 10;
                  }
                  if (curChar == 38)
                     jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 4:
                  if (curChar == 38 && kind > 8)
                     kind = 8;
                  break;
               case 5:
                  if (curChar == 38)
                     jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 13:
                  if (curChar == 33 && kind > 10)
                     kind = 10;
                  break;
               case 14:
                  if ((0x280200000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 15;
                  break;
               case 15:
                  if ((0x100002600L & l) != 0L && kind > 13)
                     kind = 13;
                  break;
               case 16:
                  if (curChar == 34)
                     jjCheckNAddStates(3, 5);
                  break;
               case 17:
                  if ((0xfffffffbffffffffL & l) != 0L)
                     jjCheckNAddStates(3, 5);
                  break;
               case 19:
                  jjCheckNAddStates(3, 5);
                  break;
               case 20:
                  if (curChar == 34 && kind > 19)
                     kind = 19;
                  break;
               case 22:
                  // 0x3ff000000000000L selects the digits '0'..'9'.
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddStates(11, 14);
                  break;
               case 23:
                  if (curChar == 46)
                     jjCheckNAdd(24);
                  break;
               case 24:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddStates(15, 17);
                  break;
               case 25:
                  if ((0x7bff78f8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 27:
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 28:
                  if ((0x7bff78f8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 30:
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 31:
                  if (curChar == 42 && kind > 22)
                     kind = 22;
                  break;
               case 32:
                  if ((0xfbff54f8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 35:
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 36:
               case 38:
                  if (curChar == 47)
                     jjCheckNAddStates(0, 2);
                  break;
               case 37:
                  if ((0xffff7fffffffffffL & l) != 0L)
                     jjCheckNAddStates(0, 2);
                  break;
               case 40:
                  if (curChar == 47 && kind > 24)
                     kind = 24;
                  break;
               case 41:
                  if ((0x7bff50f8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddStates(6, 10);
                  break;
               case 42:
                  if ((0x7bff78f8ffffd9ffL & l) == 0L)
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 44:
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 45:
                  if ((0x7bff78f8ffffd9ffL & l) != 0L)
                     jjCheckNAddStates(18, 20);
                  break;
               case 47:
                  jjCheckNAddStates(18, 20);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         // Characters 64..127 (letters, '\\', '|', '~', ...).
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 49:
                  if ((0x97ffffff87ffffffL & l) != 0L)
                  {
                     if (kind > 23)
                        kind = 23;
                     jjCheckNAddTwoStates(33, 34);
                  }
                  else if (curChar == 92)
                     jjCheckNAddTwoStates(35, 35);
                  break;
               case 0:
                  if ((0x97ffffff87ffffffL & l) != 0L)
                  {
                     if (kind > 20)
                        kind = 20;
                     jjCheckNAddStates(6, 10);
                  }
                  else if (curChar == 92)
                     jjCheckNAddStates(21, 23);
                  else if (curChar == 126)
                  {
                     if (kind > 21)
                        kind = 21;
                     jjCheckNAddStates(24, 26);
                  }
                  if ((0x97ffffff87ffffffL & l) != 0L)
                  {
                     if (kind > 23)
                        kind = 23;
                     jjCheckNAddTwoStates(33, 34);
                  }
                  // States 1..12 spell out the operator words:
                  // 65 'A', 78 'N', 68 'D' / 79 'O', 82 'R' / 84 'T'.
                  if (curChar == 78)
                     jjstateSet[jjnewStateCnt++] = 11;
                  else if (curChar == 124)
                     jjstateSet[jjnewStateCnt++] = 8;
                  else if (curChar == 79)
                     jjstateSet[jjnewStateCnt++] = 6;
                  else if (curChar == 65)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 1:
                  if (curChar == 68 && kind > 8)
                     kind = 8;
                  break;
               case 2:
                  if (curChar == 78)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 3:
                  if (curChar == 65)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 6:
                  if (curChar == 82 && kind > 9)
                     kind = 9;
                  break;
               case 7:
                  if (curChar == 79)
                     jjstateSet[jjnewStateCnt++] = 6;
                  break;
               case 8:
                  if (curChar == 124 && kind > 9)
                     kind = 9;
                  break;
               case 9:
                  if (curChar == 124)
                     jjstateSet[jjnewStateCnt++] = 8;
                  break;
               case 10:
                  if (curChar == 84 && kind > 10)
                     kind = 10;
                  break;
               case 11:
                  if (curChar == 79)
                     jjstateSet[jjnewStateCnt++] = 10;
                  break;
               case 12:
                  if (curChar == 78)
                     jjstateSet[jjnewStateCnt++] = 11;
                  break;
               case 17:
                  if ((0xffffffffefffffffL & l) != 0L)
                     jjCheckNAddStates(3, 5);
                  break;
               case 18:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 19;
                  break;
               case 19:
                  jjCheckNAddStates(3, 5);
                  break;
               case 21:
                  if (curChar != 126)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddStates(24, 26);
                  break;
               case 25:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 26:
                  if (curChar == 92)
                     jjAddStates(27, 28);
                  break;
               case 27:
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 28:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 29:
                  if (curChar == 92)
                     jjAddStates(29, 30);
                  break;
               case 30:
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 32:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 33:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 34:
                  if (curChar == 92)
                     jjCheckNAddTwoStates(35, 35);
                  break;
               case 35:
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 37:
                  jjAddStates(0, 2);
                  break;
               case 39:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 38;
                  break;
               case 41:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddStates(6, 10);
                  break;
               case 42:
                  if ((0x97ffffff87ffffffL & l) == 0L)
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 43:
                  if (curChar == 92)
                     jjCheckNAddTwoStates(44, 44);
                  break;
               case 44:
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 45:
                  if ((0x97ffffff87ffffffL & l) != 0L)
                     jjCheckNAddStates(18, 20);
                  break;
               case 46:
                  if (curChar == 92)
                     jjCheckNAddTwoStates(47, 47);
                  break;
               case 47:
                  jjCheckNAddStates(18, 20);
                  break;
               case 48:
                  if (curChar == 92)
                     jjCheckNAddStates(21, 23);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         // Characters above 0x7f: classify via the jjCanMove_* tables.
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 49:
               case 33:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 0:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                  {
                     if (kind > 7)
                        kind = 7;
                  }
                  if (jjCanMove_2(hiByte, i1, i2, l1, l2))
                  {
                     if (kind > 23)
                        kind = 23;
                     jjCheckNAddTwoStates(33, 34);
                  }
                  if (jjCanMove_2(hiByte, i1, i2, l1, l2))
                  {
                     if (kind > 20)
                        kind = 20;
                     jjCheckNAddStates(6, 10);
                  }
                  break;
               case 15:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 13)
                     kind = 13;
                  break;
               case 17:
               case 19:
                  if (jjCanMove_1(hiByte, i1, i2, l1, l2))
                     jjCheckNAddStates(3, 5);
                  break;
               case 25:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 27:
                  if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(25, 26);
                  break;
               case 28:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 30:
                  if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 21)
                     kind = 21;
                  jjCheckNAddTwoStates(28, 29);
                  break;
               case 32:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 35:
                  if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 23)
                     kind = 23;
                  jjCheckNAddTwoStates(33, 34);
                  break;
               case 37:
                  if (jjCanMove_1(hiByte, i1, i2, l1, l2))
                     jjAddStates(0, 2);
                  break;
               case 41:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddStates(6, 10);
                  break;
               case 42:
                  if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 44:
                  if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 20)
                     kind = 20;
                  jjCheckNAddTwoStates(42, 43);
                  break;
               case 45:
                  if (jjCanMove_2(hiByte, i1, i2, l1, l2))
                     jjCheckNAddStates(18, 20);
                  break;
               case 47:
                  if (jjCanMove_1(hiByte, i1, i2, l1, l2))
                     jjCheckNAddStates(18, 20);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Record the lowest (highest-priority) kind matched this round.
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the current/next state sets; stop when no state survives.
      if ((i = jjnewStateCnt) == (startsAt = 49 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
/**
 * Generated by JavaCC. Lexical state 0 has no string literals, so
 * tokenizing goes straight to its NFA.
 */
private int jjMoveStringLiteralDfa0_0()
{
   return jjMoveNfa_0(0, 0);
}
/**
 * Generated by JavaCC — do not hand-edit. NFA of lexical state 0
 * (3 states): matches kind 27, digits optionally followed by '.' and
 * more digits. Only ASCII below 64 can advance it; the other two
 * character ranges have empty transition tables.
 */
private int jjMoveNfa_0(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 3;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  // 0x3ff000000000000L selects the digits '0'..'9'.
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjAddStates(31, 32);
                  break;
               case 1:
                  // 46 == '.'
                  if (curChar == 46)
                     jjCheckNAdd(2);
                  break;
               case 2:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAdd(2);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap current/next state sets; stop when no state survives.
      if ((i = jjnewStateCnt) == (startsAt = 3 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
/**
 * Generated by JavaCC. Fallback-state selection for literal matching in
 * lexical state 1: after one character of the "TO" literal (bit
 * 0x10000000), fall back to NFA state 6 with a provisional match of
 * kind 32; otherwise no fallback (-1).
 */
private final int jjStopStringLiteralDfa_1(int pos, long active0)
{
   switch (pos)
   {
      case 0:
         if ((active0 & 0x10000000L) != 0L)
         {
            jjmatchedKind = 32;
            return 6;
         }
         return -1;
      default :
         return -1;
   }
}
/**
 * Generated by JavaCC. Starts the NFA for lexical state 1 at the
 * fallback state chosen by {@link #jjStopStringLiteralDfa_1}.
 */
private final int jjStartNfa_1(int pos, long active0)
{
   return jjMoveNfa_1(jjStopStringLiteralDfa_1(pos, active0), pos + 1);
}
/**
 * Generated by JavaCC. First step of literal matching for lexical
 * state 1: 'T' (84) may begin "TO", ']' (93) and '}' (125) are
 * single-character tokens; anything else goes to the NFA.
 */
private int jjMoveStringLiteralDfa0_1()
{
   switch(curChar)
   {
      case 84:
         return jjMoveStringLiteralDfa1_1(0x10000000L);
      case 93:
         return jjStopAtPos(0, 29);
      case 125:
         return jjStopAtPos(0, 30);
      default :
         return jjMoveNfa_1(0, 0);
   }
}
/**
 * Generated by JavaCC. Second step of literal matching in lexical
 * state 1: after 'T', an 'O' (79) completes the "TO" literal (kind 28)
 * and continues in NFA state 6; anything else falls back via
 * {@link #jjStartNfa_1}.
 */
private int jjMoveStringLiteralDfa1_1(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      // EOF after the first character: fall back and report one
      // character consumed.
      jjStopStringLiteralDfa_1(0, active0);
      return 1;
   }
   switch(curChar)
   {
      case 79:
         if ((active0 & 0x10000000L) != 0L)
            return jjStartNfaWithStates_1(1, 28, 6);
         break;
      default :
         break;
   }
   return jjStartNfa_1(0, active0);
}
/**
 * Generated by JavaCC. Records a provisional match of {@code kind} at
 * {@code pos}, reads the next character, and continues the NFA of
 * lexical state 1 from {@code state}. On EOF, returns the position
 * after the provisional match.
 */
private int jjStartNfaWithStates_1(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) { return pos + 1; }
   return jjMoveNfa_1(state, pos + 1);
}
/**
 * Generated by JavaCC — do not hand-edit; transitions are
 * order-sensitive and the masks are machine-produced. NFA of lexical
 * state 1 (7 states): recognizes quoted strings (kind 31, '"' == 34,
 * with '\\' escapes), bare range terms (kind 32) and whitespace
 * (kind 7).
 */
private int jjMoveNfa_1(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 7;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if ((0xfffffffeffffffffL & l) != 0L)
                  {
                     if (kind > 32)
                        kind = 32;
                     jjCheckNAdd(6);
                  }
                  if ((0x100002600L & l) != 0L)
                  {
                     if (kind > 7)
                        kind = 7;
                  }
                  else if (curChar == 34)
                     jjCheckNAddTwoStates(2, 4);
                  break;
               case 1:
                  if (curChar == 34)
                     jjCheckNAddTwoStates(2, 4);
                  break;
               case 2:
                  if ((0xfffffffbffffffffL & l) != 0L)
                     jjCheckNAddStates(33, 35);
                  break;
               case 3:
                  if (curChar == 34)
                     jjCheckNAddStates(33, 35);
                  break;
               case 5:
                  if (curChar == 34 && kind > 31)
                     kind = 31;
                  break;
               case 6:
                  if ((0xfffffffeffffffffL & l) == 0L)
                     break;
                  if (kind > 32)
                     kind = 32;
                  jjCheckNAdd(6);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
               case 6:
                  if ((0xdfffffffdfffffffL & l) == 0L)
                     break;
                  if (kind > 32)
                     kind = 32;
                  jjCheckNAdd(6);
                  break;
               case 2:
                  jjAddStates(33, 35);
                  break;
               case 4:
                  // 92 == '\\' starts an escape inside the quotes.
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 0:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                  {
                     if (kind > 7)
                        kind = 7;
                  }
                  if (jjCanMove_1(hiByte, i1, i2, l1, l2))
                  {
                     if (kind > 32)
                        kind = 32;
                     jjCheckNAdd(6);
                  }
                  break;
               case 2:
                  if (jjCanMove_1(hiByte, i1, i2, l1, l2))
                     jjAddStates(33, 35);
                  break;
               case 6:
                  if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
                     break;
                  if (kind > 32)
                     kind = 32;
                  jjCheckNAdd(6);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap current/next state sets; stop when no state survives.
      if ((i = jjnewStateCnt) == (startsAt = 7 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
// Flattened successor-state table; jjAddStates/jjCheckNAddStates index
// ranges of it (e.g. (33, 35) covers entries {1, 2, 4}).
static final int[] jjnextStates = {
   37, 39, 40, 17, 18, 20, 42, 45, 31, 46, 43, 22, 23, 25, 26, 24,
   25, 26, 45, 31, 46, 44, 47, 35, 22, 28, 29, 27, 27, 30, 30, 0,
   1, 2, 4, 5,
};
/**
 * Tests whether a character above 0x7f (pre-split into high-byte index/bit
 * and low-byte index/bit) is in bit vector 0 (the skip/whitespace set).
 */
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
   switch(hiByte)
   {
      case 48:
         return ((jjbitVec0[i2] & l2) != 0L);
      default :
         return false;
   }
}
/** Tests membership of a character above 0x7f in bit vectors 1/3 (term characters). */
private static final boolean jjCanMove_1(int hiByte, int i1, int i2, long l1, long l2)
{
   switch(hiByte)
   {
      case 0:
         return ((jjbitVec3[i2] & l2) != 0L);
      default :
         if ((jjbitVec1[i1] & l1) != 0L)
            return true;
         return false;
   }
}
/** Tests membership of a character above 0x7f in bit vectors 1/3/4. */
private static final boolean jjCanMove_2(int hiByte, int i1, int i2, long l1, long l2)
{
   switch(hiByte)
   {
      case 0:
         return ((jjbitVec3[i2] & l2) != 0L);
      case 48:
         return ((jjbitVec1[i2] & l2) != 0L);
      default :
         if ((jjbitVec4[i1] & l1) != 0L)
            return true;
         return false;
   }
}
/** Token literal values (octal escapes); null for kinds with no fixed image. */
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, null, null, null, null, "\53", "\55",
null, "\50", "\51", "\72", "\52", "\136", null, null, null, null, null, null,
"\133", "\173", null, "\124\117", "\135", "\175", null, null, };
/** Lexer state names. */
public static final String[] lexStateNames = {
   "Boost",
   "Range",
   "DEFAULT",
};
/** Lex State array. */
// Entry k is the lexical state to switch to after matching token kind k,
// or -1 to remain in the current state.
public static final int[] jjnewLexState = {
   -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, -1, -1, -1, -1, -1, -1,
   1, 1, 2, -1, 2, 2, -1, -1,
};
// Bit set (one bit per token kind) of kinds handed to the parser as tokens.
static final long[] jjtoToken = {
   0x1ffffff01L,
};
// Bit set of kinds that are silently skipped (whitespace).
static final long[] jjtoSkip = {
   0x80L,
};
// Character source the token manager reads from.
protected CharStream input_stream;
// Round stamps used by jjCheckNAdd to avoid adding a state twice per round.
private final int[] jjrounds = new int[49];
// Double buffer of active NFA states (current half + next half).
private final int[] jjstateSet = new int[98];
// Character currently under examination.
protected char curChar;
/** Constructor. */
public QueryParserTokenManager(CharStream stream){
   input_stream = stream;
}
/** Constructor. */
public QueryParserTokenManager(CharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/** Reinitialise parser. */
public void ReInit(CharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
// Resets the round stamps so jjCheckNAdd sees every state as unvisited.
private void ReInitRounds()
{
   int i;
   jjround = 0x80000001;
   for (i = 49; i-- > 0;)
      jjrounds[i] = 0x80000000;
}
/** Reinitialise parser. */
public void ReInit(CharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/** Switch to specified lex state (0=Boost, 1=Range, 2=DEFAULT). */
public void SwitchTo(int lexState)
{
   if (lexState >= 3 || lexState < 0)
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
   else
      curLexState = lexState;
}
/**
 * Builds a Token for the most recent match, filling in its image and its
 * begin/end line and column positions from the input stream.
 */
protected Token jjFillToken()
{
   final Token t;
   final String curTokenImage;
   final int beginLine;
   final int endLine;
   final int beginColumn;
   final int endColumn;
   String im = jjstrLiteralImages[jjmatchedKind];
   // Literal kinds have a fixed image; others use the matched input text.
   curTokenImage = (im == null) ? input_stream.GetImage() : im;
   beginLine = input_stream.getBeginLine();
   beginColumn = input_stream.getBeginColumn();
   endLine = input_stream.getEndLine();
   endColumn = input_stream.getEndColumn();
   t = Token.newToken(jjmatchedKind, curTokenImage);
   t.beginLine = beginLine;
   t.endLine = endLine;
   t.beginColumn = beginColumn;
   t.endColumn = endColumn;
   return t;
}
// Current lexical state (index into lexStateNames); 2 == DEFAULT.
int curLexState = 2;
int defaultLexState = 2;
// Number of states currently in the active half of jjstateSet.
int jjnewStateCnt;
// Round counter for jjrounds stamping.
int jjround;
// Position and kind of the longest match found so far.
int jjmatchedPos;
int jjmatchedKind;
/** Get the next Token. */
public Token getNextToken()
{
  Token matchedToken;
  int curPos = 0;
  EOFLoop :
  for (;;)
  {
   try
   {
      // Start a new token at the current input position.
      curChar = input_stream.BeginToken();
   }
   catch(java.io.IOException e)
   {
      // End of input: kind 0 is the EOF token.
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
   }
   // Run the matcher for the current lexical state (0=Boost, 1=Range, 2=DEFAULT).
   switch(curLexState)
   {
     case 0:
       jjmatchedKind = 0x7fffffff;
       jjmatchedPos = 0;
       curPos = jjMoveStringLiteralDfa0_0();
       break;
     case 1:
       jjmatchedKind = 0x7fffffff;
       jjmatchedPos = 0;
       curPos = jjMoveStringLiteralDfa0_1();
       break;
     case 2:
       jjmatchedKind = 0x7fffffff;
       jjmatchedPos = 0;
       curPos = jjMoveStringLiteralDfa0_2();
       break;
   }
   if (jjmatchedKind != 0x7fffffff)
   {
      // Push back any characters read beyond the end of the longest match.
      if (jjmatchedPos + 1 < curPos)
         input_stream.backup(curPos - jjmatchedPos - 1);
      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
      {
         // Real token: build it, apply any lex-state transition, return it.
         matchedToken = jjFillToken();
         if (jjnewLexState[jjmatchedKind] != -1)
           curLexState = jjnewLexState[jjmatchedKind];
         return matchedToken;
      }
      else
      {
         // Skipped kind (whitespace): apply any state change, keep scanning.
         if (jjnewLexState[jjmatchedKind] != -1)
           curLexState = jjnewLexState[jjmatchedKind];
         continue EOFLoop;
      }
   }
   // No match at all: raise a lexical error with position and context.
   int error_line = input_stream.getEndLine();
   int error_column = input_stream.getEndColumn();
   String error_after = null;
   boolean EOFSeen = false;
   try { input_stream.readChar(); input_stream.backup(1); }
   catch (java.io.IOException e1) {
      EOFSeen = true;
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
      if (curChar == '\n' || curChar == '\r') {
         error_line++;
         error_column = 0;
      }
      else
         error_column++;
   }
   if (!EOFSeen) {
      input_stream.backup(1);
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
   }
   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
  }
}
// Adds a state to the active NFA set unless already added this round.
private void jjCheckNAdd(int state)
{
   if (jjrounds[state] != jjround)
   {
      jjstateSet[jjnewStateCnt++] = state;
      jjrounds[state] = jjround;
   }
}
// Adds jjnextStates[start..end] to the active set without duplicate checks.
private void jjAddStates(int start, int end)
{
   do {
      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
   } while (start++ != end);
}
// Adds both states, each with a duplicate check.
private void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
// Adds jjnextStates[start..end] to the active set, each with a duplicate check.
private void jjCheckNAddStates(int start, int end)
{
   do {
      jjCheckNAdd(jjnextStates[start]);
   } while (start++ != end);
}
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*========================================================================
*/
package com.gemstone.gemfire.internal;
import com.gemstone.gemfire.Statistics;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.admin.ListenerIdMap;
import com.gemstone.gemfire.internal.admin.remote.StatListenerMessage;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.internal.logging.log4j.LogMarker;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.logging.log4j.Logger;
/**
 * GemFireStatSampler adds listeners and rolling archives to HostStatSampler.
 * <p>
 * The StatisticsManager is implemented by DistributedSystem.
 *
 * @author Darrel Schneider
 * @author Kirk Lund
 * @author Swapnil Bawaskar
 */
public final class GemFireStatSampler extends HostStatSampler {
  private static final Logger logger = LogService.getLogger();
  // listenerId -> RemoteStatListenerImpl; all access is guarded by
  // synchronized (listeners), which also guards recipientToListeners.
  private final ListenerIdMap listeners = new ListenerIdMap();
  // TODO: change the listener maps to be copy-on-write
  // Used as a concurrent set; the mapped value is always Boolean.TRUE.
  private final Map<LocalStatListenerImpl, Boolean> localListeners =
    new ConcurrentHashMap<LocalStatListenerImpl, Boolean>();
  // member -> remote listeners registered on its behalf; a plain HashMap,
  // only ever touched while synchronized on "listeners".
  private final Map<InternalDistributedMember, List<RemoteStatListenerImpl>> recipientToListeners =
    new HashMap<InternalDistributedMember, List<RemoteStatListenerImpl>>();
  // The distributed system being sampled; also serves as the
  // StatisticsManager and OsStatisticsFactory (see the getters below).
  private final InternalDistributedSystem con;
  // Next candidate remote-listener id; wraps back to 1 on int overflow.
  private int nextListenerId = 1;
  // Per-process OS statistics; stays null when OS stats are disabled or
  // unavailable in this VM.
  private ProcessStats processStats = null;
  ////////////////////// Constructors //////////////////////
  public GemFireStatSampler(InternalDistributedSystem con) {
    super(con.getCancelCriterion(), new StatSamplerStats(con, con.getId()));
    this.con = con;
  }
  /**
   * Returns the <code>ProcessStats</code> for this Java VM. Note
   * that <code>null</code> will be returned if operating statistics
   * are disabled.
   *
   * @since 3.5
   */
  public final ProcessStats getProcessStats() {
    return this.processStats;
  }
  @Override
  public String getProductDescription() {
    return "GemFire " + GemFireVersion.getGemFireVersion()
      + " #" + GemFireVersion.getBuildId()
      + " as of " + GemFireVersion.getSourceDate();
  }
  /**
   * Registers a remote stat listener on behalf of {@code recipient} for the
   * statistic {@code statName} of the statistics instance {@code resourceId}.
   *
   * @return the id assigned to the new listener
   */
  public int addListener(InternalDistributedMember recipient, long resourceId, String statName) {
    int result = getNextListenerId();
    synchronized (listeners) {
      while (listeners.get(result) != null) {
        // previous one was still being used
        result = getNextListenerId();
      }
      RemoteStatListenerImpl sl = RemoteStatListenerImpl.create(result, recipient,
          resourceId, statName, this);
      listeners.put(result, sl);
      List<RemoteStatListenerImpl> l = recipientToListeners.get(recipient);
      if (l == null) {
        l = new ArrayList<RemoteStatListenerImpl>();
        recipientToListeners.put(recipient, l);
      }
      l.add(sl);
    }
    return result;
  }
  /**
   * Removes the remote stat listener with the given id.
   *
   * @return true if a listener with that id existed and was removed
   */
  public boolean removeListener(int listenerId) {
    synchronized (listeners) {
      RemoteStatListenerImpl sl = (RemoteStatListenerImpl)listeners.remove(listenerId);
      if (sl != null) {
        List<RemoteStatListenerImpl> l = recipientToListeners.get(sl.getRecipient());
        l.remove(sl);
      }
      return sl != null;
    }
  }
  /** Drops every remote listener registered on behalf of {@code recipient}. */
  public void removeListenersByRecipient(InternalDistributedMember recipient) {
    synchronized (listeners) {
      List<RemoteStatListenerImpl> l = recipientToListeners.get(recipient);
      if (l != null && l.size() != 0) {
        for (RemoteStatListenerImpl sl : l) {
          listeners.remove(sl.getListenerId());
        }
        recipientToListeners.remove(recipient);
      }
    }
  }
  /** Registers an in-process listener for changes of {@code statName} on {@code stats}. */
  public void addLocalStatListener(LocalStatListener l, Statistics stats, String statName) {
    LocalStatListenerImpl sl = null;
    // NOTE(review): this class-level lock only serializes creation; it does
    // not guard localListeners, which is a ConcurrentHashMap -- confirm the
    // lock is still needed.
    synchronized (LocalStatListenerImpl.class) {
      sl = LocalStatListenerImpl.create(l, stats, statName);
    }
    this.localListeners.put(sl, Boolean.TRUE);
  }
  /**
   * Unregisters a local stat listener.
   *
   * @return true if the listener was found and removed
   */
  public boolean removeLocalStatListener(LocalStatListener listener) {
    Iterator<Map.Entry<LocalStatListenerImpl, Boolean>> it =
      this.localListeners.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry<LocalStatListenerImpl, Boolean> entry = it.next();
      if (listener.equals(entry.getKey().getListener())) {
        it.remove();
        return true;
      }
    }
    return false;
  }
  /** Returns the live set of registered local listeners (not a copy). */
  public Set<LocalStatListenerImpl> getLocalListeners() {
    return this.localListeners.keySet();
  }
  @Override
  public final File getArchiveFileName() {
    return this.con.getConfig().getStatisticArchiveFile();
  }
  @Override
  public final long getArchiveFileSizeLimit() {
    if (fileSizeLimitInKB()) {
      // use KB instead of MB to speed up rolling for testing
      return ((long)this.con.getConfig().getArchiveFileSizeLimit()) * (1024);
    } else {
      return ((long)this.con.getConfig().getArchiveFileSizeLimit()) * (1024*1024);
    }
  }
  @Override
  public final long getArchiveDiskSpaceLimit() {
    if (fileSizeLimitInKB()) {
      // use KB instead of MB to speed up removal for testing
      return ((long)this.con.getConfig().getArchiveDiskSpaceLimit()) * (1024);
    } else {
      return ((long)this.con.getConfig().getArchiveDiskSpaceLimit()) * (1024*1024);
    }
  }
  // Called each sample: fires local listeners, then sends one batched
  // StatListenerMessage per recipient with all of its remote listener changes.
  @Override
  protected void checkListeners() {
    checkLocalListeners();
    synchronized (listeners) {
      if (listeners.size() == 0) {
        return;
      }
      long timeStamp = System.currentTimeMillis();
      Iterator<Map.Entry<InternalDistributedMember, List<RemoteStatListenerImpl>>> it1 =
        recipientToListeners.entrySet().iterator();
      while (it1.hasNext()) {
        if (stopRequested()) return;
        Map.Entry<InternalDistributedMember, List<RemoteStatListenerImpl>> me =
          it1.next();
        List<RemoteStatListenerImpl> l = me.getValue();
        if (l.size() > 0) {
          InternalDistributedMember recipient = (InternalDistributedMember)me.getKey();
          StatListenerMessage msg =
            StatListenerMessage.create(timeStamp, l.size());
          msg.setRecipient(recipient);
          for (RemoteStatListenerImpl statListener : l) {
            if (getStatisticsManager().statisticsExists(statListener.getStatId())) {
              statListener.checkForChange(msg);
            } else {
              // its stale; indicate this with a negative listener id
              // fix for bug 29405
              msg.addChange(-statListener.getListenerId(), 0);
            }
          }
          this.con.getDistributionManager().putOutgoing(msg);
        }
      }
    }
  }
  @Override
  protected final int getSampleRate() {
    return this.con.getConfig().getStatisticSampleRate();
  }
  @Override
  public final boolean isSamplingEnabled() {
    return this.con.getConfig().getStatisticSamplingEnabled();
  }
  @Override
  protected final StatisticsManager getStatisticsManager() {
    return this.con;
  }
  @Override
  protected final OsStatisticsFactory getOsStatisticsFactory() {
    return this.con;
  }
  // Prefers the OS pid as the special stats id; falls back to the statistics
  // manager's id when the pid is unavailable (0 or -1).
  @Override
  protected final long getSpecialStatsId() {
    long statId = OSProcess.getId();
    if (statId == 0 || statId == -1) {
      statId = getStatisticsManager().getId();
    }
    return statId;
  }
  // Initializes OS-level statistics for this process; no-op when OS stats
  // are unavailable, and only logs when disabled via system property.
  @Override
  protected final void initProcessStats(long id) {
    if (PureJavaMode.osStatsAreAvailable()) {
      if (osStatsDisabled()) {
        logger.info(LogMarker.STATISTICS, LocalizedMessage.create(LocalizedStrings.GemFireStatSampler_OS_STATISTIC_COLLECTION_DISABLED_BY_OSSTATSDISABLED_SYSTEM_PROPERTY));
      } else {
        int retVal = HostStatHelper.initOSStats();
        if ( retVal != 0 ) {
          logger.error(LogMarker.STATISTICS, LocalizedMessage.create(LocalizedStrings.GemFireStatSampler_OS_STATISTICS_FAILED_TO_INITIALIZE_PROPERLY_SOME_STATS_MAY_BE_MISSING_SEE_BUGNOTE_37160));
        }
        HostStatHelper.newSystem(getOsStatisticsFactory());
        String statName = getStatisticsManager().getName();
        if (statName == null || statName.length() == 0) {
          statName = "javaApp" + getStatisticsManager().getId();
        }
        Statistics stats = HostStatHelper.newProcess(getOsStatisticsFactory(), id, statName + "-proc");
        this.processStats = HostStatHelper.newProcessStats(stats);
      }
    }
  }
  // Refreshes every statistics instance backed by OS system calls.
  @Override
  protected final void sampleProcessStats(boolean prepareOnly) {
    if (prepareOnly || osStatsDisabled() || !PureJavaMode.osStatsAreAvailable()) {
      return;
    }
    List<Statistics> l = getStatisticsManager().getStatsList();
    if (l == null) {
      return;
    }
    if (stopRequested()) return;
    HostStatHelper.readyRefreshOSStats();
    synchronized (l) {
      Iterator<Statistics> it = l.iterator();
      while (it.hasNext()) {
        if (stopRequested()) return;
        StatisticsImpl s = (StatisticsImpl)it.next();
        if (s.usesSystemCalls()) {
          HostStatHelper.refresh((LocalStatisticsImpl)s);
        }
      }
    }
  }
  @Override
  protected final void closeProcessStats() {
    if (PureJavaMode.osStatsAreAvailable()) {
      if (!osStatsDisabled()) {
        if (this.processStats != null) {
          this.processStats.close();
        }
        HostStatHelper.closeOSStats();
      }
    }
  }
  // Fires each registered local listener whose statistics instance still exists.
  private void checkLocalListeners() {
    for (LocalStatListenerImpl st : this.localListeners.keySet()) {
      if (getStatisticsManager().statisticsExists(st.getStatId())) {
        st.checkForChange();
      }
    }
  }
  // Hands out candidate listener ids, skipping negative values on overflow.
  private int getNextListenerId() {
    int result = nextListenerId++;
    if (nextListenerId < 0) {
      nextListenerId = 1;
    }
    return result;
  }
  // Base class for stat listeners: remembers the previous raw bits so a
  // change can be detected between samples.
  protected static abstract class StatListenerImpl {
    protected Statistics stats;
    protected StatisticDescriptorImpl stat;
    protected boolean oldValueInitialized = false;
    protected long oldValue;
    // Returns the unique id of the observed statistics instance, or -1 if it
    // has been closed.
    public long getStatId() {
      if (this.stats.isClosed()) {
        return -1;
      } else {
        return this.stats.getUniqueId();
      }
    }
    // Converts the stat's raw long bits into the listener-visible double.
    protected abstract double getBitsAsDouble(long bits);
  }
  // In-process listener: notifies a LocalStatListener when the value changes.
  protected static abstract class LocalStatListenerImpl extends StatListenerImpl {
    private LocalStatListener listener;
    public LocalStatListener getListener() {
      return this.listener;
    }
    // Factory: selects the subclass matching the stat's field type.
    static LocalStatListenerImpl create(LocalStatListener l, Statistics stats, String statName) {
      LocalStatListenerImpl result = null;
      StatisticDescriptorImpl stat = (StatisticDescriptorImpl)stats.nameToDescriptor(statName);
      switch (stat.getTypeCode()) {
      case StatisticDescriptorImpl.BYTE:
      case StatisticDescriptorImpl.SHORT:
      case StatisticDescriptorImpl.INT:
      case StatisticDescriptorImpl.LONG:
        result = new LocalLongStatListenerImpl();
        break;
      case StatisticDescriptorImpl.FLOAT:
        result = new LocalFloatStatListenerImpl();
        break;
      case StatisticDescriptorImpl.DOUBLE:
        result = new LocalDoubleStatListenerImpl();
        break;
      default:
        throw new RuntimeException("Illegal field type " + stats.getType() + " for statistic");
      }
      result.stats = stats;
      result.stat = stat;
      result.listener = l;
      return result;
    }
    /**
     * Checks to see if the value of the stat has changed. If it has then
     * the local listener is fired
     */
    public void checkForChange() {
      long currentValue = stats.getRawBits(stat);
      if (oldValueInitialized) {
        if (currentValue == oldValue) {
          return;
        }
      } else {
        oldValueInitialized = true;
      }
      oldValue = currentValue;
      listener.statValueChanged(getBitsAsDouble(currentValue));
    }
  }
  protected static class LocalLongStatListenerImpl extends LocalStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return bits;
    }
  }
  protected static class LocalFloatStatListenerImpl extends LocalStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return Float.intBitsToFloat((int)bits);
    }
  }
  protected static class LocalDoubleStatListenerImpl extends LocalStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return Double.longBitsToDouble(bits);
    }
  }
  /**
   * Used to register a StatListener.
   */
  protected static abstract class RemoteStatListenerImpl extends StatListenerImpl{
    private int listenerId;
    private InternalDistributedMember recipient;
    @Override
    public final int hashCode() {
      return listenerId;
    }
    @Override
    public final boolean equals(Object o) {
      if (o == null) {
        return false;
      }
      if (o instanceof RemoteStatListenerImpl) {
        return listenerId == ((RemoteStatListenerImpl)o).listenerId;
      } else {
        return false;
      }
    }
    public int getListenerId() {
      return this.listenerId;
    }
    public InternalDistributedMember getRecipient() {
      return this.recipient;
    }
    // Factory: selects the subclass matching the stat's field type.
    static RemoteStatListenerImpl create(int listenerId, InternalDistributedMember recipient, long resourceId, String statName, HostStatSampler sampler) {
      RemoteStatListenerImpl result = null;
      Statistics stats = sampler.getStatisticsManager().findStatistics(resourceId);
      StatisticDescriptorImpl stat = (StatisticDescriptorImpl)stats.nameToDescriptor(statName);
      switch (stat.getTypeCode()) {
      case StatisticDescriptorImpl.BYTE:
      case StatisticDescriptorImpl.SHORT:
      case StatisticDescriptorImpl.INT:
      case StatisticDescriptorImpl.LONG:
        result = new LongStatListenerImpl();
        break;
      case StatisticDescriptorImpl.FLOAT:
        result = new FloatStatListenerImpl();
        break;
      case StatisticDescriptorImpl.DOUBLE:
        result = new DoubleStatListenerImpl();
        break;
      default:
        throw new RuntimeException(LocalizedStrings.GemFireStatSampler_ILLEGAL_FIELD_TYPE_0_FOR_STATISTIC.toLocalizedString(stats.getType()));
      }
      result.stats = stats;
      result.stat = stat;
      result.listenerId = listenerId;
      result.recipient = recipient;
      return result;
    }
    /**
     * Checks to see if the value of the stat has changed. If it has then it
     * adds that change to the specified message.
     */
    public void checkForChange(StatListenerMessage msg) {
      long currentValue = stats.getRawBits(stat);
      if (oldValueInitialized) {
        if (currentValue == oldValue) {
          return;
        }
      } else {
        oldValueInitialized = true;
      }
      oldValue = currentValue;
      msg.addChange(listenerId, getBitsAsDouble(currentValue));
    }
  }
  protected static class LongStatListenerImpl extends RemoteStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return bits;
    }
  }
  protected static class FloatStatListenerImpl extends RemoteStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return Float.intBitsToFloat((int)bits);
    }
  }
  protected static class DoubleStatListenerImpl extends RemoteStatListenerImpl {
    @Override
    protected double getBitsAsDouble(long bits) {
      return Double.longBitsToDouble(bits);
    }
  }
}
| |
/* Autor: Carlos Aponte -- 09-10041
*
* Clase: Servlet Incidentes
*
 * Descripcion: este servlet se encarga de enviar la lista de los incidentes almacenados en el servidor,
 * SIN los datos del denunciante. Asimismo, se encarga de recibir una lista de incidentes enviada por
 * un cliente y anexarla a la lista del servidor.
* */
package ccs.server;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.lang.reflect.Type;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
/**
 * Servlet implementation class Incidentes.
 * <p>
 * GET returns the server's stored incident list as a JSON array, with the
 * reporter's identity anonymized. POST receives a JSON list of incidents from
 * a client and appends it to the server's XML store.
 */
@WebServlet("/Incidentes")
public class Incidentes extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#HttpServlet()
     */
    public Incidentes() {
        super();
    }

    /**
     * Incident representation compatible with the incident class used by the
     * clients; field names must match for Gson (de)serialization.
     */
    private class Incidente {
        Double lat;          // latitude of the incident location
        Double lon;          // longitude of the incident location
        Date hora;           // date and time of the incident
        String tipo;         // incident type
        String denunciante;  // user who reported the incident

        public Incidente(Double l1, Double l2, Date h, String t, String d) {
            lat = l1;
            lon = l2;
            hora = h;
            tipo = t;
            denunciante = d;
        }
    }

    /**
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
     *      response)
     *
     * Loads the server's incident list, converts it to JSON and sends it to
     * the client. The reporter field is replaced with a placeholder so the
     * reporter's data is never exposed.
     */
    protected void doGet(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        PrintWriter out = response.getWriter();
        String home = System.getProperty("user.home");
        String path = home + "/server-ccs/";
        String archivo = path + "lista_incidentes.xml";
        FileInputStream in = new FileInputStream(new File(archivo));
        try {
            // Parse the XML document of stored incidents.
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            DocumentBuilder db = dbf.newDocumentBuilder();
            Document documento = db.parse(in);
            documento.getDocumentElement().normalize();
            // Collect every <incidente> element into a list of Incidente.
            NodeList nodos = documento.getElementsByTagName("incidente");
            ArrayList<Incidente> l = new ArrayList<Incidente>();
            // Hoisted out of the loop: the format never changes per incident.
            DateFormat formato = new SimpleDateFormat("dd-MM-yyyy' -- 'HH:mm");
            for (int i = 0; i < nodos.getLength(); i++) {
                Node nodo = nodos.item(i);
                if (nodo.getNodeType() == Node.ELEMENT_NODE) {
                    double lat = Double.parseDouble(obtenerValor("latitud",
                            (Element) nodo));
                    double lon = Double.parseDouble(obtenerValor("longitud",
                            (Element) nodo));
                    String tipo = obtenerValor("tipo", (Element) nodo);
                    String hora = obtenerValor("hora", (Element) nodo);
                    // Anonymize the reporter before publishing the incident;
                    // the stored <denunciante> value is deliberately not read.
                    String denunciante = "100000";
                    Date h = formato.parse(hora);
                    l.add(new Incidente(lat, lon, h, tipo, denunciante));
                }
            }
            // Serialize the incident list to JSON with Gson and send it.
            out.print(new Gson().toJson(l));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: the stream previously stayed open whenever parsing threw.
            in.close();
        }
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
     *      response)
     *
     * Receives a JSON list of incidents from a client and appends it to the
     * server's store.
     */
    protected void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns the text content of the first child element with the given tag.
     */
    private static String obtenerValor(String etiqueta, Element elemento) {
        return elemento.getElementsByTagName(etiqueta).item(0).getChildNodes()
                .item(0).getNodeValue();
    }

    /**
     * Appends the given incidents to the XML store file.
     *
     * @param l incidents reported by clients
     * @return true if the store was updated, false on failure
     * @throws IOException if reading or writing the store fails
     */
    private boolean reportarIncidentes(ArrayList<Incidente> l)
            throws IOException {
        String home = System.getProperty("user.home");
        String path = home + "/server-ccs/";
        String archivo = path + "lista_incidentes.xml";
        // Render each incident as an XML fragment (same layout as the store).
        DateFormat formato = new SimpleDateFormat("dd-MM-yyyy' -- 'HH:mm");
        StringBuilder fragment = new StringBuilder();
        for (int i = 0; i < l.size(); i++) {
            Incidente r = l.get(i);
            String tiempo = formato.format(r.hora);
            fragment.append("\t<incidente>")
                    .append("\n\t\t<latitud>").append(r.lat).append("</latitud>")
                    .append("\n\t\t<longitud>").append(r.lon).append("</longitud>")
                    .append("\n\t\t<hora>").append(tiempo).append("</hora>")
                    .append("\n\t\t<tipo>").append(r.tipo).append("</tipo>")
                    .append("\n\t\t<denunciante>").append(r.denunciante).append("</denunciante>")
                    .append("\n\t</incidente>").append("\n");
        }
        String data = fragment.toString() + "\n</lista-incidentes>";
        try {
            // Load the current store contents.
            StringBuilder sb = new StringBuilder();
            FileInputStream fis = new FileInputStream(archivo);
            try {
                BufferedReader buffer = new BufferedReader(new InputStreamReader(fis));
                String linea;
                while ((linea = buffer.readLine()) != null) {
                    sb.append(linea + "\n");
                }
            } finally {
                // FIX: the stream previously leaked when reading threw.
                fis.close();
            }
            // Splice the new incidents in before the closing tag.
            String info = sb.toString();
            String new_info;
            if (info.isEmpty()) {
                // FIX: was info == "", a reference comparison that is never
                // true here, so an empty store was silently wiped.
                new_info = "<lista-incidentes>" + data;
            } else if (info.contains("</lista-incidentes>")) {
                new_info = info.replace("</lista-incidentes>", data);
            } else {
                // FIX: a malformed store used to be overwritten with an empty
                // string; refuse to touch it and report failure instead.
                return false;
            }
            // Write the updated store back.
            FileOutputStream fos = new FileOutputStream(new File(archivo));
            try {
                fos.write(new_info.getBytes());
            } finally {
                fos.close();
            }
            return true;
        } catch (FileNotFoundException e) {
            // FIX: was silently swallowed; at least record the failure.
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Reads the JSON incident list sent by the client, stores it, and answers
     * "ok" on success or "fail" otherwise.
     */
    protected void processRequest(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        response.setContentType("application/json");
        try {
            // Read the whole request body into a single string.
            StringBuilder sb = new StringBuilder();
            String s;
            while ((s = request.getReader().readLine()) != null) {
                sb.append(s);
            }
            // Parse the client's JSON as a list of incidents.
            Gson gson = new Gson();
            Type tipo = new TypeToken<List<Incidente>>(){}.getType();
            JsonElement json = new JsonParser().parse(sb.toString());
            List<Incidente> lista = gson.fromJson(json, tipo);
            ArrayList<Incidente> l = new ArrayList<Incidente>(lista);
            // Append the incidents to the store and report the outcome.
            if (reportarIncidentes(l)) {
                response.getOutputStream().print("ok");
            } else {
                // FIX: was misspelled "dail"; clients checking for "fail"
                // would never recognize a storage failure.
                response.getOutputStream().print("fail");
            }
            response.getOutputStream().flush();
        } catch (Exception ex) {
            ex.printStackTrace();
            response.getOutputStream().print("fail");
            response.getOutputStream().flush();
        }
    }
}
| |
/*
* Copyright 2010, 2011 sshj contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.schmizz.sshj.sftp;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
public class RandomAccessRemoteFile
implements DataInput, DataOutput {
    // One-byte scratch buffer shared by read() and write(int); this class is
    // not thread-safe.
    private final byte[] singleByte = new byte[1];
    // Underlying remote file; all I/O is delegated to it at offset fp.
    private final RemoteFile rf;
    // Current file pointer: the offset of the next read or write.
    private long fp;
    /** Wraps {@code rf} with a file-pointer-based DataInput/DataOutput view. */
    public RandomAccessRemoteFile(RemoteFile rf) {
        this.rf = rf;
    }
    /** Returns the current file pointer (offset of the next read/write). */
    public long getFilePointer() {
        return fp;
    }
    /** Moves the file pointer to absolute offset {@code fp}. */
    public void seek(long fp) {
        this.fp = fp;
    }
public int read()
throws IOException {
return read(singleByte, 0, 1) == -1 ? -1 : singleByte[0];
}
    /**
     * Reads up to {@code b.length} bytes into {@code b}.
     *
     * @return the number of bytes read, or -1 at end-of-file
     */
    public int read(byte[] b)
            throws IOException {
        return read(b, 0, b.length);
    }
public int read(byte[] b, int off, int len)
throws IOException {
final int count = rf.read(fp, b, off, len);
fp += count;
return count;
}
@Override
public boolean readBoolean()
throws IOException {
final int ch = read();
if (ch < 0)
throw new EOFException();
return (ch != 0);
}
@Override
public byte readByte()
throws IOException {
final int ch = this.read();
if (ch < 0)
throw new EOFException();
return (byte) (ch);
}
@Override
public char readChar()
throws IOException {
final int ch1 = this.read();
final int ch2 = this.read();
if ((ch1 | ch2) < 0)
throw new EOFException();
return (char) ((ch1 << 8) + ch2);
}
    /** Reads 8 bytes and returns them as a double (IEEE 754 bit pattern). */
    @Override
    public double readDouble()
            throws IOException {
        return Double.longBitsToDouble(readLong());
    }
    /** Reads 4 bytes and returns them as a float (IEEE 754 bit pattern). */
    @Override
    public float readFloat()
            throws IOException {
        return Float.intBitsToFloat(readInt());
    }
    /** Fills all of {@code b}, throwing {@link EOFException} if the file ends first. */
    @Override
    public void readFully(byte[] b)
            throws IOException {
        readFully(b, 0, b.length);
    }
@Override
public void readFully(byte[] b, int off, int len)
throws IOException {
int n = 0;
do {
int count = read(b, off + n, len - n);
if (count < 0)
throw new EOFException();
n += count;
} while (n < len);
}
@Override
public int readInt()
throws IOException {
final int ch1 = read();
final int ch2 = read();
final int ch3 = read();
final int ch4 = read();
if ((ch1 | ch2 | ch3 | ch4) < 0)
throw new EOFException();
return ((ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4);
}
@Override
public String readLine()
throws IOException {
StringBuffer input = new StringBuffer();
int c = -1;
boolean eol = false;
while (!eol)
switch (c = read()) {
case -1:
case '\n':
eol = true;
break;
case '\r':
eol = true;
long cur = getFilePointer();
if ((read()) != '\n')
seek(cur);
break;
default:
input.append((char) c);
break;
}
if ((c == -1) && (input.length() == 0))
return null;
return input.toString();
}
    /** Reads 8 bytes big-endian and returns them as a long. */
    @Override
    public long readLong()
            throws IOException {
        return ((long) (readInt()) << 32) + (readInt() & 0xFFFFFFFFL);
    }
    /** Reads two bytes and combines them big-endian into a signed short. */
    @Override
    public short readShort()
            throws IOException {
        final int ch1 = this.read();
        final int ch2 = this.read();
        if ((ch1 | ch2) < 0)
            throw new EOFException();
        return (short) ((ch1 << 8) + ch2);
    }
    /** Reads a modified-UTF-8 string as written by {@link DataOutput#writeUTF}. */
    @Override
    public String readUTF()
            throws IOException {
        return DataInputStream.readUTF(this);
    }
    /**
     * Reads one byte as an unsigned value in 0..255.
     * NOTE(review): this relies on read() returning 0..255, but read()
     * currently returns the raw signed byte from singleByte[0], so data bytes
     * >= 0x80 come back negative and are misreported as EOF here -- confirm
     * and fix in read().
     */
    @Override
    public int readUnsignedByte()
            throws IOException {
        final int ch = this.read();
        if (ch < 0)
            throw new EOFException();
        return ch;
    }
    /** Reads two bytes and combines them big-endian into an unsigned short. */
    @Override
    public int readUnsignedShort()
            throws IOException {
        final int ch1 = this.read();
        final int ch2 = this.read();
        if ((ch1 | ch2) < 0)
            throw new EOFException();
        return (ch1 << 8) + ch2;
    }
@Override
public int skipBytes(int n)
throws IOException {
if (n <= 0)
return 0;
final long pos = getFilePointer();
final long len = rf.length();
long newpos = pos + n;
if (newpos > len)
newpos = len;
seek(newpos);
/* return the actual number of bytes skipped */
return (int) (newpos - pos);
}
@Override
public void write(int i)
    throws IOException {
    // Stage the low-order byte of i in the shared one-byte scratch buffer
    // and emit it via the array write path.
    // NOTE(review): singleByte is an instance-level buffer, so this method
    // is presumably not safe for concurrent use — confirm with callers.
    singleByte[0] = (byte) i;
    write(singleByte);
}
@Override
public void write(byte[] b)
    throws IOException {
    // Write the whole buffer, delegating to the ranged variant.
    this.write(b, 0, b.length);
}
@Override
public void write(byte[] b, int off, int len)
    throws IOException {
    // Write len bytes starting at b[off] at the current file position.
    rf.write(fp, b, off, len);
    // Advance the pointer by the number of bytes written. The previous
    // code added (len - off), which under-advanced (or went backwards)
    // whenever off != 0 — len is already the byte count, not an end index.
    fp += len;
}
@Override
public void writeBoolean(boolean v)
    throws IOException {
    // Encoded as a single byte: 1 for true, 0 for false (DataOutput format).
    if (v) {
        write(1);
    } else {
        write(0);
    }
}
@Override
public void writeByte(int v)
    throws IOException {
    // Writes the low-order eight bits of v as a single byte.
    write(v);
}
@Override
public void writeBytes(String s)
    throws IOException {
    /*
     * DataOutput.writeBytes() is specified to write the low-order eight
     * bits of each char, one byte per char. The previous implementation
     * used String.getBytes(), which encodes with the platform default
     * charset — platform-dependent output, and a different byte count for
     * any non-ASCII character.
     */
    final int len = s.length();
    final byte[] b = new byte[len];
    for (int i = 0; i < len; i++) {
        b[i] = (byte) s.charAt(i);
    }
    write(b, 0, len);
}
@Override
public void writeChar(int v)
    throws IOException {
    // Big-endian: high byte first, then low byte.
    final int hi = (v >>> 8) & 0xFF;
    final int lo = v & 0xFF;
    write(hi);
    write(lo);
}
@Override
public void writeChars(String s)
    throws IOException {
    // Encode every char as two big-endian bytes and emit them in one call.
    final int count = s.length();
    final byte[] encoded = new byte[2 * count];
    int out = 0;
    for (int i = 0; i < count; i++) {
        final char ch = s.charAt(i);
        encoded[out++] = (byte) (ch >>> 8);
        encoded[out++] = (byte) ch;
    }
    write(encoded, 0, encoded.length);
}
@Override
public void writeDouble(double v)
    throws IOException {
    // IEEE 754 bit pattern, written as a big-endian long.
    final long bits = Double.doubleToLongBits(v);
    writeLong(bits);
}
@Override
public void writeFloat(float v)
    throws IOException {
    // IEEE 754 bit pattern, written as a big-endian int.
    final int bits = Float.floatToIntBits(v);
    writeInt(bits);
}
@Override
public void writeInt(int v)
    throws IOException {
    /*
     * Emit all four big-endian bytes with one write() call instead of four
     * single-byte writes. The backing file appears to be remote (rf exposes
     * a RemoteFileOutputStream), so batching avoids three extra per-byte
     * calls while producing identical bytes and the same fp advance.
     */
    final byte[] b = {
        (byte) (v >>> 24),
        (byte) (v >>> 16),
        (byte) (v >>> 8),
        (byte) v
    };
    write(b, 0, 4);
}
@Override
public void writeLong(long v)
    throws IOException {
    /*
     * Big-endian encoding staged into one buffer so the backing (remote)
     * file sees a single write instead of eight single-byte writes — same
     * bytes on disk, same net fp advance, far fewer calls.
     */
    final byte[] b = {
        (byte) (v >>> 56),
        (byte) (v >>> 48),
        (byte) (v >>> 40),
        (byte) (v >>> 32),
        (byte) (v >>> 24),
        (byte) (v >>> 16),
        (byte) (v >>> 8),
        (byte) v
    };
    write(b, 0, 8);
}
@Override
public void writeShort(int v)
    throws IOException {
    // Big-endian: high byte first, then low byte.
    final int hi = (v >>> 8) & 0xFF;
    final int lo = v & 0xFF;
    write(hi);
    write(lo);
}
@Override
public void writeUTF(String str)
    throws IOException {
    // Write modified UTF-8 (2-byte length prefix + encoded chars) through
    // a DataOutputStream layered on a stream positioned at the current
    // file pointer.
    final DataOutputStream dos = new DataOutputStream(rf.new RemoteFileOutputStream(fp));
    try {
        dos.writeUTF(str);
    } finally {
        dos.close();
    }
    // Advance our pointer by the number of bytes the stream wrote; the
    // DataOutputStream's byte count remains readable after close().
    fp += dos.size();
}
}
| |
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc.internal;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.TransactionException;
import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.batch.spi.BatchBuilder;
import org.hibernate.engine.jdbc.batch.spi.BatchKey;
import org.hibernate.engine.jdbc.spi.InvalidatableWrapper;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcWrapper;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.ResultSetReturn;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.StatementPreparer;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.transaction.internal.TransactionCoordinatorImpl;
import org.hibernate.engine.transaction.spi.TransactionContext;
import org.hibernate.engine.transaction.spi.TransactionCoordinator;
import org.hibernate.engine.transaction.spi.TransactionEnvironment;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.jdbc.WorkExecutor;
import org.hibernate.jdbc.WorkExecutorVisitable;
import org.jboss.logging.Logger;
import org.jboss.logging.Logger.Level;
/**
* Standard Hibernate implementation of {@link JdbcCoordinator}
* <p/>
* IMPL NOTE : Custom serialization handling!
*
* @author Steve Ebersole
* @author Brett Meyer
*/
public class JdbcCoordinatorImpl implements JdbcCoordinator {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class, JdbcCoordinatorImpl.class.getName()
	);

	private transient TransactionCoordinator transactionCoordinator;
	private final transient LogicalConnectionImpl logicalConnection;

	private transient Batch currentBatch;

	private transient long transactionTimeOutInstant = -1;

	// Statements registered with this coordinator, cross-referenced to the
	// result sets they produced (a null value means no result sets yet).
	private final HashMap<Statement,Set<ResultSet>> xref = new HashMap<Statement,Set<ResultSet>>();

	// Result sets for which no owning statement could be determined.
	private final Set<ResultSet> unassociatedResultSets = new HashSet<ResultSet>();

	private final SqlExceptionHelper exceptionHelper;

	private Statement lastQuery;

	/**
	 * If true, manually (and temporarily) circumvent aggressive release processing.
	 */
	private boolean releasesEnabled = true;

	/**
	 * Constructs a coordinator around a user-supplied JDBC connection.
	 *
	 * @param userSuppliedConnection the externally managed connection
	 * @param transactionCoordinator the owning transaction coordinator
	 */
	public JdbcCoordinatorImpl(
			Connection userSuppliedConnection,
			TransactionCoordinator transactionCoordinator) {
		this.transactionCoordinator = transactionCoordinator;
		this.logicalConnection = new LogicalConnectionImpl(
				userSuppliedConnection,
				transactionCoordinator.getTransactionContext().getConnectionReleaseMode(),
				transactionCoordinator.getTransactionContext().getTransactionEnvironment().getJdbcServices(),
				transactionCoordinator.getTransactionContext().getJdbcConnectionAccess()
		);
		this.exceptionHelper = logicalConnection.getJdbcServices().getSqlExceptionHelper();
	}

	/**
	 * Constructs a coordinator around an existing logical connection.
	 *
	 * @param logicalConnection the logical connection to coordinate
	 * @param transactionCoordinator the owning transaction coordinator
	 */
	public JdbcCoordinatorImpl(
			LogicalConnectionImpl logicalConnection,
			TransactionCoordinator transactionCoordinator) {
		this.transactionCoordinator = transactionCoordinator;
		this.logicalConnection = logicalConnection;
		this.exceptionHelper = logicalConnection.getJdbcServices().getSqlExceptionHelper();
	}

	/**
	 * Deserialization-only constructor; the transaction coordinator is wired
	 * up afterwards via {@link #afterDeserialize}.
	 */
	private JdbcCoordinatorImpl(LogicalConnectionImpl logicalConnection) {
		this.logicalConnection = logicalConnection;
		this.exceptionHelper = logicalConnection.getJdbcServices().getSqlExceptionHelper();
	}

	@Override
	public TransactionCoordinator getTransactionCoordinator() {
		return transactionCoordinator;
	}

	@Override
	public LogicalConnectionImplementor getLogicalConnection() {
		return logicalConnection;
	}

	protected TransactionEnvironment transactionEnvironment() {
		return getTransactionCoordinator().getTransactionContext().getTransactionEnvironment();
	}

	protected SessionFactoryImplementor sessionFactory() {
		return transactionEnvironment().getSessionFactory();
	}

	protected BatchBuilder batchBuilder() {
		return sessionFactory().getServiceRegistry().getService( BatchBuilder.class );
	}

	public SqlExceptionHelper sqlExceptionHelper() {
		return transactionEnvironment().getJdbcServices().getSqlExceptionHelper();
	}

	// Depth of nested flush calls; aggressive release is suspended while > 0.
	private int flushDepth = 0;

	@Override
	public void flushBeginning() {
		if ( flushDepth == 0 ) {
			releasesEnabled = false;
		}
		flushDepth++;
	}

	@Override
	public void flushEnding() {
		flushDepth--;
		if ( flushDepth < 0 ) {
			throw new HibernateException( "Mismatched flush handling" );
		}
		if ( flushDepth == 0 ) {
			releasesEnabled = true;
		}
		afterStatementExecution();
	}

	@Override
	public Connection close() {
		LOG.tracev( "Closing JDBC container [{0}]", this );
		if ( currentBatch != null ) {
			LOG.closingUnreleasedBatch();
			currentBatch.release();
			currentBatch = null;
		}
		cleanup();
		return logicalConnection.close();
	}

	@Override
	public Batch getBatch(BatchKey key) {
		if ( currentBatch != null ) {
			if ( currentBatch.getKey().equals( key ) ) {
				return currentBatch;
			}
			// A different batch is pending: flush and release it first.
			currentBatch.execute();
			currentBatch.release();
			// Clear the reference before building the replacement so a
			// failure in buildBatch() cannot leave a released batch behind.
			currentBatch = null;
		}
		currentBatch = batchBuilder().buildBatch( key, this );
		return currentBatch;
	}

	@Override
	public void executeBatch() {
		if ( currentBatch != null ) {
			currentBatch.execute();
			currentBatch.release();
			// Clear the reference so later getBatch()/executeBatch() calls
			// cannot operate on an already-released batch.
			currentBatch = null;
		}
	}

	@Override
	public void abortBatch() {
		if ( currentBatch != null ) {
			currentBatch.release();
			// Same as executeBatch(): never keep a released batch around.
			currentBatch = null;
		}
	}

	private transient StatementPreparer statementPreparer;

	@Override
	public StatementPreparer getStatementPreparer() {
		// Lazily created; field is transient and rebuilt after deserialization.
		if ( statementPreparer == null ) {
			statementPreparer = new StatementPreparerImpl( this );
		}
		return statementPreparer;
	}

	private transient ResultSetReturn resultSetExtractor;

	@Override
	public ResultSetReturn getResultSetReturn() {
		// Lazily created; field is transient and rebuilt after deserialization.
		if ( resultSetExtractor == null ) {
			resultSetExtractor = new ResultSetReturnImpl( this );
		}
		return resultSetExtractor;
	}

	@Override
	public void setTransactionTimeOut(int seconds) {
		// Multiply as long to avoid int overflow for very large timeouts
		// (seconds * 1000 overflows int beyond ~24 days).
		transactionTimeOutInstant = System.currentTimeMillis() + ( seconds * 1000L );
	}

	@Override
	public int determineRemainingTransactionTimeOutPeriod() {
		if ( transactionTimeOutInstant < 0 ) {
			return -1;
		}
		final int secondsRemaining = (int) ((transactionTimeOutInstant - System.currentTimeMillis()) / 1000);
		if ( secondsRemaining <= 0 ) {
			throw new TransactionException( "transaction timeout expired" );
		}
		return secondsRemaining;
	}

	@Override
	public void afterStatementExecution() {
		LOG.tracev( "Starting after statement execution processing [{0}]", connectionReleaseMode() );
		if ( connectionReleaseMode() == ConnectionReleaseMode.AFTER_STATEMENT ) {
			if ( ! releasesEnabled ) {
				LOG.debug( "Skipping aggressive release due to manual disabling" );
				return;
			}
			if ( hasRegisteredResources() ) {
				LOG.debug( "Skipping aggressive release due to registered resources" );
				return;
			}
			getLogicalConnection().releaseConnection();
		}
	}

	@Override
	public void afterTransaction() {
		transactionTimeOutInstant = -1;
		if ( connectionReleaseMode() == ConnectionReleaseMode.AFTER_STATEMENT ||
				connectionReleaseMode() == ConnectionReleaseMode.AFTER_TRANSACTION ) {
			if ( hasRegisteredResources() ) {
				LOG.forcingContainerResourceCleanup();
				releaseResources();
			}
			getLogicalConnection().aggressiveRelease();
		}
	}

	private ConnectionReleaseMode connectionReleaseMode() {
		return getLogicalConnection().getConnectionReleaseMode();
	}

	@Override
	public <T> T coordinateWork(WorkExecutorVisitable<T> work) {
		Connection connection = getLogicalConnection().getConnection();
		try {
			T result = work.accept( new WorkExecutor<T>(), connection );
			afterStatementExecution();
			return result;
		}
		catch ( SQLException e ) {
			throw sqlExceptionHelper().convert( e, "error executing work" );
		}
	}

	@Override
	public boolean isReadyForSerialization() {
		// A user-supplied connection must be physically disconnected; an
		// internally managed one must simply have no open JDBC resources.
		return getLogicalConnection().isUserSuppliedConnection()
				? ! getLogicalConnection().isPhysicallyConnected()
				: ! hasRegisteredResources();
	}

	/**
	 * Custom serialization hook; see IMPL NOTE on the class.
	 *
	 * @param oos the stream to write to
	 * @throws IOException on stream errors
	 */
	public void serialize(ObjectOutputStream oos) throws IOException {
		if ( ! isReadyForSerialization() ) {
			throw new HibernateException( "Cannot serialize Session while connected" );
		}
		logicalConnection.serialize( oos );
	}

	/**
	 * Custom deserialization hook; see IMPL NOTE on the class.
	 *
	 * @param ois the stream to read from
	 * @param transactionContext the transaction context for the new coordinator
	 * @return the rehydrated coordinator
	 * @throws IOException on stream errors
	 * @throws ClassNotFoundException on class resolution errors
	 */
	public static JdbcCoordinatorImpl deserialize(
			ObjectInputStream ois,
			TransactionContext transactionContext) throws IOException, ClassNotFoundException {
		return new JdbcCoordinatorImpl( LogicalConnectionImpl.deserialize( ois, transactionContext ) );
	}

	public void afterDeserialize(TransactionCoordinatorImpl transactionCoordinator) {
		this.transactionCoordinator = transactionCoordinator;
	}

	@Override
	public void register(Statement statement) {
		LOG.tracev( "Registering statement [{0}]", statement );
		if ( xref.containsKey( statement ) ) {
			throw new HibernateException( "statement already registered with JDBCContainer" );
		}
		xref.put( statement, null );
	}

	@Override
	@SuppressWarnings({ "unchecked" })
	public void registerLastQuery(Statement statement) {
		LOG.tracev( "Registering last query statement [{0}]", statement );
		if ( statement instanceof JdbcWrapper ) {
			// Unwrap proxies so cancelLastQuery() targets the real statement.
			JdbcWrapper<Statement> wrapper = ( JdbcWrapper<Statement> ) statement;
			registerLastQuery( wrapper.getWrappedObject() );
			return;
		}
		lastQuery = statement;
	}

	@Override
	public void cancelLastQuery() {
		try {
			if (lastQuery != null) {
				lastQuery.cancel();
			}
		}
		catch (SQLException sqle) {
			throw exceptionHelper.convert(
					sqle,
					"Cannot cancel query"
			);
		}
		finally {
			lastQuery = null;
		}
	}

	@Override
	public void release(Statement statement) {
		LOG.tracev( "Releasing statement [{0}]", statement );
		Set<ResultSet> resultSets = xref.get( statement );
		if ( resultSets != null ) {
			for ( ResultSet resultSet : resultSets ) {
				close( resultSet );
			}
			resultSets.clear();
		}
		xref.remove( statement );
		close( statement );
		afterStatementExecution();
	}

	/**
	 * Debug-log when the given statement was never registered here. Kept at
	 * DEBUG rather than WARN: many connection pools hand back proxy/wrapper
	 * objects around the JDBC Statement, which would otherwise cause
	 * excessive logging. See HHH-8210.
	 */
	private void logIfUnregistered(Statement statement) {
		if ( LOG.isEnabled( Level.DEBUG ) && !xref.containsKey( statement ) ) {
			LOG.unregisteredStatement();
		}
	}

	@Override
	public void register(ResultSet resultSet, Statement statement) {
		LOG.tracev( "Registering result set [{0}]", resultSet );
		if ( statement == null ) {
			try {
				statement = resultSet.getStatement();
			}
			catch ( SQLException e ) {
				throw exceptionHelper.convert( e, "unable to access statement from resultset" );
			}
		}
		if ( statement != null ) {
			logIfUnregistered( statement );
			Set<ResultSet> resultSets = xref.get( statement );
			if ( resultSets == null ) {
				resultSets = new HashSet<ResultSet>();
				xref.put( statement, resultSets );
			}
			resultSets.add( resultSet );
		}
		else {
			// No statement could be determined; track the result set on its own.
			unassociatedResultSets.add( resultSet );
		}
	}

	@Override
	public void release(ResultSet resultSet, Statement statement) {
		LOG.tracev( "Releasing result set [{0}]", resultSet );
		if ( statement == null ) {
			try {
				statement = resultSet.getStatement();
			}
			catch ( SQLException e ) {
				throw exceptionHelper.convert( e, "unable to access statement from resultset" );
			}
		}
		if ( statement != null ) {
			logIfUnregistered( statement );
			Set<ResultSet> resultSets = xref.get( statement );
			if ( resultSets != null ) {
				resultSets.remove( resultSet );
				if ( resultSets.isEmpty() ) {
					xref.remove( statement );
				}
			}
		}
		else {
			boolean removed = unassociatedResultSets.remove( resultSet );
			if ( !removed ) {
				LOG.unregisteredResultSetWithoutStatement();
			}
		}
		close( resultSet );
	}

	@Override
	public boolean hasRegisteredResources() {
		return ! xref.isEmpty() || ! unassociatedResultSets.isEmpty();
	}

	@Override
	public void releaseResources() {
		LOG.tracev( "Releasing JDBC container resources [{0}]", this );
		cleanup();
	}

	@Override
	public void enableReleases() {
		releasesEnabled = true;
	}

	@Override
	public void disableReleases() {
		releasesEnabled = false;
	}

	// Close every tracked statement and result set and forget them all.
	private void cleanup() {
		for ( Map.Entry<Statement,Set<ResultSet>> entry : xref.entrySet() ) {
			if ( entry.getValue() != null ) {
				closeAll( entry.getValue() );
			}
			close( entry.getKey() );
		}
		xref.clear();
		closeAll( unassociatedResultSets );
	}

	protected void closeAll(Set<ResultSet> resultSets) {
		for ( ResultSet resultSet : resultSets ) {
			close( resultSet );
		}
		resultSets.clear();
	}

	@SuppressWarnings({ "unchecked" })
	protected void close(Statement statement) {
		LOG.tracev( "Closing prepared statement [{0}]", statement );
		if ( statement instanceof InvalidatableWrapper ) {
			// Unwrap, close the real statement, then invalidate the wrapper.
			InvalidatableWrapper<Statement> wrapper = ( InvalidatableWrapper<Statement> ) statement;
			close( wrapper.getWrappedObject() );
			wrapper.invalidate();
			return;
		}
		try {
			// if we are unable to "clean" the prepared statement,
			// we do not close it
			try {
				if ( statement.getMaxRows() != 0 ) {
					statement.setMaxRows( 0 );
				}
				if ( statement.getQueryTimeout() != 0 ) {
					statement.setQueryTimeout( 0 );
				}
			}
			catch( SQLException sqle ) {
				// there was a problem "cleaning" the prepared statement
				if ( LOG.isDebugEnabled() ) {
					LOG.debugf( "Exception clearing maxRows/queryTimeout [%s]", sqle.getMessage() );
				}
				return; // EARLY EXIT!!!
			}
			statement.close();
			if ( lastQuery == statement ) {
				lastQuery = null;
			}
		}
		catch( SQLException e ) {
			LOG.debugf( "Unable to release JDBC statement [%s]", e.getMessage() );
		}
		catch ( Exception e ) {
			// try to handle general errors more elegantly
			LOG.debugf( "Unable to release JDBC statement [%s]", e.getMessage() );
		}
	}

	@SuppressWarnings({ "unchecked" })
	protected void close(ResultSet resultSet) {
		LOG.tracev( "Closing result set [{0}]", resultSet );
		if ( resultSet instanceof InvalidatableWrapper ) {
			// Unwrap, close the real result set, then invalidate the wrapper.
			InvalidatableWrapper<ResultSet> wrapper = (InvalidatableWrapper<ResultSet>) resultSet;
			close( wrapper.getWrappedObject() );
			wrapper.invalidate();
			return;
		}
		try {
			resultSet.close();
		}
		catch( SQLException e ) {
			LOG.debugf( "Unable to release JDBC result set [%s]", e.getMessage() );
		}
		catch ( Exception e ) {
			// try to handle general errors more elegantly
			LOG.debugf( "Unable to release JDBC result set [%s]", e.getMessage() );
		}
	}
}
| |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.charts.util;
import java.awt.Graphics2D;
import java.awt.geom.Rectangle2D;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.StringTokenizer;
import net.sf.jasperreports.charts.ChartTheme;
import net.sf.jasperreports.charts.ChartThemeBundle;
import net.sf.jasperreports.engine.DefaultJasperReportsContext;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JRPrintHyperlink;
import net.sf.jasperreports.engine.JRPrintImageArea;
import net.sf.jasperreports.engine.JRPrintImageAreaHyperlink;
import net.sf.jasperreports.engine.JRPropertiesUtil;
import net.sf.jasperreports.engine.JRRuntimeException;
import net.sf.jasperreports.engine.JasperReportsContext;
import net.sf.jasperreports.engine.Renderable;
import net.sf.jasperreports.engine.RenderableUtil;
import net.sf.jasperreports.engine.fill.DefaultChartTheme;
import net.sf.jasperreports.engine.util.JRSingletonCache;
import org.jfree.chart.ChartRenderingInfo;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.axis.NumberTickUnit;
import org.jfree.chart.axis.TickUnit;
import org.jfree.chart.axis.TickUnitSource;
import org.jfree.chart.axis.TickUnits;
import org.jfree.chart.entity.ChartEntity;
import org.jfree.chart.entity.EntityCollection;
import org.jfree.data.Range;
/**
* @author Teodor Danciu (teodord@users.sourceforge.net)
* @version $Id: ChartUtil.java 7199 2014-08-27 13:58:10Z teodord $
*/
public final class ChartUtil
{
/**
*
*/
@SuppressWarnings("deprecation")
private static final JRSingletonCache<ChartRendererFactory> CHART_RENDERER_FACTORY_CACHE =
new JRSingletonCache<ChartRendererFactory>(ChartRendererFactory.class);
protected static final double AUTO_TICK_UNIT_THRESHOLD = 1e12;
protected static final double AUTO_TICK_UNIT_FACTOR = 1000d;
private JasperReportsContext jasperReportsContext;
/**
*
*/
private ChartUtil(JasperReportsContext jasperReportsContext)
{
this.jasperReportsContext = jasperReportsContext;
}
/**
*
*/
private static ChartUtil getDefaultInstance()//FIXMECONTEXT check this use of this
{
return new ChartUtil(DefaultJasperReportsContext.getInstance());
}
/**
*
*/
public static ChartUtil getInstance(JasperReportsContext jasperReportsContext)
{
return new ChartUtil(jasperReportsContext);
}
/**
*
*/
public static List<JRPrintImageAreaHyperlink> getImageAreaHyperlinks(
JFreeChart chart,
ChartHyperlinkProvider chartHyperlinkProvider,
Graphics2D grx,
Rectangle2D renderingArea
)// throws JRException
{
List<JRPrintImageAreaHyperlink> areaHyperlinks = null;
if (chartHyperlinkProvider != null && chartHyperlinkProvider.hasHyperlinks())
{
ChartRenderingInfo renderingInfo = new ChartRenderingInfo();
if (grx == null)
{
chart.createBufferedImage((int) renderingArea.getWidth(), (int) renderingArea.getHeight(), renderingInfo);
}
else
{
chart.draw(grx, renderingArea, renderingInfo);
}
EntityCollection entityCollection = renderingInfo.getEntityCollection();
if (entityCollection != null && entityCollection.getEntityCount() > 0)
{
areaHyperlinks = new ArrayList<JRPrintImageAreaHyperlink>(entityCollection.getEntityCount());
for (@SuppressWarnings("unchecked")
Iterator<ChartEntity> it = entityCollection.iterator(); it.hasNext();)
{
ChartEntity entity = it.next();
JRPrintHyperlink printHyperlink = chartHyperlinkProvider.getEntityHyperlink(entity);
if (printHyperlink != null)
{
JRPrintImageArea area = getImageArea(entity);
JRPrintImageAreaHyperlink areaHyperlink = new JRPrintImageAreaHyperlink();
areaHyperlink.setArea(area);
areaHyperlink.setHyperlink(printHyperlink);
areaHyperlinks.add(areaHyperlink);
}
}
}
}
return areaHyperlinks;
}
private static JRPrintImageArea getImageArea(ChartEntity entity)
{
JRPrintImageArea area = new JRPrintImageArea();
area.setShape(JRPrintImageArea.getShape(entity.getShapeType()));
int[] coordinates = getCoordinates(entity);
if (coordinates != null)
{
area.setCoordinates(coordinates);
}
return area;
}
private static int[] getCoordinates(ChartEntity entity)
{
int[] coordinates = null;
String shapeCoords = entity.getShapeCoords();
if (shapeCoords != null && shapeCoords.length() > 0)
{
StringTokenizer tokens = new StringTokenizer(shapeCoords, ",");
coordinates = new int[tokens.countTokens()];
int idx = 0;
while (tokens.hasMoreTokens())
{
String coord = tokens.nextToken();
coordinates[idx] = Integer.parseInt(coord);
++idx;
}
}
return coordinates;
}
/**
*
*/
public ChartTheme getTheme(String themeName)
{
if (themeName == null)
{
return new DefaultChartTheme();
}
List<ChartThemeBundle> themeBundles = jasperReportsContext.getExtensions(ChartThemeBundle.class);
for (Iterator<ChartThemeBundle> it = themeBundles.iterator(); it.hasNext();)
{
ChartThemeBundle bundle = it.next();
ChartTheme chartTheme = bundle.getChartTheme(themeName);
if (chartTheme != null)
{
return chartTheme;
}
}
throw new JRRuntimeException("Chart theme '" + themeName + "' not found.");
}
/**
* @deprecated Replaced by {@link #getTheme(String)}.
*/
public static ChartTheme getChartTheme(String themeName)
{
return getDefaultInstance().getTheme(themeName);
}
/**
*
*/
public ChartRenderableFactory getChartRenderableFactory(String renderType)
{
String factoryClass = JRPropertiesUtil.getInstance(jasperReportsContext).getProperty(ChartRenderableFactory.PROPERTY_CHART_RENDERER_FACTORY_PREFIX + renderType);
if (factoryClass == null)
{
throw new JRRuntimeException("No chart renderer factory specifyed for '" + renderType + "' render type.");
}
try
{
@SuppressWarnings("deprecation")
ChartRendererFactory factory = CHART_RENDERER_FACTORY_CACHE.getCachedInstance(factoryClass);
if (factory instanceof ChartRenderableFactory)
{
return (ChartRenderableFactory)factory;
}
return new WrappingChartRenderableFactory(factory);
}
catch (JRException e)
{
throw new JRRuntimeException(e);
}
}
/**
* @deprecated Replaced by {@link #getChartRenderableFactory(String)}.
*/
public static ChartRendererFactory getChartRendererFactory(String renderType)
{
return getDefaultInstance().getChartRenderableFactory(renderType);
}
/**
* @deprecated To be removed.
*/
public static class WrappingChartRenderableFactory implements ChartRenderableFactory
{
private ChartRendererFactory factory;
public WrappingChartRenderableFactory(ChartRendererFactory factory)
{
this.factory = factory;
}
public net.sf.jasperreports.engine.JRRenderable getRenderer(
JFreeChart chart,
ChartHyperlinkProvider chartHyperlinkProvider,
Rectangle2D rectangle
)
{
return factory.getRenderer(chart, chartHyperlinkProvider, rectangle);
}
public Renderable getRenderable(
JasperReportsContext jasperReportsContext,
JFreeChart chart,
ChartHyperlinkProvider chartHyperlinkProvider,
Rectangle2D rectangle)
{
net.sf.jasperreports.engine.JRRenderable deprecatedRenderer
= getRenderer(chart, chartHyperlinkProvider, rectangle);
return RenderableUtil.getWrappingRenderable(deprecatedRenderer);
}
}
/**
* @deprecated replaced by {@link #createIntegerTickUnits(Locale)}
*/
@Deprecated
public TickUnitSource createIntegerTickUnits()
{
return createIntegerTickUnits(Locale.getDefault());
}
public TickUnitSource createIntegerTickUnits(Locale locale)
{
DecimalFormatSymbols formatSymbols = DecimalFormatSymbols.getInstance(locale);
// copied from NumberAxis.createIntegerTickUnits() to preserve backward behaviour
TickUnits units = new TickUnits();
DecimalFormat df0 = new DecimalFormat("0", formatSymbols);
DecimalFormat df1 = new DecimalFormat("#,##0", formatSymbols);
units.add(new NumberTickUnit(1, df0));
units.add(new NumberTickUnit(2, df0));
units.add(new NumberTickUnit(5, df0));
units.add(new NumberTickUnit(10, df0));
units.add(new NumberTickUnit(20, df0));
units.add(new NumberTickUnit(50, df0));
units.add(new NumberTickUnit(100, df0));
units.add(new NumberTickUnit(200, df0));
units.add(new NumberTickUnit(500, df0));
units.add(new NumberTickUnit(1000, df1));
units.add(new NumberTickUnit(2000, df1));
units.add(new NumberTickUnit(5000, df1));
units.add(new NumberTickUnit(10000, df1));
units.add(new NumberTickUnit(20000, df1));
units.add(new NumberTickUnit(50000, df1));
units.add(new NumberTickUnit(100000, df1));
units.add(new NumberTickUnit(200000, df1));
units.add(new NumberTickUnit(500000, df1));
units.add(new NumberTickUnit(1000000, df1));
units.add(new NumberTickUnit(2000000, df1));
units.add(new NumberTickUnit(5000000, df1));
units.add(new NumberTickUnit(10000000, df1));
units.add(new NumberTickUnit(20000000, df1));
units.add(new NumberTickUnit(50000000, df1));
units.add(new NumberTickUnit(100000000, df1));
units.add(new NumberTickUnit(200000000, df1));
units.add(new NumberTickUnit(500000000, df1));
units.add(new NumberTickUnit(1000000000, df1));
units.add(new NumberTickUnit(2000000000, df1));
units.add(new NumberTickUnit(5000000000.0, df1));
units.add(new NumberTickUnit(10000000000.0, df1));
// adding further values by default because 1E10 is not enough for some people
// using getNumberInstance because that's what NumberAxis.createIntegerTickUnits does
units.add(new NumberTickUnit(20000000000L, df1));
units.add(new NumberTickUnit(50000000000L, df1));
units.add(new NumberTickUnit(100000000000L, df1));
units.add(new NumberTickUnit(200000000000L, df1));
units.add(new NumberTickUnit(500000000000L, df1));
units.add(new NumberTickUnit(1000000000000L, df1));
units.add(new NumberTickUnit(2000000000000L, df1));
units.add(new NumberTickUnit(5000000000000L, df1));
units.add(new NumberTickUnit(10000000000000L, df1));
units.add(new NumberTickUnit(20000000000000L, df1));
units.add(new NumberTickUnit(50000000000000L, df1));
units.add(new NumberTickUnit(100000000000000L, df1));
units.add(new NumberTickUnit(200000000000000L, df1));
units.add(new NumberTickUnit(500000000000000L, df1));
units.add(new NumberTickUnit(1000000000000000L, df1));
units.add(new NumberTickUnit(2000000000000000L, df1));
units.add(new NumberTickUnit(5000000000000000L, df1));
units.add(new NumberTickUnit(10000000000000000L, df1));
units.add(new NumberTickUnit(20000000000000000L, df1));
units.add(new NumberTickUnit(50000000000000000L, df1));
units.add(new NumberTickUnit(100000000000000000L, df1));
units.add(new NumberTickUnit(200000000000000000L, df1));
units.add(new NumberTickUnit(500000000000000000L, df1));
units.add(new NumberTickUnit(1000000000000000000L, df1));
units.add(new NumberTickUnit(2000000000000000000L, df1));
units.add(new NumberTickUnit(5000000000000000000L, df1));
return units;
}
/**
* @deprecated replaced by {@link #createStandardTickUnits(Locale)}
*/
@Deprecated
public TickUnitSource createStandardTickUnits()
{
return createStandardTickUnits(Locale.getDefault());
}
public TickUnitSource createStandardTickUnits(Locale locale)
{
DecimalFormatSymbols formatSymbols = DecimalFormatSymbols.getInstance(locale);
//copied from NumberAxis.createStandardTickUnits() to preserve backward behaviour
TickUnits units = new TickUnits();
DecimalFormat df0 = new DecimalFormat("0.00000000", formatSymbols);
DecimalFormat df1 = new DecimalFormat("0.0000000", formatSymbols);
DecimalFormat df2 = new DecimalFormat("0.000000", formatSymbols);
DecimalFormat df3 = new DecimalFormat("0.00000", formatSymbols);
DecimalFormat df4 = new DecimalFormat("0.0000", formatSymbols);
DecimalFormat df5 = new DecimalFormat("0.000", formatSymbols);
DecimalFormat df6 = new DecimalFormat("0.00", formatSymbols);
DecimalFormat df7 = new DecimalFormat("0.0", formatSymbols);
DecimalFormat df8 = new DecimalFormat("#,##0", formatSymbols);
//these two are probably not needed
DecimalFormat df9 = new DecimalFormat("#,###,##0", formatSymbols);
DecimalFormat df10 = new DecimalFormat("#,###,###,##0", formatSymbols);
// we can add the units in any order, the TickUnits collection will
// sort them...
units.add(new NumberTickUnit(0.0000001, df1));
units.add(new NumberTickUnit(0.000001, df2));
units.add(new NumberTickUnit(0.00001, df3));
units.add(new NumberTickUnit(0.0001, df4));
units.add(new NumberTickUnit(0.001, df5));
units.add(new NumberTickUnit(0.01, df6));
units.add(new NumberTickUnit(0.1, df7));
units.add(new NumberTickUnit(1, df8));
units.add(new NumberTickUnit(10, df8));
units.add(new NumberTickUnit(100, df8));
units.add(new NumberTickUnit(1000, df8));
units.add(new NumberTickUnit(10000, df8));
units.add(new NumberTickUnit(100000, df8));
units.add(new NumberTickUnit(1000000, df9));
units.add(new NumberTickUnit(10000000, df9));
units.add(new NumberTickUnit(100000000, df9));
units.add(new NumberTickUnit(1000000000, df10));
units.add(new NumberTickUnit(10000000000.0, df10));
units.add(new NumberTickUnit(100000000000.0, df10));
units.add(new NumberTickUnit(0.00000025, df0));
units.add(new NumberTickUnit(0.0000025, df1));
units.add(new NumberTickUnit(0.000025, df2));
units.add(new NumberTickUnit(0.00025, df3));
units.add(new NumberTickUnit(0.0025, df4));
units.add(new NumberTickUnit(0.025, df5));
units.add(new NumberTickUnit(0.25, df6));
units.add(new NumberTickUnit(2.5, df7));
units.add(new NumberTickUnit(25, df8));
units.add(new NumberTickUnit(250, df8));
units.add(new NumberTickUnit(2500, df8));
units.add(new NumberTickUnit(25000, df8));
units.add(new NumberTickUnit(250000, df8));
units.add(new NumberTickUnit(2500000, df9));
units.add(new NumberTickUnit(25000000, df9));
units.add(new NumberTickUnit(250000000, df9));
units.add(new NumberTickUnit(2500000000.0, df10));
units.add(new NumberTickUnit(25000000000.0, df10));
units.add(new NumberTickUnit(250000000000.0, df10));
units.add(new NumberTickUnit(0.0000005, df1));
units.add(new NumberTickUnit(0.000005, df2));
units.add(new NumberTickUnit(0.00005, df3));
units.add(new NumberTickUnit(0.0005, df4));
units.add(new NumberTickUnit(0.005, df5));
units.add(new NumberTickUnit(0.05, df6));
units.add(new NumberTickUnit(0.5, df7));
units.add(new NumberTickUnit(5L, df8));
units.add(new NumberTickUnit(50L, df8));
units.add(new NumberTickUnit(500L, df8));
units.add(new NumberTickUnit(5000L, df8));
units.add(new NumberTickUnit(50000L, df8));
units.add(new NumberTickUnit(500000L, df8));
units.add(new NumberTickUnit(5000000L, df9));
units.add(new NumberTickUnit(50000000L, df9));
units.add(new NumberTickUnit(500000000L, df9));
units.add(new NumberTickUnit(5000000000L, df10));
units.add(new NumberTickUnit(50000000000L, df10));
units.add(new NumberTickUnit(500000000000L, df10));
// adding further values by default because 5E11 is not enough for some people
units.add(new NumberTickUnit(1000000000000L, df8));
units.add(new NumberTickUnit(2500000000000L, df8));
units.add(new NumberTickUnit(5000000000000L, df8));
units.add(new NumberTickUnit(10000000000000L, df8));
units.add(new NumberTickUnit(25000000000000L, df8));
units.add(new NumberTickUnit(50000000000000L, df8));
units.add(new NumberTickUnit(100000000000000L, df8));
units.add(new NumberTickUnit(250000000000000L, df8));
units.add(new NumberTickUnit(500000000000000L, df8));
units.add(new NumberTickUnit(1000000000000000L, df8));
units.add(new NumberTickUnit(2500000000000000L, df8));
units.add(new NumberTickUnit(5000000000000000L, df8));
units.add(new NumberTickUnit(10000000000000000L, df8));
units.add(new NumberTickUnit(25000000000000000L, df8));
units.add(new NumberTickUnit(50000000000000000L, df8));
units.add(new NumberTickUnit(100000000000000000L, df8));
units.add(new NumberTickUnit(250000000000000000L, df8));
units.add(new NumberTickUnit(500000000000000000L, df8));
units.add(new NumberTickUnit(1000000000000000000L, df8));
units.add(new NumberTickUnit(2500000000000000000L, df8));
units.add(new NumberTickUnit(5000000000000000000L, df8));
return units;
}
public void setAutoTickUnit(NumberAxis numberAxis)
{
if (numberAxis.isAutoTickUnitSelection())
{
Range range = numberAxis.getRange();
if (range.getLength() >= AUTO_TICK_UNIT_THRESHOLD)
{
// this is a workaround for a floating point error makes JFreeChart
// select tick units that are too small when the values are very large
double autoSize = range.getLength() / AUTO_TICK_UNIT_THRESHOLD;
TickUnit unit = numberAxis.getStandardTickUnits().getCeilingTickUnit(autoSize);
numberAxis.setTickUnit((NumberTickUnit) unit, false, false);
}
}
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.validate.query;
import org.elasticsearch.ElasticSearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Required;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
/**
* A request to validate a specific query.
* <p/>
* <p>The request requires the query source to be set either using {@link #query(org.elasticsearch.index.query.QueryBuilder)},
* or {@link #query(byte[])}.
*/
public class ValidateQueryRequest extends BroadcastOperationRequest<ValidateQueryRequest> {

    private static final XContentType contentType = Requests.CONTENT_TYPE;

    // Serialized query to validate; may be backed by an unsafe (caller-owned)
    // buffer until beforeStart() copies it.
    private BytesReference querySource;
    private boolean querySourceUnsafe;

    // When true, the response carries a detailed explanation of the query.
    private boolean explain;

    // Document types the query runs against; empty means all types.
    private String[] types = Strings.EMPTY_ARRAY;

    ValidateQueryRequest() {
    }

    /**
     * Constructs a new validate request against the provided indices. No indices provided means it will
     * run against all indices.
     */
    public ValidateQueryRequest(String... indices) {
        super(indices);
    }

    @Override
    public ActionRequestValidationException validate() {
        // No request-specific validation beyond the broadcast-level checks.
        return super.validate();
    }

    @Override
    protected void beforeStart() {
        // Copy an unsafe buffer before the request goes asynchronous so the
        // caller can no longer mutate the bytes underneath us.
        if (querySourceUnsafe) {
            querySource = querySource.copyBytesArray();
            querySourceUnsafe = false;
        }
    }

    /**
     * The query source to execute.
     */
    BytesReference querySource() {
        return querySource;
    }

    /**
     * The query source to execute.
     *
     * @see org.elasticsearch.index.query.QueryBuilders
     */
    @Required
    public ValidateQueryRequest query(QueryBuilder queryBuilder) {
        this.querySource = queryBuilder.buildAsBytes();
        this.querySourceUnsafe = false;
        return this;
    }

    /**
     * The query source to execute in the form of a map.
     *
     * @throws ElasticSearchGenerationException if the map cannot be serialized
     */
    @Required
    public ValidateQueryRequest query(Map querySource) {
        try {
            XContentBuilder builder = XContentFactory.contentBuilder(contentType);
            builder.map(querySource);
            return query(builder);
        } catch (IOException e) {
            throw new ElasticSearchGenerationException("Failed to generate [" + querySource + "]", e);
        }
    }

    /**
     * The query source to execute, taken from an already-built content builder.
     */
    @Required
    public ValidateQueryRequest query(XContentBuilder builder) {
        this.querySource = builder.bytes();
        this.querySourceUnsafe = false;
        return this;
    }

    /**
     * The query source to validate. It is preferable to use either {@link #query(byte[])}
     * or {@link #query(org.elasticsearch.index.query.QueryBuilder)}.
     */
    @Required
    public ValidateQueryRequest query(String querySource) {
        this.querySource = new BytesArray(querySource);
        this.querySourceUnsafe = false;
        return this;
    }

    /**
     * The query source to validate.
     */
    @Required
    public ValidateQueryRequest query(byte[] querySource) {
        return query(querySource, 0, querySource.length, false);
    }

    /**
     * The query source to validate.
     *
     * @param unsafe true when the byte array is shared with the caller and
     *               must be copied before asynchronous execution
     */
    @Required
    public ValidateQueryRequest query(byte[] querySource, int offset, int length, boolean unsafe) {
        return query(new BytesArray(querySource, offset, length), unsafe);
    }

    /**
     * The query source to validate.
     *
     * @param unsafe true when the bytes are shared with the caller and
     *               must be copied before asynchronous execution
     */
    @Required
    public ValidateQueryRequest query(BytesReference querySource, boolean unsafe) {
        this.querySource = querySource;
        this.querySourceUnsafe = unsafe;
        return this;
    }

    /**
     * The types of documents the query will run against. Defaults to all types.
     */
    String[] types() {
        return this.types;
    }

    /**
     * The types of documents the query will run against. Defaults to all types.
     */
    public ValidateQueryRequest types(String... types) {
        this.types = types;
        return this;
    }

    /**
     * Indicate if detailed information about query is requested
     */
    public void explain(boolean explain) {
        this.explain = explain;
    }

    /**
     * Indicates if detailed information about query is requested
     */
    public boolean explain() {
        return explain;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // Bytes read from the wire are owned by this request, hence safe.
        querySourceUnsafe = false;
        querySource = in.readBytesReference();
        int typesSize = in.readVInt();
        if (typesSize > 0) {
            types = new String[typesSize];
            for (int i = 0; i < typesSize; i++) {
                types[i] = in.readUTF();
            }
        }
        explain = in.readBoolean();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeBytesReference(querySource);
        out.writeVInt(types.length);
        for (String type : types) {
            out.writeUTF(type);
        }
        out.writeBoolean(explain);
    }

    @Override
    public String toString() {
        String sSource = "_na_";
        try {
            sSource = XContentHelper.convertToJson(querySource, false);
        } catch (Exception e) {
            // best-effort rendering only; fall back to the "_na_" placeholder
        }
        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", querySource[" + sSource + "], explain:" + explain;
    }
}
| |
package org.adligo.fabricate;
import org.adligo.fabricate.common.files.I_FabFileIO;
import org.adligo.fabricate.common.files.PatternFileMatcher;
import org.adligo.fabricate.common.files.xml_io.I_FabXmlFileIO;
import org.adligo.fabricate.common.i18n.I_CommandLineConstants;
import org.adligo.fabricate.common.i18n.I_FabricateConstants;
import org.adligo.fabricate.common.i18n.I_SystemMessages;
import org.adligo.fabricate.common.log.I_FabFileLog;
import org.adligo.fabricate.common.log.I_FabLog;
import org.adligo.fabricate.common.system.CommandLineArgs;
import org.adligo.fabricate.common.system.FabSystem;
import org.adligo.fabricate.common.system.FabSystemSetup;
import org.adligo.fabricate.common.system.FabricateEnvironment;
import org.adligo.fabricate.common.system.FabricateXmlDiscovery;
import org.adligo.fabricate.common.system.I_ExecutionResult;
import org.adligo.fabricate.common.system.I_Executor;
import org.adligo.fabricate.common.system.I_FailureTransport;
import org.adligo.fabricate.common.util.StringUtils;
import org.adligo.fabricate.depot.Depot;
import org.adligo.fabricate.depot.DepotContext;
import org.adligo.fabricate.java.JavaFactory;
import org.adligo.fabricate.java.ManifestParser;
import org.adligo.fabricate.managers.CommandManager;
import org.adligo.fabricate.managers.FabricationManager;
import org.adligo.fabricate.managers.ProjectsManager;
import org.adligo.fabricate.models.common.ExecutionEnvironmentMutant;
import org.adligo.fabricate.models.common.FabricationMemoryConstants;
import org.adligo.fabricate.models.common.FabricationMemoryMutant;
import org.adligo.fabricate.models.common.FabricationRoutineCreationException;
import org.adligo.fabricate.models.common.I_RoutineBrief;
import org.adligo.fabricate.models.common.MemoryLock;
import org.adligo.fabricate.models.common.RoutineBriefOrigin;
import org.adligo.fabricate.models.dependencies.DependencyVersionMismatchException;
import org.adligo.fabricate.models.fabricate.Fabricate;
import org.adligo.fabricate.models.fabricate.FabricateMutant;
import org.adligo.fabricate.models.project.I_Project;
import org.adligo.fabricate.repository.RepositoryManager;
import org.adligo.fabricate.routines.I_ProjectBriefsAware;
import org.adligo.fabricate.routines.I_ProjectsAware;
import org.adligo.fabricate.routines.I_RoutineBuilder;
import org.adligo.fabricate.routines.I_RoutineFabricateProcessorFactory;
import org.adligo.fabricate.routines.I_RoutinePopulatorMutant;
import org.adligo.fabricate.routines.RoutineBuilder;
import org.adligo.fabricate.routines.implicit.ImplicitRoutineFactory;
import org.adligo.fabricate.routines.implicit.ScmContext;
import org.adligo.fabricate.xml.io_v1.common_v1_0.RoutineParentType;
import org.adligo.fabricate.xml.io_v1.depot_v1_0.DepotType;
import org.adligo.fabricate.xml.io_v1.dev_v1_0.FabricateDevType;
import org.adligo.fabricate.xml.io_v1.fabricate_v1_0.FabricateType;
import org.adligo.fabricate.xml.io_v1.result_v1_0.FailureType;
import org.adligo.fabricate.xml.io_v1.result_v1_0.MachineInfoType;
import org.adligo.fabricate.xml.io_v1.result_v1_0.ResultType;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.Duration;
/**
 * Entry point and top-level orchestrator for a Fabricate run.  It discovers
 * fabricate.xml, guards against concurrent runs with a run.marker file,
 * prepares the output directory and optional file log, loads the XML-declared
 * routines, and then dispatches either to command processing or to the
 * build/archive stages before writing output/result.xml.
 */
public class FabricateController {
  private static FabricateEnvironment ENV = FabricateEnvironment.INSTANCE;

  public static FabricateEnvironment getENV() {
    return ENV;
  }

  public static void setENV(FabricateEnvironment eNV) {
    ENV = eNV;
  }

  private final FabSystem sys_;
  private final I_CommandLineConstants cmdMessages_;
  private final I_FabricateConstants constants_;
  private final I_SystemMessages sysMessages_;
  private final I_FabLog log_;
  private final I_FabFileIO files_;
  private final I_FabXmlFileIO xmlFiles_;
  private final FabricateFactory fabFactory_;
  private final FabricateXmlDiscovery discovery_;

  private I_RoutineFabricateProcessorFactory factory_;
  private FabricateType fabXml_;
  // True when the run was started with explicit commands on the command line.
  private boolean commands_;
  private RepositoryManager repositoryManager_;
  private I_FabFileLog fileLog_ = null;
  private FabricationMemoryMutant<Object> memory_;
  /**
   * The first throwable thrown by either
   * processing of a command or a build or share stage.
   */
  private I_FailureTransport failure_;
  private Fabricate fab_;

  @SuppressWarnings("unused")
  public static final void main(String [] args) throws Exception {
    new FabricateController(new FabSystem(), args, new FabricateFactory());
  }

  /**
   * Runs a complete fabrication; all work happens in this constructor.
   *
   * @param sys the system facade (file IO, logging, args, process execution)
   * @param args the raw command line arguments
   * @param factory creates the collaborating managers and models
   * @throws ClassNotFoundException when a routine class can not be loaded
   * @throws IOException on file system problems
   */
  public FabricateController(FabSystem sys, String [] args, FabricateFactory factory)
      throws ClassNotFoundException, IOException {
    Map<String,String> argMap = CommandLineArgs.parseArgs(args);
    sys_ = sys;
    fabFactory_ = factory;
    files_ = sys.getFileIO();
    xmlFiles_ = sys.getXmlFileIO();
    constants_ = sys_.getConstants();
    sysMessages_ = constants_.getSystemMessages();
    cmdMessages_ = constants_.getCommandLineConstants();
    FabSystemSetup.setup(sys, args);
    log_ = sys.getLog();
    discovery_ = factory.createDiscovery(sys);
    if (!discovery_.hasFabricateXml()) {
      log_.println(sysMessages_.getExceptionNoFabricateXmlOrProjectXmlFound());
      return;
    }
    String fabricateDir = discovery_.getFabricateXmlDir();
    // run.marker guards against two Fabricate instances running concurrently
    // in the same directory.
    String runMarker = fabricateDir + "run.marker";
    if (files_.exists(runMarker)) {
      log_.println(sysMessages_.getFabricateAppearsToBeAlreadyRunning() + sys_.lineSeparator() +
          sysMessages_.getFabricateAppearsToBeAlreadyRunningPartTwo() + sys_.lineSeparator() +
          fabricateDir);
      return;
    }
    OutputStream fos = null;
    try {
      files_.create(runMarker);
      files_.deleteOnExit(runMarker);
      fos = files_.newFileOutputStream(runMarker);
      // "start" is the run start time in millis; NOTE(review): assumes the
      // argument is always present — confirm FabSystemSetup guarantees it.
      String start = argMap.get("start");
      fos.write(start.getBytes(StandardCharsets.UTF_8));
    } catch (IOException e1) {
      log_.printTrace(e1);
      log_.println(sysMessages_.getThereWasAProblemCreatingRunMarkerInTheFollowingDirectory() +
          sys_.lineSeparator() + fabricateDir);
      return;
    } finally {
      if (fos != null) {
        try {
          fos.close();
        } catch (IOException x) {
          //do nothing
        }
      }
    }
    // Start from a clean output directory every run.
    String outputDir = fabricateDir + "output";
    if (files_.exists(outputDir)) {
      files_.deleteRecursive(outputDir);
    }
    if (!files_.mkdirs(outputDir)) {
      File dir = files_.instance(outputDir);
      String absPath = dir.getAbsolutePath();
      log_.println(sysMessages_.getThereWasAProblemCreatingTheFollowingDirectory() + sys_.lineSeparator() +
          absPath);
      return;
    }
    String writeLogArg = cmdMessages_.getWriteLog(true);
    if (sys.hasArg(writeLogArg)) {
      fileLog_ = sys_.newFabFileLog(outputDir + files_.getNameSeparator() +
          "fab.log");
      sys.setLogFile(fileLog_);
    }
    log_.println(sys_.lineSeparator() + sysMessages_.getFabricating() +
        sys_.lineSeparator());
    List<String> argCommands = sys_.getArgValues(cmdMessages_.getCommand());
    if (argCommands.size() >= 1) {
      commands_ = true;
    }
    try {
      if (!addXmlRoutines(factory, argCommands)) {
        return;
      }
    } catch (DependencyVersionMismatchException x) {
      log_.printTrace(x);
      DependencyVersionMismatchException.logProjectError(sys_, discovery_.getFabricateXmlPath(), x);
      return;
    }
    memory_ = addMemoryValues(sys, factory);
    // Load (or create) the depot, and lock it so only this class may replace it.
    String depotDir = fab_.getFabricateXmlRunDir() + "depot";
    String depotFile = depotDir + files_.getNameSeparator() + "depot.xml";
    DepotType depotType = null;
    if (log_.isLogEnabled(FabricateController.class)) {
      log_.println("checking depot file " + depotFile);
    }
    if (files_.exists(depotFile)) {
      depotType = xmlFiles_.parseDepot_v1_0(depotFile);
    } else {
      depotType = new DepotType();
    }
    memory_.put(FabricationMemoryConstants.DEPOT, new Depot(depotDir, new DepotContext(sys_), depotType));
    memory_.addLock(new MemoryLock(FabricationMemoryConstants.DEPOT,
        Collections.singleton(FabricateController.class.getName())));
    repositoryManager_ = factory.createRepositoryManager(sys_, fab_);
    I_RoutinePopulatorMutant populator = factory_.createRoutinePopulator();
    setupRoutinePopulator(populator);
    if (requiresProjects(argCommands)) {
      if (!manageProjectsDirAndMode(factory)) {
        return;
      }
      if (log_.isLogEnabled(FabricateController.class)) {
        log_.println(sys_.lineSeparator() + sysMessages_.getRunningFacets());
      }
      I_RoutineBuilder routineBuilder = factory_.createRoutineBuilder(RoutineBriefOrigin.FACET, populator);
      ProjectsManager pm = factory.createProjectsManager(sys_, factory_, routineBuilder);
      failure_ = pm.setupAndRun(memory_);
      if (checkFailure()) {
        return;
      }
    }
    //add in loaded projects
    setupRoutinePopulatorAfterFacets(populator);
    if (commands_) {
      if (log_.isLogEnabled(FabricateController.class)) {
        log_.println(sys_.lineSeparator() + sysMessages_.getRunningCommands());
      }
      I_RoutineBuilder routineBuilder = factory_.createRoutineBuilder(RoutineBriefOrigin.COMMAND, populator);
      CommandManager manager = factory.createCommandManager(argCommands, sys_, factory_, routineBuilder);
      failure_ = manager.processCommands(memory_);
      if (checkFailure()) {
        return;
      }
    } else {
      if (log_.isLogEnabled(FabricateController.class)) {
        log_.println(sys_.lineSeparator() + sysMessages_.getRunningBuildStages());
      }
      I_RoutineBuilder routineBuilder = factory_.createRoutineBuilder(RoutineBriefOrigin.STAGE, populator);
      I_RoutineBuilder routineArchiveBuilder = factory_.createRoutineBuilder(RoutineBriefOrigin.ARCHIVE_STAGE, populator);
      FabricationManager fabManager = factory.createFabricationManager(sys_, factory_, routineBuilder,
          routineArchiveBuilder);
      failure_ = fabManager.setupAndRunBuildStages(memory_);
      if (checkFailure()) {
        return;
      }
      if (sys_.hasArg(cmdMessages_.getArchive(true))) {
        if (log_.isLogEnabled(FabricateController.class)) {
          log_.println(sys_.lineSeparator() + sysMessages_.getRunningArchiveStages());
        }
        log_.println("TODO fabManager.setupAndRunArchiveStages");
      }
    }
    writeResult();
  }

  /**
   * Writes result.xml when a failure has been recorded.
   *
   * @return true when the run failed and the caller should stop
   */
  private boolean checkFailure() throws IOException {
    if (failure_ != null) {
      writeResult();
      return true;
    }
    return false;
  }

  /**
   * Seeds the fabrication memory with the execution environment, the java
   * home and the java factory, locking each entry to this class.
   */
  private FabricationMemoryMutant<Object> addMemoryValues(FabSystem sys, FabricateFactory factory) {
    FabricationMemoryMutant<Object> memory = factory.createMemory(sys);
    memory.put(FabricationMemoryConstants.ENV,
        new ExecutionEnvironmentMutant(sysMessages_));
    memory.addLock(new MemoryLock(FabricationMemoryConstants.ENV,
        Collections.singleton(FabricateController.class.getName())));
    String javaHome = ENV.getJavaHome(sys_);
    memory.put(FabricationMemoryConstants.JAVA_HOME, javaHome);
    memory.addLock(new MemoryLock(FabricationMemoryConstants.JAVA_HOME,
        Collections.singleton(FabricateController.class.getName())));
    JavaFactory jFactory = factory.createJavaFactory();
    memory.put(FabricationMemoryConstants.JAVA_FACTORY, jFactory);
    memory.addLock(new MemoryLock(FabricationMemoryConstants.JAVA_FACTORY,
        Collections.singleton(FabricateController.class.getName())));
    return memory;
  }

  /**
   * Parses fabricate.xml, determines the running Fabricate version from the
   * fabricate jar's manifest, and loads traits, commands, stages and projects.
   *
   * @return false when a routine class could not be loaded (already logged)
   */
  private boolean addXmlRoutines(FabricateFactory factory, List<String> argCommands)
      throws IOException, ClassNotFoundException, DependencyVersionMismatchException {
    String fabricateXmlPath = discovery_.getFabricateXmlPath();
    fabXml_ = xmlFiles_.parseFabricate_v1_0(fabricateXmlPath);
    fab_ = factory.create(sys_, fabXml_, discovery_);
    FabricateMutant fm = factory.createMutant(fab_);
    String fabricateHome = fm.getFabricateHome();
    // Exactly one fabricate*.jar is expected in FABRICATE_HOME/lib.
    PatternFileMatcher pfm = new PatternFileMatcher(files_, sys_, "fabricate*", true);
    List<String> files = files_.list(fabricateHome + files_.getNameSeparator() + "lib", pfm);
    if (files.size() != 1) {
      throw new IllegalStateException("no fabricate*.jar in " + fabricateHome + " lib?");
    }
    JavaFactory jFactory = factory.createJavaFactory();
    ManifestParser mp = jFactory.newManifestParser();
    String fabricateJar = files.get(0);
    mp.readManifest(fabricateJar);
    String version = mp.get("Specification-Version");
    fm.setFabricateVersion(version);
    List<RoutineParentType> traits = fabXml_.getTrait();
    try {
      if (traits != null) {
        fm.addTraits(traits);
      }
      fm.addCommands(fabXml_.getCommand());
      fm.addStagesAndProjects(fabXml_);
      fab_ = factory.create(fm);
      factory_ = factory.createRoutineFabricateFactory(sys_, fab_, commands_);
      return true;
    } catch (ClassNotFoundException x) {
      String message = sysMessages_.getUnableToLoadTheFollowingClass() +
          sys_.lineSeparator() + x.getMessage();
      log_.println(message);
      log_.printTrace(x);
      return false;
    }
  }

  /**
   * Determines the projects directory and development mode from where
   * Fabricate was started, optionally purging an existing projects directory.
   *
   * @param factory
   * @return false if fabricate failed
   * @throws IOException
   * @throws ClassNotFoundException
   */
  private boolean manageProjectsDirAndMode(FabricateFactory factory) throws IOException, ClassNotFoundException {
    FabricateMutant fm = factory.createMutant(fab_);
    String devXmlDir = discovery_.getDevXmlDir();
    String projectRunDir = discovery_.getProjectXmlDir();
    if ( !StringUtils.isEmpty(devXmlDir)) {
      //it was run from a project directory and a dev.xml file was discovered in
      // the parent directory
      fm.setDevelopmentMode(true);
      fm.setProjectsDir(devXmlDir);
    } else if (!StringUtils.isEmpty(projectRunDir)){
      //it was run from the project dir like project_group/projects/projectX
      String projectsDir = files_.getParentDir(projectRunDir);
      fm.setProjectsDir(projectsDir);
    } else {
      String runDir = discovery_.getFabricateXmlDir();
      //it was run from the project group dir
      if (sys_.hasArg(cmdMessages_.getDevelopment(true))) {
        //development mode
        File runDirFile = files_.instance(runDir);
        String projectsDir = files_.getParentDir(runDir);
        fm.setProjectsDir(projectsDir);
        fm.setDevelopmentMode(true);
        // Create a dev.xml recording the project group, if absent.
        String dev = projectsDir + "dev.xml";
        if (!files_.exists(dev)) {
          FabricateDevType devType = new FabricateDevType();
          String projectGroup = runDirFile.getName();
          devType.setProjectGroup(projectGroup);
          xmlFiles_.writeDev_v1_0(dev, devType);
        }
      } else {
        String projectsDir = runDir + "projects";
        if (files_.exists(projectsDir)) {
          if (sys_.hasArg(cmdMessages_.getPurge(true))) {
            String fabricateDir = discovery_.getFabricateXmlDir();
            //pull off the last slash
            fabricateDir = fabricateDir.substring(0, fabricateDir.length() - 1);
            //.idx and .pack files had a read only attribute set on Windows, this is the fix
            // which allows the next Fabricate to purge (-p) the projects directory.
            I_Executor exe = sys_.getExecutor();
            I_ExecutionResult result = exe.executeProcess(FabricationMemoryConstants.EMPTY_ENV, fabricateDir,
                "chmod", "-R", "+w", "projects");
            if (result.getExitCode() != 0) {
              String message = sysMessages_.getTheFollowingCommandLineProgramExitedAbnormallyWithExitCodeX();
              message = message.replace("<X/>", "" + result.getExitCode());
              // the reported command matches the one actually executed above
              throw new IllegalStateException(message + sys_.lineSeparator() +
                  fabricateDir + ": chmod -R +w projects");
            }
            try {
              files_.deleteRecursive(projectsDir);
            } catch (IOException x) {
              log_.println(sysMessages_.getThereWasAProblemDeletingTheFollowingDirectory() +
                  sys_.lineSeparator() + projectsDir);
              if (log_.isLogEnabled(FabricateController.class)) {
                log_.printTrace(x);
              }
              return false;
            }
            makeProjectsDir(projectsDir);
          }
        } else {
          makeProjectsDir(projectsDir);
        }
        fm.setProjectsDir(projectsDir + files_.getNameSeparator());
      }
    }
    try {
      fab_ = factory.create(fm);
    } catch (DependencyVersionMismatchException x) {
      log_.printTrace(x);
      DependencyVersionMismatchException.logProjectError(sys_, discovery_.getFabricateXmlPath(), x);
      return false;
    }
    factory_ = factory.createRoutineFabricateFactory(sys_, fab_, commands_);
    String dir = fab_.getProjectsDir();
    String message = sysMessages_.getProjectsAreLocatedInTheFollowingDirectory();
    log_.println(message + sys_.lineSeparator() + dir);
    return true;
  }

  /**
   * Logs the run duration in milliseconds, seconds or minutes depending
   * on its magnitude.
   *
   * @param duration elapsed time in milliseconds
   */
  public void logDuration(long duration) {
    if (duration < 1000) {
      String message = sysMessages_.getDurationWasXMilliseconds();
      message = message.replaceFirst("<X/>", "" + duration);
      log_.println(message);
    } else if (duration > 1000 * 60) {
      double mins = duration;
      double divisor = 1000 * 60;
      mins = mins / divisor;
      // BigDecimal.valueOf avoids the inexact new BigDecimal(double) constructor
      BigDecimal bd = BigDecimal.valueOf(mins);
      bd = bd.setScale(2, RoundingMode.HALF_UP);
      String message = sysMessages_.getDurationWasXMinutes();
      message = message.replaceFirst("<X/>", "" + bd.toPlainString());
      log_.println(message);
    } else {
      double secs = duration;
      double divisor = 1000;
      secs = secs / divisor;
      BigDecimal bd = BigDecimal.valueOf(secs);
      bd = bd.setScale(2, RoundingMode.HALF_UP);
      String message = sysMessages_.getDurationWasXSeconds();
      message = message.replaceFirst("<X/>", "" + bd.toPlainString());
      log_.println(message);
    }
  }

  /**
   * Creates the projects directory.
   *
   * @throws IOException when the directory could not be created
   */
  public void makeProjectsDir(String projectsDir) throws IOException {
    if (!files_.mkdirs(projectsDir)) {
      String message = sysMessages_.getThereWasAProblemCreatingTheFollowingDirectory();
      throw new IOException(message + sys_.lineSeparator() +
          projectsDir);
    }
  }

  /**
   * Returns true when any requested command, stage or archive stage needs
   * the projects to be loaded (i.e. is project-aware).
   */
  private boolean requiresProjects(List<String> argCommands) {
    try {
      if (commands_) {
        if (factory_.anyCommandsAssignableTo(I_ProjectBriefsAware.class,
            argCommands)) {
          return true;
        }
        if (factory_.anyCommandsAssignableTo(I_ProjectsAware.class,
            argCommands)) {
          return true;
        }
      } else {
        String archive = cmdMessages_.getArchive(true);
        if (sys_.hasArg(archive)) {
          List<String> aStages = sys_.getArgValues(cmdMessages_.getArchiveStages());
          List<String> aSkips = sys_.getArgValues(cmdMessages_.getSkipArchives());
          if (factory_.anyArchiveStagesAssignableTo(I_ProjectBriefsAware.class,
              aStages, aSkips)) {
            return true;
          }
          if (factory_.anyArchiveStagesAssignableTo(I_ProjectsAware.class,
              aStages, aSkips)) {
            return true;
          }
        }
        List<String> stages = sys_.getArgValues(cmdMessages_.getStages());
        List<String> skips = sys_.getArgValues(cmdMessages_.getSkip());
        if (factory_.anyStagesAssignableTo(I_ProjectBriefsAware.class,
            stages, skips)) {
          return true;
        }
        if (factory_.anyStagesAssignableTo(I_ProjectsAware.class,
            stages, skips)) {
          return true;
        }
      }
    } catch (FabricationRoutineCreationException x) {
      FabricationRoutineCreationException.log(log_, sysMessages_, x);
    }
    return false;
  }

  /**
   * Writes output/result.xml with machine info, duration and success or
   * failure details, then terminates the VM.
   * NOTE(review): the process always exits with status 0, even on failure —
   * confirm whether a non-zero exit code is wanted for scripting.
   */
  @SuppressWarnings("boxing")
  private void writeResult() throws IOException {
    File resultFile = null;
    String fabricateXmlPath = discovery_.getFabricateXmlDir();
    resultFile = files_.create(fabricateXmlPath +
        "output" + files_.getNameSeparator() + "result.xml");
    ResultType result = new ResultType();
    File fabricatePath = files_.instance(fabricateXmlPath);
    // Reconstruct the command line from the passable arguments.
    String passable = sys_.getArgValue(CommandLineArgs.PASSABLE_ARGS_);
    String [] array = CommandLineArgs.fromPassableString(passable);
    StringBuilder sb = new StringBuilder();
    sb.append("fab");
    for (int i = 0; i < array.length; i++) {
      sb.append(" ");
      sb.append(array[i]);
    }
    result.setCommandLine(sb.toString());
    result.setName(fabricatePath.getName());
    String os = sys_.getOperatingSystem();
    result.setOs(os);
    String osVersion = sys_.getOperatingSystemVersion(os);
    result.setOsVersion(osVersion);
    if (failure_ == null) {
      result.setSuccessful(true);
    } else {
      result.setSuccessful(false);
      result.setFailure(failure_.getFailure());
    }
    String hostName = sys_.getHostname();
    MachineInfoType machine = new MachineInfoType();
    machine.setHostname(hostName);
    machine.setProcessors(sys_.getAvailableProcessors());
    machine.setRam(fab_.getXms());
    String[] cpu = sys_.getCpuInfo(os);
    machine.setCpuName(cpu[0]);
    machine.setCpuSpeed(cpu[1]);
    String jv = sys_.getJavaVersion();
    machine.setJavaVersion(jv);
    result.setMachine(machine);
    long end = sys_.getCurrentTime();
    String startString = sys_.getArgValue("start");
    // Long.parseLong avoids the deprecated boxing constructor new Long(String)
    long start = Long.parseLong(startString);
    long dur = end - start;
    if (log_ != null) {
      if (log_.isLogEnabled(FabricateController.class)) {
        logDuration(dur);
      }
    } else {
      logDuration(dur);
    }
    try {
      DatatypeFactory df = sys_.newDatatypeFactory();
      Duration duration = df.newDuration(dur);
      result.setDuration(duration);
    } catch (DatatypeConfigurationException e1) {
      log_.printTrace(e1);
      return;
    }
    xmlFiles_.writeResult_v1_0(resultFile.getAbsolutePath(), result);
    if (result.isSuccessful()) {
      log_.println(sysMessages_.getFabricationSuccessful());
    } else {
      if (failure_ != null) {
        if (!failure_.isLogged()) {
          FailureType f = failure_.getFailure();
          log_.println(f.getDetail());
        }
      }
      log_.println(sysMessages_.getFabricationFailed());
    }
    log_.derail();
    if (fileLog_ != null) {
      fileLog_.close();
    }
    sys_.exit(0);
  }

  /**
   * Wires the routine populator with the repository collaborators and,
   * when fabricate.xml declares an scm section, the scm context.
   */
  private void setupRoutinePopulator(I_RoutinePopulatorMutant populatorMutant) {
    populatorMutant.setRepositoryFactory(fabFactory_);
    populatorMutant.setRepositoryManager(repositoryManager_);
    populatorMutant.setRoutineProcessorFactory(factory_);
    I_RoutineBrief scm = fab_.getScm();
    if (scm != null) {
      ScmContext scmCtx = new ScmContext(scm);
      populatorMutant.putInput(ScmContext.class, scmCtx);
    }
  }

  /**
   * Adds the projects loaded by the facet phase to the populator.
   */
  @SuppressWarnings("unchecked")
  private void setupRoutinePopulatorAfterFacets(I_RoutinePopulatorMutant populatorMutant) {
    List<I_Project> projects = (List<I_Project>) memory_.get(FabricationMemoryConstants.PARTICIPATING_PROJECTS);
    populatorMutant.setProjects(projects);
  }
}
| |
package com.mxgraph.swing.handler;
import java.awt.Cursor;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.MouseEvent;
import javax.swing.ImageIcon;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.swing.util.mxMouseAdapter;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxEvent;
import com.mxgraph.util.mxEventObject;
import com.mxgraph.util.mxEventSource.mxIEventListener;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.util.mxUtils;
import com.mxgraph.view.mxCellState;
/**
* Placeholder for rotation handler
*/
public class mxRotationHandler extends mxMouseAdapter
{
	/**
	 * Icon rendered on the rotation handle.
	 */
	public static ImageIcon ROTATE_ICON = null;

	/**
	 * Loads the rotate icon from the classpath.
	 */
	static
	{
		ROTATE_ICON = new ImageIcon(
				mxRotationHandler.class
						.getResource("/com/mxgraph/swing/images/rotate.gif"));
	}
/**
*
*/
private static double PI4 = Math.PI / 4;
/**
* Reference to the enclosing graph component.
*/
protected mxGraphComponent graphComponent;
/**
* Specifies if this handler is enabled. Default is true.
*/
protected boolean enabled = true;
/**
*
*/
protected JComponent handle;
/**
*
*/
protected mxCellState currentState;
/**
*
*/
protected double initialAngle;
/**
*
*/
protected double currentAngle;
/**
*
*/
protected Point first;
/**
* Constructs a new rotation handler.
*/
	public mxRotationHandler(mxGraphComponent graphComponent)
	{
		this.graphComponent = graphComponent;
		graphComponent.addMouseListener(this);
		handle = createHandle();

		// Installs the paint handler
		graphComponent.addListener(mxEvent.AFTER_PAINT, new mxIEventListener()
		{
			public void invoke(Object sender, mxEventObject evt)
			{
				Graphics g = (Graphics) evt.getProperty("g");
				paint(g);
			}
		});

		// Listens to all mouse events on the rendering control
		graphComponent.getGraphControl().addMouseListener(this);
		graphComponent.getGraphControl().addMouseMotionListener(this);

		// Needs to catch events because these are consumed
		handle.addMouseListener(this);
		handle.addMouseMotionListener(this);
	}
	/**
	 * Returns the enclosing graph component.
	 */
	public mxGraphComponent getGraphComponent()
	{
		return graphComponent;
	}
	/**
	 * Returns true if this handler reacts to mouse events. Default is true.
	 */
	public boolean isEnabled()
	{
		return enabled;
	}
	/**
	 * Enables or disables this handler.
	 */
	public void setEnabled(boolean value)
	{
		enabled = value;
	}
/**
*
*/
protected JComponent createHandle()
{
JLabel label = new JLabel(ROTATE_ICON);
label.setSize(ROTATE_ICON.getIconWidth(), ROTATE_ICON.getIconHeight());
label.setOpaque(false);
return label;
}
/**
*
*/
public boolean isStateHandled(mxCellState state)
{
return graphComponent.getGraph().getModel().isVertex(state.getCell());
}
/**
*
*/
public void mousePressed(MouseEvent e)
{
if (currentState != null && handle.getParent() != null
&& e.getSource() == handle /* mouse hits handle */)
{
start(e);
e.consume();
}
}
/**
*
*/
	/**
	 * Captures the cell's current rotation and the initial mouse position,
	 * and selects the cell under the handle if it is not selected yet.
	 */
	public void start(MouseEvent e)
	{
		// Current rotation from the style, converted from degrees to radians
		initialAngle = mxUtils.getDouble(currentState.getStyle(),
				mxConstants.STYLE_ROTATION) * mxConstants.RAD_PER_DEG;
		currentAngle = initialAngle;
		// Anchor point in graph-control coordinates for the drag that follows
		first = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(),
				graphComponent.getGraphControl());

		if (!graphComponent.getGraph().isCellSelected(currentState.getCell()))
		{
			graphComponent.selectCellForEvent(currentState.getCell(), e);
		}
	}
	/**
	 * Tracks the mouse to decide where the rotation handle should be shown:
	 * shows a hand cursor while hovering the handle itself, and attaches or
	 * detaches the handle as the pointer enters or leaves handled (vertex)
	 * cell states.
	 */
	public void mouseMoved(MouseEvent e)
	{
		if (graphComponent.isEnabled() && isEnabled() && !e.isConsumed())
		{
			if (handle.getParent() != null && e.getSource() == handle /* mouse hits handle */)
			{
				// Hovering the handle itself: switch to a hand cursor
				graphComponent.getGraphControl().setCursor(
						new Cursor(Cursor.HAND_CURSOR));
				e.consume();
			}
			else if (currentState == null
					|| !currentState.getRectangle().contains(e.getPoint()))
			{
				// Pointer left the tracked state (or none tracked yet):
				// resolve the cell state under the mouse, if any
				mxCellState eventState = graphComponent
						.getGraph()
						.getView()
						.getState(
								graphComponent.getCellAt(e.getX(), e.getY(),
										false));

				mxCellState state = null;

				if (eventState != null && isStateHandled(eventState))
				{
					state = eventState;
				}

				if (currentState != state)
				{
					currentState = state;

					if (currentState == null && handle.getParent() != null)
					{
						// Nothing handled under the mouse: hide and detach
						handle.setVisible(false);
						handle.getParent().remove(handle);
					}
					else if (currentState != null)
					{
						if (handle.getParent() == null)
						{
							// Adds component for rendering the handles (preview is separate)
							graphComponent.getGraphControl().add(handle, 0);
							handle.setVisible(true);
						}

						// Place the handle at the cell's bottom-right corner,
						// inset by 4px.  NOTE(review): the vertical offset uses
						// handle.getWidth() — presumably getHeight() was meant;
						// harmless while the icon is square, but confirm.
						handle.setLocation(
								(int) (currentState.getX()
										+ currentState.getWidth()
										- handle.getWidth() - 4),
								(int) (currentState.getY()
										+ currentState.getHeight()
										- handle.getWidth() - 4));
					}
				}
			}
		}
	}
/**
 * Updates the preview angle while dragging from the handle. The angle is
 * derived from the pointer's position relative to the cell center, and the
 * union of the old and new rotated bounding boxes is repainted.
 * Fix: when the pointer sits exactly on the cell center, the distance c is
 * 0 and Math.acos(dy / 0) is NaN, which would corrupt currentAngle and the
 * style value committed on release — such events now leave the angle
 * unchanged.
 */
public void mouseDragged(MouseEvent e)
{
    if (graphComponent.isEnabled() && isEnabled() && !e.isConsumed()
            && first != null)
    {
        mxRectangle dirty = mxUtils.getBoundingBox(currentState,
                currentAngle * mxConstants.DEG_PER_RAD);
        Point pt = SwingUtilities.convertPoint(e.getComponent(),
                e.getPoint(), graphComponent.getGraphControl());

        double cx = currentState.getCenterX();
        double cy = currentState.getCenterY();
        double dx = pt.getX() - cx;
        double dy = pt.getY() - cy;
        double c = Math.sqrt(dx * dx + dy * dy);

        // Guard against the degenerate pointer-on-center case (c == 0).
        if (c != 0)
        {
            currentAngle = ((pt.getX() > cx) ? -1 : 1) * Math.acos(dy / c)
                    + PI4 + initialAngle;
        }

        dirty.add(mxUtils.getBoundingBox(currentState, currentAngle
                * mxConstants.DEG_PER_RAD));
        dirty.grow(1);

        // TODO: Compute dirty rectangle and repaint
        graphComponent.getGraphControl().repaint(dirty.getRectangle());
        e.consume();
    }
    else if (handle.getParent() != null)
    {
        handle.getParent().remove(handle);
    }
}
/**
 * Finishes the rotation: commits the accumulated angle (converted back to
 * degrees) into the cell's STYLE_ROTATION and repaints. reset() runs
 * before the commit so the handler is clean even when nothing executes.
 */
public void mouseReleased(MouseEvent e)
{
    if (graphComponent.isEnabled() && isEnabled() && !e.isConsumed()
            && first != null)
    {
        double deg = 0;
        Object cell = null;

        if (currentState != null)
        {
            cell = currentState.getCell();
            /*deg = mxUtils.getDouble(currentState.getStyle(),
                    mxConstants.STYLE_ROTATION);*/
        }

        // Convert the tracked angle from radians to degrees for the style.
        deg += currentAngle * mxConstants.DEG_PER_RAD;

        boolean willExecute = cell != null && first != null;

        // TODO: Call reset before execute in all handlers that
        // offer an execute method
        reset();

        if (graphComponent.isEnabled() && isEnabled() && !e.isConsumed()
                && willExecute)
        {
            graphComponent.getGraph().setCellStyles(
                    mxConstants.STYLE_ROTATION, String.valueOf(deg),
                    new Object[] { cell });

            graphComponent.getGraphControl().repaint();
            e.consume();
        }
    }
}
/**
 * Cancels any ongoing interaction: detaches the handle from its parent,
 * clears the tracked state/angle/start point, and repaints the area that
 * was covered by the rotation preview, if any.
 */
public void reset()
{
    java.awt.Container parent = handle.getParent();

    if (parent != null)
    {
        parent.remove(handle);
    }

    // Capture the preview's bounds before the tracking fields are cleared.
    mxRectangle dirty = (currentState != null && first != null)
            ? mxUtils.getBoundingBox(currentState,
                    currentAngle * mxConstants.DEG_PER_RAD)
            : null;

    if (dirty != null)
    {
        dirty.grow(1);
    }

    currentState = null;
    currentAngle = 0;
    first = null;

    if (dirty != null)
    {
        graphComponent.getGraphControl().repaint(dirty.getRectangle());
    }
}
/**
 * Paints the rotation preview: the current state's rectangle drawn under
 * the accumulated rotation about the state's center. Only active while a
 * drag is in progress (first != null).
 */
public void paint(Graphics g)
{
    if (currentState != null && first != null)
    {
        Rectangle rect = currentState.getRectangle();
        double deg = currentAngle * mxConstants.DEG_PER_RAD;

        if (deg != 0)
        {
            // Rotate the graphics context about the cell center.
            ((Graphics2D) g).rotate(Math.toRadians(deg),
                    currentState.getCenterX(), currentState.getCenterY());
        }

        mxUtils.setAntiAlias((Graphics2D) g, true, false);
        g.drawRect(rect.x, rect.y, rect.width, rect.height);
    }
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.form;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.unitime.localization.impl.Localization;
import org.unitime.localization.messages.CourseMessages;
import org.unitime.timetable.model.ClassInstructor;
import org.unitime.timetable.model.DepartmentalInstructor;
import org.unitime.timetable.model.Location;
import org.unitime.timetable.model.Preference;
import org.unitime.timetable.model.dao.DepartmentalInstructorDAO;
import org.unitime.timetable.model.dao.LocationDAO;
import org.unitime.timetable.util.DynamicList;
import org.unitime.timetable.util.DynamicListObjectFactory;
/**
* MyEclipse Struts
* Creation date: 12-08-2005
*
* XDoclet definition:
* @struts:form name="classEditForm"
*
* @author Tomas Muller, Zuzana Mullerova, Stephanie Schluttenhofer
*/
public class ClassEditForm extends PreferencesForm {
// --------------------------------------------------------- Class Constants
/**
* Comment for <code>serialVersionUID</code>
*/
private static final long serialVersionUID = 3257849883023915058L;
// Messages
protected final static CourseMessages MSG = Localization.create(CourseMessages.class);
/** Class Start/End Date Format **/
private final SimpleDateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
// --------------------------------------------------------- Instance Variables
private Integer nbrRooms;
private Integer expectedCapacity;
private Long classId;
private Long parentClassId;
private String section;
private Long managingDept;
private Long subpart;
private String className;
private String parentClassName;
private String itypeDesc;
private List instrLead;
private String managingDeptLabel;
private String notes;
private List instructors;
private List instrPctShare;
private List assignments;
private Long datePattern;
private String subjectAreaId;
private String instrOfferingId;
private String courseName;
private String courseTitle;
private Boolean displayInstructor;
private String schedulePrintNote;
private String classSuffix;
private Boolean enabledForStudentScheduling;
private Integer maxExpectedCapacity;
private Float roomRatio;
private Integer minRoomLimit;
private Boolean unlimitedEnroll;
private Integer enrollment;
//TODO Reservations Bypass - to be removed later
private Boolean isCrosslisted;
private String accommodation;
//End Bypass
// --------------------------------------------------------- Classes
/** Factory to create dynamic list element for Instructors */
protected DynamicListObjectFactory factoryInstructors = new DynamicListObjectFactory() {
    public Object create() {
        // Strings are immutable: return the shared blank-preference constant
        // directly instead of allocating a copy via new String(...).
        return Preference.BLANK_PREF_VALUE;
    }
};
// --------------------------------------------------------- Methods
/**
 * Validate input data for the class edit screen: room count/ratio,
 * expected capacities, owning department, note lengths, instructor
 * preferences, and required-room capacities; then delegates to the
 * superclass for the remaining preference checks.
 * Fixes: guard against an NPE when the minimum expected capacity is null
 * while the maximum is valid; use Long.valueOf instead of the deprecated
 * Long constructor.
 * @param mapping struts action mapping
 * @param request HTTP request carrying the submitted form
 * @return ActionErrors collected validation errors (empty when valid)
 */
public ActionErrors validate(
        ActionMapping mapping,
        HttpServletRequest request) {

    // NOTE(review): iRoomCapacity is never reassigned, so the required-room
    // capacity check below is currently dead code — confirm whether it
    // should be derived from the expected capacity.
    int iRoomCapacity = -1;

    ActionErrors errors = new ActionErrors();

    if (nbrRooms != null && nbrRooms.intValue() < 0)
        errors.add("nbrRooms",
            new ActionMessage("errors.generic", MSG.errorNumberOfRoomsNegative()) );

    if (roomRatio == null || roomRatio.floatValue() < 0.0f)
        errors.add("nbrRooms",
            new ActionMessage("errors.generic", MSG.errorRoomRatioNegative()) );

    if (expectedCapacity == null || expectedCapacity.intValue() < 0)
        errors.add("expectedCapacity",
            new ActionMessage("errors.generic", MSG.errorMinimumExpectedCapacityNegative()) );

    if (maxExpectedCapacity == null || maxExpectedCapacity.intValue() < 0)
        errors.add("maxExpectedCapacity",
            new ActionMessage("errors.generic", MSG.errorMaximumExpectedCapacityNegative()) );
    else if (expectedCapacity != null // guard: min may be null (already reported above)
            && maxExpectedCapacity.intValue() < expectedCapacity.intValue())
        errors.add("maxExpectedCapacity",
            new ActionMessage("errors.generic", MSG.errorMaximumExpectedCapacityLessThanMinimum()) );

    if (managingDept == null || managingDept.longValue() <= 0)
        errors.add("managingDept",
            new ActionMessage("errors.generic", MSG.errorRequiredClassOwner()) );

    // Schedule print note has 2000 character limit
    if (schedulePrintNote != null && schedulePrintNote.length() > 1999)
        errors.add("notes",
            new ActionMessage("errors.generic", MSG.errorSchedulePrintNoteLongerThan1999()) );

    // Notes has 1000 character limit
    if (notes != null && notes.length() > 999)
        errors.add("notes",
            new ActionMessage("errors.generic", MSG.errorNotesLongerThan999()) );

    // At least one instructor is selected
    if (instructors.size() > 0) {
        // Check no duplicates or blank instructors
        if (!super.checkPrefs(instructors))
            errors.add("instrPrefs",
                new ActionMessage(
                    "errors.generic",
                    MSG.errorInvalidInstructorPreference()) );
    }

    // Check that any room with a preference required has capacity >= room
    // capacity for the class
    if (iRoomCapacity > 0) {
        List rp = this.getRoomPrefs();
        List rpl = this.getRoomPrefLevels();
        for (int i = 0; i < rpl.size(); i++) {
            String pl = rpl.get(i).toString();
            if (pl.trim().equalsIgnoreCase("1")) {
                String roomId = rp.get(i).toString();
                Location room = new LocationDAO().get(Long.valueOf(roomId));
                int rCap = room.getCapacity().intValue();
                if (rCap < iRoomCapacity) {
                    errors.add("roomPref",
                        new ActionMessage(
                            "errors.generic",
                            MSG.errorRequiredRoomTooSmall(room.getLabel(), rCap, iRoomCapacity)) );
                }
            }
        }
    }

    // Check Other Preferences
    errors.add(super.validate(mapping, request));

    return errors;
}
/**
 * Method reset
 * Restores every form field to its initial blank/default value and
 * re-creates the dynamic instructor lists, then delegates to the
 * superclass reset.
 * @param mapping struts action mapping
 * @param request HTTP request
 */
public void reset(ActionMapping mapping, HttpServletRequest request) {

    nbrRooms = null;
    expectedCapacity = null;
    classId = null;
    section = null;
    managingDept = null;
    subpart = null;
    className = "";
    courseName = "";
    courseTitle = "";
    parentClassName = "-";
    itypeDesc = "";
    datePattern = null;
    instrLead = DynamicList.getInstance(new ArrayList(), factoryInstructors);
    managingDeptLabel = "-";
    notes="";
    displayInstructor = null;
    schedulePrintNote = null;
    classSuffix = null;
    enabledForStudentScheduling = null;
    maxExpectedCapacity = null;
    roomRatio = null;
    unlimitedEnroll = null;

    //TODO Reservations Bypass - to be removed later
    isCrosslisted = null;
    // End Bypass

    instructors = DynamicList.getInstance(new ArrayList(), factoryInstructors);
    instrPctShare= DynamicList.getInstance(new ArrayList(), factoryInstructors);
    assignments = null;
    enrollment = null;
    accommodation = null;

    super.reset(mapping, request);
}
/**
* @return Returns the classId.
*/
public Long getClassId() {
return classId;
}
/**
* @param classId The classId to set.
*/
public void setClassId(Long classId) {
this.classId = classId;
}
/**
* @return Returns the section.
*/
public String getSection() {
return section;
}
/**
* @param section The section to set.
*/
public void setSection(String section) {
this.section = section;
}
/**
* @return Returns the className.
*/
public String getClassName() {
return className;
}
/**
* @param className The className to set.
*/
public void setClassName(String className) {
this.className = className;
}
/**
* @return Returns the assignments.
*/
public List getAssignments() {
return assignments;
}
/**
* @return Returns the assignments.
*/
public String getAssignments(int key) {
return assignments.get(key).toString();
}
/**
* @param key The key to set.
* @param value The value to set.
*/
public void setAssignments(int key, Object value) {
this.assignments.set(key, value);
}
/**
* @param assignments The assignments to set.
*/
public void setAssignments(List assignments) {
this.assignments = assignments;
}
public Long getDatePattern() {
return datePattern;
}
public void setDatePattern(Long datePattern) {
this.datePattern = datePattern;
}
/**
* @return Returns the expectedCapacity.
*/
public Integer getExpectedCapacity() {
return expectedCapacity;
}
/**
* @param expectedCapacity The expectedCapacity to set.
*/
public void setExpectedCapacity(Integer expectedCapacity) {
this.expectedCapacity = expectedCapacity;
}
/**
* @return Returns the instructors.
*/
public List getInstructors() {
return instructors;
}
/**
* @return Returns the instructors.
*/
public String getInstructors(int key) {
return instructors.get(key).toString();
}
/**
* @param key The key to set.
* @param value The value to set.
*/
public void setInstructors(int key, Object value) {
this.instructors.set(key, value);
}
/**
* @param instructors The instructors to set.
*/
public void setInstructors(List instructors) {
this.instructors = instructors;
}
/**
* @return Returns the instrLead.
*/
public List getInstrLead() {
return instrLead;
}
/**
* @param instrLead The instrLead to set.
*/
public void setInstrLead(List instrLead) {
this.instrLead = instrLead;
}
public void addInstrLead(String instructorId) {
instrLead.add(instructorId);
}
public String getInstrLead(int key) {
return instrLead.get(key).toString();
}
public void setInstrLead(int key, Object value) {
this.instrLead.set(key, value);
}
/**
 * Returns true when the instructor row at the given index is marked as
 * lead ("true"/"on") and that departmental instructor has preferences of
 * their own.
 */
public boolean getInstrHasPref(int key) {
    String lead = getInstrLead(key);
    if (!"true".equals(lead) && !"on".equals(lead)) {
        return false;
    }

    String instructorId = getInstructors(key);
    if (instructorId == null || instructorId.trim().isEmpty() || instructorId.equals("-")) {
        return false;
    }

    DepartmentalInstructor di = new DepartmentalInstructorDAO().get(Long.valueOf(instructorId));
    return di != null && di.hasPreferences();
}
/**
* @return Returns the instrPctShare.
*/
public List getInstrPctShare() {
return instrPctShare;
}
/**
* @return Returns the instrPctShare.
*/
public String getInstrPctShare(int key) {
return instrPctShare.get(key).toString();
}
/**
* @param key The key to set.
* @param value The value to set.
*/
public void setInstrPctShare(int key, Object value) {
this.instrPctShare.set(key, value);
}
/**
* @param instrPctShare The instrPctShare to set.
*/
public void setInstrPctShare(List instrPctShare) {
this.instrPctShare = instrPctShare;
}
/**
* @return Returns the nbrRooms.
*/
public Integer getNbrRooms() {
return nbrRooms;
}
/**
* @param nbrRooms The nbrRooms to set.
*/
public void setNbrRooms(Integer nbrRooms) {
this.nbrRooms = nbrRooms;
}
/**
* @return Returns the managingDept.
*/
public Long getManagingDept() {
return managingDept;
}
/**
* @param managingDept The managingDept to set.
*/
public void setManagingDept(Long owner) {
this.managingDept = owner;
}
/**
* @return Returns the parent.
*/
public String getParentClassName() {
return parentClassName;
}
/**
* @param parent The parent to set.
*/
public void setParentClassName(String parentClassName) {
this.parentClassName = parentClassName;
}
/**
* @return Returns the subpart.
*/
public Long getSubpart() {
return subpart;
}
/**
* @param subpart The subpart to set.
*/
public void setSubpart(Long subpart) {
this.subpart = subpart;
}
/**
* @return Returns the itypeDesc.
*/
public String getItypeDesc() {
return itypeDesc;
}
/**
* @param itypeDesc The itypeDesc to set.
*/
public void setItypeDesc(String itypeDesc) {
this.itypeDesc = itypeDesc;
}
/**
* @return Returns the notes.
*/
public String getNotes() {
return notes;
}
/**
* @param notes The notes to set.
*/
public void setNotes(String notes) {
this.notes = notes;
}
/**
* @return Returns the managingDeptLabel.
*/
public String getManagingDeptLabel() {
return managingDeptLabel;
}
/**
* @param managingDeptLabel The managingDeptLabel to set.
*/
public void setManagingDeptLabel(String ownerLabel) {
this.managingDeptLabel = ownerLabel;
}
/**
* @return Returns the parentClassId.
*/
public Long getParentClassId() {
return parentClassId;
}
public String getSubjectAreaId() {
return subjectAreaId;
}
public void setSubjectAreaId(String subjectAreaId) {
this.subjectAreaId = subjectAreaId;
}
public String getInstrOfferingId() {
return instrOfferingId;
}
public void setInstrOfferingId(String instrOfferingId) {
this.instrOfferingId = instrOfferingId;
}
/**
* @param parentClassId The parentClassId to set.
*/
public void setParentClassId(Long parentClassId) {
this.parentClassId = parentClassId;
}
public String getClassSuffix() {
return classSuffix;
}
public void setClassSuffix(String classSuffix) {
this.classSuffix = classSuffix;
}
public Boolean getEnabledForStudentScheduling() {
return enabledForStudentScheduling;
}
public void setEnabledForStudentScheduling(Boolean enabledForStudentScheduling) {
this.enabledForStudentScheduling = enabledForStudentScheduling;
}
public Boolean getDisplayInstructor() {
return displayInstructor;
}
public void setDisplayInstructor(Boolean displayInstructor) {
this.displayInstructor = displayInstructor;
}
public Integer getMaxExpectedCapacity() {
return maxExpectedCapacity;
}
public void setMaxExpectedCapacity(Integer maxExpectedCapacity) {
this.maxExpectedCapacity = maxExpectedCapacity;
}
public Float getRoomRatio() {
return roomRatio;
}
public void setRoomRatio(Float roomRatio) {
this.roomRatio = roomRatio;
}
public String getSchedulePrintNote() {
return schedulePrintNote;
}
public void setSchedulePrintNote(String schedulePrintNote) {
this.schedulePrintNote = schedulePrintNote;
}
public Integer getMinRoomLimit() {
return minRoomLimit;
}
public void setMinRoomLimit(Integer minRoomLimit) {
this.minRoomLimit = minRoomLimit;
}
public Boolean getUnlimitedEnroll() {
return unlimitedEnroll;
}
public void setUnlimitedEnroll(Boolean unlimitedEnroll) {
this.unlimitedEnroll = unlimitedEnroll;
}
//TODO Reservations Bypass - to be removed later
public Boolean getIsCrosslisted() {
return isCrosslisted;
}
public void setIsCrosslisted(Boolean isCrosslisted) {
this.isCrosslisted = isCrosslisted;
}
// End Bypass
/**
 * @param date date to format (may be null)
 * @return String representation of the date formatted as mm/dd/yyyy, or an
 *         empty string when the date is null
 */
public String dateToStr(Date date) {
    return (date == null) ? "" : dateFormat.format(date);
}
/**
 * @param date String representation of the date ( mm/dd/yyyy )
 * @return java.sql.Date object for the given string, or null for a
 *         null/blank input
 * @throws ParseException when the string cannot be parsed
 */
public java.sql.Date strToDate(String date) throws ParseException {
    if (date == null || date.trim().isEmpty()) {
        return null;
    }
    return new java.sql.Date(dateFormat.parse(date).getTime());
}
/**
 * Add Instructor Data to List
 * If class instructor is null, a blank row is added
 * @param classInstr class instructor to append, or null for a blank row
 */
public void addToInstructors(ClassInstructor classInstr) {

    // Default values
    String id = "";
    String pctShare = "0";
    boolean isLead = false;

    // Class Instructor Specified
    if(classInstr!=null) {
        id = classInstr.getInstructor().getUniqueId().toString();
        pctShare = classInstr.getPercentShare().toString();
        isLead = classInstr.isLead().booleanValue();
    }
    else {
        // If this is the only record - set 100% share and make lead
        if(this.instructors.size()==0) {
            pctShare = "100";
            isLead = true;
        }
    }

    // Add row (the three lists are maintained in parallel, one entry per row)
    this.instructors.add(id);
    this.instrPctShare.add(pctShare);
    this.instrLead.add(isLead?"true":"false");
}
/**
 * Remove Instructor from List
 * Removes the row at the given index from each of the three parallel
 * instructor lists. The original code bounds-checked only instrLead;
 * the same guard is now applied to all three lists so out-of-sync
 * request data cannot raise IndexOutOfBoundsException.
 * @param deleteId row index to remove
 */
public void removeInstructor(int deleteId) {
    // Remove from lists (guarded — the lists are parallel but may differ
    // in length when the submitted data is inconsistent)
    if (this.instructors.size() > deleteId)
        this.instructors.remove(deleteId);
    if (this.instrPctShare.size() > deleteId)
        this.instrPctShare.remove(deleteId);
    if (this.instrLead.size() > deleteId)
        this.instrLead.remove(deleteId);
}
/**
 * Clears all preference lists
 * Empties the three parallel instructor lists (ids, percent shares, leads).
 */
public void clearPrefs() {
    this.instructors.clear();
    this.instrPctShare.clear();
    this.instrLead.clear();
}
public String getCourseName() { return courseName; }
public void setCourseName(String courseName) { this.courseName=courseName; }
public String getCourseTitle() { return courseTitle; }
public void setCourseTitle(String courseTitle) { this.courseTitle=courseTitle; }
public Integer getEnrollment() {
return enrollment;
}
public void setEnrollment(Integer enrollment) {
this.enrollment = enrollment;
}
public String getAccommodation() { return accommodation; }
public void setAccommodation(String accommodation) { this.accommodation = accommodation; }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* AttachmentSetItemResponseType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* AttachmentSetItemResponseType bean class
*/
public class AttachmentSetItemResponseType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = AttachmentSetItemResponseType
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
/**
 * Returns the namespace prefix to use for the given namespace URI: the
 * fixed "ns1" prefix for the EC2 2009-10-31 schema namespace, otherwise a
 * unique prefix generated by the Axis2 bean utilities.
 * Fix: constant-first equals so a null namespace no longer throws NPE.
 */
private static java.lang.String generatePrefix(java.lang.String namespace) {
    if ("http://ec2.amazonaws.com/doc/2009-10-31/".equals(namespace)) {
        return "ns1";
    }
    return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* field for VolumeId
*/
protected java.lang.String localVolumeId ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getVolumeId(){
return localVolumeId;
}
/**
* Auto generated setter method
* @param param VolumeId
*/
public void setVolumeId(java.lang.String param){
this.localVolumeId=param;
}
/**
* field for InstanceId
*/
protected java.lang.String localInstanceId ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getInstanceId(){
return localInstanceId;
}
/**
* Auto generated setter method
* @param param InstanceId
*/
public void setInstanceId(java.lang.String param){
this.localInstanceId=param;
}
/**
* field for Device
*/
protected java.lang.String localDevice ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getDevice(){
return localDevice;
}
/**
* Auto generated setter method
* @param param Device
*/
public void setDevice(java.lang.String param){
this.localDevice=param;
}
/**
* field for Status
*/
protected java.lang.String localStatus ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getStatus(){
return localStatus;
}
/**
* Auto generated setter method
* @param param Status
*/
public void setStatus(java.lang.String param){
this.localStatus=param;
}
/**
* field for AttachTime
*/
protected java.util.Calendar localAttachTime ;
/**
* Auto generated getter method
* @return java.util.Calendar
*/
public java.util.Calendar getAttachTime(){
return localAttachTime;
}
/**
* Auto generated setter method
* @param param AttachTime
*/
public void setAttachTime(java.util.Calendar param){
this.localAttachTime=param;
}
/**
* field for DeleteOnTermination
*/
protected boolean localDeleteOnTermination ;
/**
* Auto generated getter method
* @return boolean
*/
public boolean getDeleteOnTermination(){
return localDeleteOnTermination;
}
/**
* Auto generated setter method
* @param param DeleteOnTermination
*/
public void setDeleteOnTermination(boolean param){
this.localDeleteOnTermination=param;
}
/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
    try {
        Object aware = reader.getProperty(
                org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE);
        return java.lang.Boolean.TRUE.equals(aware);
    } catch (java.lang.IllegalArgumentException e) {
        // The reader does not recognize the property at all -> no MTOM.
        return false;
    }
}
/**
 * Wraps this bean in an OMElement backed by a lazy ADB data source; the
 * element content is produced on demand by {@link #serialize}.
 * @param parentQName qualified name for the element to create
 * @param factory OM factory used to build the element
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement (
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{

    org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this,parentQName){

        public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
            AttachmentSetItemResponseType.this.serialize(parentQName,factory,xmlWriter);
        }
    };
    return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            parentQName,factory,dataSource);
}
/**
 * Serializes this bean without writing an xsi:type attribute; delegates to
 * the four-argument overload with serializeType=false.
 */
public void serialize(final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory,
        org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
    serialize(parentQName,factory,xmlWriter,false);
}
/**
 * Serializes this bean as an XML element named by parentQName. Each field
 * is written as a mandatory child element in the EC2 2009-10-31 namespace;
 * a null String/Calendar field raises an ADBException. When serializeType
 * is true an xsi:type attribute is written on the wrapper element.
 * Refactor: the six identical copies of element-writing boilerplate in the
 * generated code are collapsed into the writeChildElement helper.
 */
public void serialize(final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory,
        org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
        boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

    java.lang.String prefix = parentQName.getPrefix();
    java.lang.String namespace = parentQName.getNamespaceURI();

    // Open the wrapper element, binding the parent namespace if necessary.
    if ((namespace != null) && (namespace.trim().length() > 0)) {
        java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
        } else {
            if (prefix == null) {
                prefix = generatePrefix(namespace);
            }
            xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    } else {
        xmlWriter.writeStartElement(parentQName.getLocalPart());
    }

    if (serializeType) {
        java.lang.String namespacePrefix = registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2009-10-31/");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
            writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                    namespacePrefix + ":AttachmentSetItemResponseType",
                    xmlWriter);
        } else {
            writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                    "AttachmentSetItemResponseType",
                    xmlWriter);
        }
    }

    writeChildElement(xmlWriter, "volumeId", localVolumeId);
    writeChildElement(xmlWriter, "instanceId", localInstanceId);
    writeChildElement(xmlWriter, "device", localDevice);
    writeChildElement(xmlWriter, "status", localStatus);

    if (localAttachTime == null) {
        throw new org.apache.axis2.databinding.ADBException("attachTime cannot be null!!");
    }
    writeChildElement(xmlWriter, "attachTime",
            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localAttachTime));

    // A primitive boolean can never be null, so no null check is required.
    writeChildElement(xmlWriter, "deleteOnTermination",
            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDeleteOnTermination));

    xmlWriter.writeEndElement();
}

/**
 * Writes a single mandatory child element in the EC2 schema namespace:
 * binds a prefix for the namespace the first time it is used, writes the
 * given text content, and raises an ADBException naming the element when
 * the value is null.
 */
private void writeChildElement(
        org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
        java.lang.String localName, java.lang.String value)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

    java.lang.String namespace = "http://ec2.amazonaws.com/doc/2009-10-31/";
    java.lang.String prefix = xmlWriter.getPrefix(namespace);

    if (prefix == null) {
        prefix = generatePrefix(namespace);
        xmlWriter.writeStartElement(prefix, localName, namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    } else {
        xmlWriter.writeStartElement(namespace, localName);
    }

    if (value == null) {
        // Preserves the generated message, e.g. "volumeId cannot be null!!".
        throw new org.apache.axis2.databinding.ADBException(localName + " cannot be null!!");
    }
    xmlWriter.writeCharacters(value);
    xmlWriter.writeEndElement();
}
/**
 * Util method to write an attribute with the ns prefix.
 * Binds the given prefix to the namespace on the writer the first time the
 * namespace is seen, then writes the attribute.
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
        java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }

    xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
 * Writes an attribute, qualifying it only when a non-empty namespace URI is
 * supplied; the prefix for that namespace is resolved (and registered if
 * necessary) via {@code registerPrefix}.
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (!namespace.equals("")) {
        // Qualified attribute: ensure the namespace has a bound prefix first.
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    } else {
        // Unqualified attribute.
        xmlWriter.writeAttribute(attName, attValue);
    }
}
/**
 * Writes an attribute whose value is a QName, serialized as
 * {@code prefix:localPart} — or just the local part when the QName's
 * namespace is bound to the default (empty) prefix.
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                 javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
    }

    // An all-whitespace/empty prefix means the default namespace: emit the bare local part.
    java.lang.String attributeValue = attributePrefix.trim().length() > 0
            ? attributePrefix + ":" + qname.getLocalPart()
            : qname.getLocalPart();

    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}
/**
 * Writes a QName as character data, declaring a prefix for its namespace on
 * the writer if one is not already bound. A blank prefix (default namespace)
 * yields just the converted lexical form without a prefix.
 */
private void writeQName(javax.xml.namespace.QName qname,
                        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    java.lang.String text = org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname);
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix, namespaceURI);
        }
        if (prefix.trim().length() > 0) {
            text = prefix + ":" + text;
        }
        // else: bound to the default namespace — write the bare lexical form.
    }
    xmlWriter.writeCharacters(text);
}
/**
 * Writes an array of QNames as a single space-separated character-data run.
 * All namespace declarations are emitted while the text is being assembled,
 * because no namespace may be written after character data has started.
 */
private void writeQNames(javax.xml.namespace.QName[] qnames,
                         javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames == null) {
        return;
    }
    java.lang.StringBuilder text = new java.lang.StringBuilder();
    for (int i = 0; i < qnames.length; i++) {
        if (i > 0) {
            text.append(" ");
        }
        java.lang.String namespaceURI = qnames[i].getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if ((prefix == null) || (prefix.length() == 0)) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                // Qualified form: "prefix:lexical".
                text.append(prefix).append(":");
            }
        }
        text.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
    }
    xmlWriter.writeCharacters(text.toString());
}
/**
 * Returns the prefix bound to {@code namespace} on the writer, registering a
 * freshly generated (and guaranteed-unused) prefix when none is bound yet.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix != null) {
        return prefix;
    }
    prefix = generatePrefix(namespace);
    // Keep generating until the candidate prefix is free in the current context.
    while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
        prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }
    xmlWriter.writeNamespace(prefix, namespace);
    xmlWriter.setPrefix(prefix, namespace);
    return prefix;
}
/**
 * Databinding method returning a pull-parser view of this object: the child
 * elements are emitted in schema order under the supplied element QName.
 *
 * @throws org.apache.axis2.databinding.ADBException when a mandatory
 *         property is null
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{

    // Alternating (QName, string-value) pairs, in schema order.
    java.util.ArrayList elementList = new java.util.ArrayList();
    // This type carries no attributes.
    java.util.ArrayList attribList = new java.util.ArrayList();

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "volumeId"));
    if (localVolumeId == null){
        throw new org.apache.axis2.databinding.ADBException("volumeId cannot be null!!");
    }
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeId));

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "instanceId"));
    if (localInstanceId == null){
        throw new org.apache.axis2.databinding.ADBException("instanceId cannot be null!!");
    }
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localInstanceId));

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "device"));
    if (localDevice == null){
        throw new org.apache.axis2.databinding.ADBException("device cannot be null!!");
    }
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDevice));

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "status"));
    if (localStatus == null){
        throw new org.apache.axis2.databinding.ADBException("status cannot be null!!");
    }
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localStatus));

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "attachTime"));
    if (localAttachTime == null){
        throw new org.apache.axis2.databinding.ADBException("attachTime cannot be null!!");
    }
    elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localAttachTime));

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
                                                  "deleteOnTermination"));
    // Primitive boolean — can never be null, so no check is needed.
    elementList.add(
        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDeleteOnTermination));

    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method. The cursor-position protocol of
 * the generated parser below is order-sensitive: each expected child element
 * must appear exactly once, in schema order.
 */
public static class Factory{
    /**
     * static method to create the object
     * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     * If this object is a complex type, the reader is positioned at the end element of its outer element
     *
     * @throws org.apache.axis2.databinding.ADBException on an unexpected or
     *         missing child element
     */
    public static AttachmentSetItemResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        AttachmentSetItemResponseType object =
            new AttachmentSetItemResponseType();
        // The three locals below are part of the generated-code template and
        // are unused in this particular type.
        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {
            // Advance past ignorable events to the outer start element.
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            // If an xsi:type attribute names a different (sub)type, delegate
            // construction to the ExtensionMapper so the right class is built.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                    "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;
                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                    if (!"AttachmentSetItemResponseType".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (AttachmentSetItemResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                            nsUri,type,reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes. (This type declares none.)
            java.util.Vector handledAttributes = new java.util.Vector();
            reader.next();
            // Mandatory children, consumed in schema order:
            // volumeId, instanceId, device, status, attachTime, deleteOnTermination.
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","volumeId").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setVolumeId(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","instanceId").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setInstanceId(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","device").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setDevice(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","status").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setStatus(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","attachTime").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                // attachTime is an xs:dateTime — converted, not stored as raw text.
                object.setAttachTime(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToDateTime(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","deleteOnTermination").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                // deleteOnTermination is an xs:boolean.
                object.setDeleteOnTermination(
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));
                reader.next();
            } // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }
            while (!reader.isStartElement() && !reader.isEndElement())
                reader.next();
            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }
        return object;
    }
}//end of factory class
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.ds.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;
import org.wso2.developerstudio.eclipse.ds.DsPackage;
import org.wso2.developerstudio.eclipse.ds.QueryProperty;
import org.wso2.developerstudio.eclipse.ds.QueryPropertyList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Query Property List</b></em>'.
 * Stores its contents in a mixed {@link FeatureMap}; the 'Property' feature is
 * a typed view over that map. Code marked {@code @generated} is produced by
 * the EMF code generator and should not be hand-edited.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 * <li>
 * {@link org.wso2.developerstudio.eclipse.ds.impl.QueryPropertyListImpl#getMixed
 * <em>Mixed</em>}</li>
 * <li>
 * {@link org.wso2.developerstudio.eclipse.ds.impl.QueryPropertyListImpl#getProperty
 * <em>Property</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class QueryPropertyListImpl extends EObjectImpl implements QueryPropertyList {
    /**
     * The cached value of the '{@link #getMixed() <em>Mixed</em>}' attribute
     * list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getMixed()}; {@code null} until first access.
     * <!-- end-user-doc -->
     *
     * @see #getMixed()
     * @generated
     * @ordered
     */
    protected FeatureMap mixed;
    /**
     * <!-- begin-user-doc -->
     * Default constructor used by the DS factory.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    protected QueryPropertyListImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the static EMF metaclass for this model object.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return DsPackage.Literals.QUERY_PROPERTY_LIST;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the mixed-content feature map, creating it on first access.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    public FeatureMap getMixed() {
        if (mixed == null) {
            mixed = new BasicFeatureMap(this, DsPackage.QUERY_PROPERTY_LIST__MIXED);
        }
        return mixed;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the live list of {@link QueryProperty} children backed by the
     * mixed feature map; mutations write through to the map.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    public EList<QueryProperty> getProperty() {
        return getMixed().list(DsPackage.Literals.QUERY_PROPERTY_LIST__PROPERTY);
    }
    /**
     * <!-- begin-user-doc -->
     * Removes a containment reference for the given feature, delegating to the
     * superclass for features this class does not own.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID,
            NotificationChain msgs) {
        switch (featureID) {
        case DsPackage.QUERY_PROPERTY_LIST__MIXED:
            return ((InternalEList<?>) getMixed()).basicRemove(otherEnd, msgs);
        case DsPackage.QUERY_PROPERTY_LIST__PROPERTY:
            return ((InternalEList<?>) getProperty()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective getter; {@code coreType} selects the raw feature map over its
     * wrapper view for the Mixed feature.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
        case DsPackage.QUERY_PROPERTY_LIST__MIXED:
            if (coreType)
                return getMixed();
            return ((FeatureMap.Internal) getMixed()).getWrapper();
        case DsPackage.QUERY_PROPERTY_LIST__PROPERTY:
            return getProperty();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective setter; replaces the whole feature value.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case DsPackage.QUERY_PROPERTY_LIST__MIXED:
            ((FeatureMap.Internal) getMixed()).set(newValue);
            return;
        case DsPackage.QUERY_PROPERTY_LIST__PROPERTY:
            getProperty().clear();
            getProperty().addAll((Collection<? extends QueryProperty>) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective unset; clears the feature back to its default state.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
        case DsPackage.QUERY_PROPERTY_LIST__MIXED:
            getMixed().clear();
            return;
        case DsPackage.QUERY_PROPERTY_LIST__PROPERTY:
            getProperty().clear();
            return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective is-set check; a feature is "set" when it differs from its
     * default (here: non-empty).
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case DsPackage.QUERY_PROPERTY_LIST__MIXED:
            return mixed != null && !mixed.isEmpty();
        case DsPackage.QUERY_PROPERTY_LIST__PROPERTY:
            return !getProperty().isEmpty();
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Debug string including the raw mixed feature map; proxies fall back to
     * the superclass representation.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy())
            return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (mixed: ");
        result.append(mixed);
        result.append(')');
        return result.toString();
    }
} // QueryPropertyListImpl
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.content.pm;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Overall information about the contents of a package. This corresponds
* to all of the information collected from AndroidManifest.xml.
*/
public class PackageInfo implements Parcelable {
/**
* The name of this package. From the <manifest> tag's "name"
* attribute.
*/
public String packageName;
/**
* The version number of this package, as specified by the <manifest>
* tag's {@link android.R.styleable#AndroidManifest_versionCode versionCode}
* attribute.
*/
public int versionCode;
/**
* The version name of this package, as specified by the <manifest>
* tag's {@link android.R.styleable#AndroidManifest_versionName versionName}
* attribute.
*/
public String versionName;
/**
* The shared user ID name of this package, as specified by the <manifest>
* tag's {@link android.R.styleable#AndroidManifest_sharedUserId sharedUserId}
* attribute.
*/
public String sharedUserId;
/**
* The shared user ID label of this package, as specified by the <manifest>
* tag's {@link android.R.styleable#AndroidManifest_sharedUserLabel sharedUserLabel}
* attribute.
*/
public int sharedUserLabel;
/**
* Information collected from the <application> tag, or null if
* there was none.
*/
public ApplicationInfo applicationInfo;
/**
* The time at which the app was first installed. Units are as
* per {@link System#currentTimeMillis()}.
*/
public long firstInstallTime;
/**
* The time at which the app was last updated. Units are as
* per {@link System#currentTimeMillis()}.
*/
public long lastUpdateTime;
/**
* All kernel group-IDs that have been assigned to this package.
* This is only filled in if the flag {@link PackageManager#GET_GIDS} was set.
*/
public int[] gids;
/**
* Array of all {@link android.R.styleable#AndroidManifestActivity
* <activity>} tags included under <application>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_ACTIVITIES} was set.
*/
public ActivityInfo[] activities;
/**
* Array of all {@link android.R.styleable#AndroidManifestReceiver
* <receiver>} tags included under <application>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_RECEIVERS} was set.
*/
public ActivityInfo[] receivers;
/**
* Array of all {@link android.R.styleable#AndroidManifestService
* <service>} tags included under <application>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_SERVICES} was set.
*/
public ServiceInfo[] services;
/**
* Array of all {@link android.R.styleable#AndroidManifestProvider
* <provider>} tags included under <application>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_PROVIDERS} was set.
*/
public ProviderInfo[] providers;
/**
* Array of all {@link android.R.styleable#AndroidManifestInstrumentation
* <instrumentation>} tags included under <manifest>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_INSTRUMENTATION} was set.
*/
public InstrumentationInfo[] instrumentation;
/**
* Array of all {@link android.R.styleable#AndroidManifestPermission
* <permission>} tags included under <manifest>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_PERMISSIONS} was set.
*/
public PermissionInfo[] permissions;
/**
* Array of all {@link android.R.styleable#AndroidManifestUsesPermission
* <uses-permission>} tags included under <manifest>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_PERMISSIONS} was set. This list includes
* all permissions requested, even those that were not granted or known
* by the system at install time.
*/
public String[] requestedPermissions;
/**
* Array of flags of all {@link android.R.styleable#AndroidManifestUsesPermission
* <uses-permission>} tags included under <manifest>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_PERMISSIONS} was set. Each value matches
* the corresponding entry in {@link #requestedPermissions}, and will have
* the flags {@link #REQUESTED_PERMISSION_REQUIRED} and
* {@link #REQUESTED_PERMISSION_GRANTED} set as appropriate.
*/
public int[] requestedPermissionsFlags;
/**
* Flag for {@link #requestedPermissionsFlags}: the requested permission
* is required for the application to run; the user can not optionally
* disable it. Currently all permissions are required.
*/
public static final int REQUESTED_PERMISSION_REQUIRED = 1<<0;
/**
* Flag for {@link #requestedPermissionsFlags}: the requested permission
* is currently granted to the application.
*/
public static final int REQUESTED_PERMISSION_GRANTED = 1<<1;
/**
* Array of all signatures read from the package file. This is only filled
* in if the flag {@link PackageManager#GET_SIGNATURES} was set.
*/
public Signature[] signatures;
/**
* Application specified preferred configuration
* {@link android.R.styleable#AndroidManifestUsesConfiguration
* <uses-configuration>} tags included under <manifest>,
* or null if there were none. This is only filled in if the flag
* {@link PackageManager#GET_CONFIGURATIONS} was set.
*/
public ConfigurationInfo[] configPreferences;
/**
* The features that this application has said it requires.
*/
public FeatureInfo[] reqFeatures;
/**
* Constant corresponding to <code>auto</code> in
* the {@link android.R.attr#installLocation} attribute.
* @hide
*/
public static final int INSTALL_LOCATION_UNSPECIFIED = -1;
/**
* Constant corresponding to <code>auto</code> in
* the {@link android.R.attr#installLocation} attribute.
* @hide
*/
public static final int INSTALL_LOCATION_AUTO = 0;
/**
* Constant corresponding to <code>internalOnly</code> in
* the {@link android.R.attr#installLocation} attribute.
* @hide
*/
public static final int INSTALL_LOCATION_INTERNAL_ONLY = 1;
/**
* Constant corresponding to <code>preferExternal</code> in
* the {@link android.R.attr#installLocation} attribute.
* @hide
*/
public static final int INSTALL_LOCATION_PREFER_EXTERNAL = 2;
/**
* The install location requested by the activity. From the
* {@link android.R.attr#installLocation} attribute, one of
* {@link #INSTALL_LOCATION_AUTO},
* {@link #INSTALL_LOCATION_INTERNAL_ONLY},
* {@link #INSTALL_LOCATION_PREFER_EXTERNAL}
* @hide
*/
public int installLocation = INSTALL_LOCATION_INTERNAL_ONLY;
public PackageInfo() {
}
public String toString() {
return "PackageInfo{"
+ Integer.toHexString(System.identityHashCode(this))
+ " " + packageName + "}";
}
public int describeContents() {
return 0;
}
public void writeToParcel(Parcel dest, int parcelableFlags) {
dest.writeString(packageName);
dest.writeInt(versionCode);
dest.writeString(versionName);
dest.writeString(sharedUserId);
dest.writeInt(sharedUserLabel);
if (applicationInfo != null) {
dest.writeInt(1);
applicationInfo.writeToParcel(dest, parcelableFlags);
} else {
dest.writeInt(0);
}
dest.writeLong(firstInstallTime);
dest.writeLong(lastUpdateTime);
dest.writeIntArray(gids);
dest.writeTypedArray(activities, parcelableFlags);
dest.writeTypedArray(receivers, parcelableFlags);
dest.writeTypedArray(services, parcelableFlags);
dest.writeTypedArray(providers, parcelableFlags);
dest.writeTypedArray(instrumentation, parcelableFlags);
dest.writeTypedArray(permissions, parcelableFlags);
dest.writeStringArray(requestedPermissions);
dest.writeIntArray(requestedPermissionsFlags);
dest.writeTypedArray(signatures, parcelableFlags);
dest.writeTypedArray(configPreferences, parcelableFlags);
dest.writeTypedArray(reqFeatures, parcelableFlags);
dest.writeInt(installLocation);
}
public static final Parcelable.Creator<PackageInfo> CREATOR
= new Parcelable.Creator<PackageInfo>() {
public PackageInfo createFromParcel(Parcel source) {
return new PackageInfo(source);
}
public PackageInfo[] newArray(int size) {
return new PackageInfo[size];
}
};
private PackageInfo(Parcel source) {
packageName = source.readString();
versionCode = source.readInt();
versionName = source.readString();
sharedUserId = source.readString();
sharedUserLabel = source.readInt();
int hasApp = source.readInt();
if (hasApp != 0) {
applicationInfo = ApplicationInfo.CREATOR.createFromParcel(source);
}
firstInstallTime = source.readLong();
lastUpdateTime = source.readLong();
gids = source.createIntArray();
activities = source.createTypedArray(ActivityInfo.CREATOR);
receivers = source.createTypedArray(ActivityInfo.CREATOR);
services = source.createTypedArray(ServiceInfo.CREATOR);
providers = source.createTypedArray(ProviderInfo.CREATOR);
instrumentation = source.createTypedArray(InstrumentationInfo.CREATOR);
permissions = source.createTypedArray(PermissionInfo.CREATOR);
requestedPermissions = source.createStringArray();
requestedPermissionsFlags = source.createIntArray();
signatures = source.createTypedArray(Signature.CREATOR);
configPreferences = source.createTypedArray(ConfigurationInfo.CREATOR);
reqFeatures = source.createTypedArray(FeatureInfo.CREATOR);
installLocation = source.readInt();
}
}
| |
package org.hisp.dhis.option;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.common.GenericIdentifiableObjectStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
* @author Lars Helge Overland
*/
@Transactional
public class DefaultOptionService
implements OptionService
{
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
private GenericIdentifiableObjectStore<OptionSet> optionSetStore;
public void setOptionSetStore( GenericIdentifiableObjectStore<OptionSet> optionSetStore )
{
this.optionSetStore = optionSetStore;
}
private OptionStore optionStore;
public void setOptionStore( OptionStore optionStore )
{
this.optionStore = optionStore;
}
@Autowired
private OptionGroupStore optionGroupStore;
@Autowired
private OptionGroupSetStore optionGroupSetStore;
// -------------------------------------------------------------------------
// OptionService implementation
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// Option Set
// -------------------------------------------------------------------------
@Override
public int saveOptionSet( OptionSet optionSet )
{
return optionSetStore.save( optionSet );
}
@Override
public void updateOptionSet( OptionSet optionSet )
{
optionSetStore.update( optionSet );
}
@Override
public OptionSet getOptionSet( int id )
{
return optionSetStore.get( id );
}
@Override
public OptionSet getOptionSet( String uid )
{
return optionSetStore.getByUid( uid );
}
@Override
public OptionSet getOptionSetByName( String name )
{
return optionSetStore.getByName( name );
}
@Override
public OptionSet getOptionSetByCode( String code )
{
return optionSetStore.getByCode( code );
}
@Override
public void deleteOptionSet( OptionSet optionSet )
{
optionSetStore.delete( optionSet );
}
@Override
public List<OptionSet> getAllOptionSets()
{
return optionSetStore.getAll();
}
// -------------------------------------------------------------------------
// Option
// -------------------------------------------------------------------------
@Override
public List<Option> getOptions( int optionSetId, String key, Integer max )
{
List<Option> options = null;
if ( key != null || max != null )
{
// Use query as option set size might be very high
options = optionStore.getOptions( optionSetId, key, max );
}
else
{
// Return all from object association to preserve custom order
OptionSet optionSet = getOptionSet( optionSetId );
options = new ArrayList<>( optionSet.getOptions() );
}
return options;
}
@Override
public void updateOption( Option option )
{
optionStore.update( option );
}
@Override
public Option getOption( int id )
{
return optionStore.get( id );
}
@Override
public Option getOptionByCode( String code )
{
return optionStore.getByCode( code );
}
@Override
public void deleteOption( Option option )
{
optionStore.delete( option );
}
// -------------------------------------------------------------------------
// OptionGroup
// -------------------------------------------------------------------------
@Override
public int saveOptionGroup( OptionGroup group )
{
return optionGroupStore.save( group );
}
    /** Updates the given option group via the store. */
    @Override
    public void updateOptionGroup( OptionGroup group )
    {
        optionGroupStore.update( group );
    }
    /** Returns the option group with the given database id. */
    @Override
    public OptionGroup getOptionGroup( int id )
    {
        return optionGroupStore.get( id );
    }
    /** Returns the option group with the given UID. */
    @Override
    public OptionGroup getOptionGroup( String uid )
    {
        return optionGroupStore.getByUid( uid );
    }
    /** Deletes the given option group via the store. */
    @Override
    public void deleteOptionGroup( OptionGroup group )
    {
        optionGroupStore.delete( group );
    }
    /** Returns all option groups. */
    @Override
    public List<OptionGroup> getAllOptionGroups()
    {
        return optionGroupStore.getAll();
    }
    /** Returns the option group with the given name. */
    @Override
    public OptionGroup getOptionGroupByName( String name )
    {
        return optionGroupStore.getByName( name );
    }
    /** Returns the option group with the given code. */
    @Override
    public OptionGroup getOptionGroupByCode( String code )
    {
        return optionGroupStore.getByCode( code );
    }
@Override
public OptionGroup getOptionGroupByShortName( String shortName )
{
List<OptionGroup> OptionGroups = new ArrayList<>(
optionGroupStore.getAllEqShortName( shortName ) );
if ( OptionGroups.isEmpty() )
{
return null;
}
return OptionGroups.get( 0 );
}
// -------------------------------------------------------------------------
// OptionGroupSet
// -------------------------------------------------------------------------
    /** Persists the given option group set; returns the store's save result (presumably the generated id — confirm store contract). */
    @Override
    public int saveOptionGroupSet( OptionGroupSet group )
    {
        return optionGroupSetStore.save( group );
    }
    /** Updates the given option group set via the store. */
    @Override
    public void updateOptionGroupSet( OptionGroupSet group )
    {
        optionGroupSetStore.update( group );
    }
    /** Returns the option group set with the given database id. */
    @Override
    public OptionGroupSet getOptionGroupSet( int id )
    {
        return optionGroupSetStore.get( id );
    }
    /** Returns the option group set with the given UID. */
    @Override
    public OptionGroupSet getOptionGroupSet( String uid )
    {
        return optionGroupSetStore.getByUid( uid );
    }
    /** Deletes the given option group set via the store. */
    @Override
    public void deleteOptionGroupSet( OptionGroupSet group )
    {
        optionGroupSetStore.delete( group );
    }
    /** Returns all option group sets. */
    @Override
    public List<OptionGroupSet> getAllOptionGroupSets()
    {
        return optionGroupSetStore.getAll();
    }
    /** Returns the option group set with the given name. */
    @Override
    public OptionGroupSet getOptionGroupSetByName( String name )
    {
        return optionGroupSetStore.getByName( name );
    }
}
| |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.integration.tests;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.linkedin.pinot.common.metadata.ZKMetadataProvider;
import com.linkedin.pinot.common.metadata.segment.OfflineSegmentZKMetadata;
import com.linkedin.pinot.common.metadata.segment.RealtimeSegmentZKMetadata;
import com.linkedin.pinot.common.utils.CommonConstants;
import java.util.Collections;
import junit.framework.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Hybrid cluster integration test that uploads 8 months of data as offline and 6 months of data as realtime (with a
* two month overlap) then deletes segments different ways to test delete APIs. Expects at least 3 realtime segments
* and at least 3 offline segments.
*
*/
// TODO: clean up this test
public class DeleteAPIHybridClusterIntegrationTest extends HybridClusterIntegrationTest {
  // Raw table name without the _OFFLINE/_REALTIME type suffix; assigned in setUp().
  private String TABLE_NAME;
  // Offline row count right after the initial push; the baseline that
  // repushOfflineSegments() must restore at the end of each delete test.
  private long nOfflineRows;

  @BeforeClass
  public void setUp() throws Exception {
    super.setUp();
    TABLE_NAME = super.getTableName();
    nOfflineRows = numRowsReturned(CommonConstants.Helix.TableType.OFFLINE);
  }

  /**
   * Returns the total row count for the table of the given type via a
   * count(*) query, or 0 when no documents were scanned (in which case the
   * "aggregationResults" element is absent from the response).
   */
  private long numRowsReturned(CommonConstants.Helix.TableType tableType) throws Exception {
    org.json.JSONObject response = postQuery("select count(*) from '" + TABLE_NAME + "_" + tableType + "'");
    // Integer.valueOf(0) instead of the deprecated new Integer(0) constructor.
    if (response.get("numDocsScanned").equals(Integer.valueOf(0))) {
      return 0;
    } else {
      // Throws a null pointer exception when there are no rows because it can't find "aggregationResults"
      String pinotValue =
          ((org.json.JSONArray) response.get("aggregationResults")).getJSONObject(0).get("value").toString();
      return Long.parseLong(pinotValue);
    }
  }

  // TODO: Find ways to refactor waitForNumRows and waitForSegmentsToBeInDeleteDirectory
  /** Polls (up to 60s) until the table of the given type reports exactly numRows rows. */
  private void waitForNumRows(long numRows, CommonConstants.Helix.TableType tableType) throws Exception {
    long start = System.currentTimeMillis();
    long end = start + 60 * 1000;
    while (System.currentTimeMillis() < end) {
      if (numRowsReturned(tableType) == numRows) {
        return;
      }
      Thread.sleep(200);
    }
    Assert.fail("Operation took too long");
  }

  /** Polls (up to 60s) until no offline segment ZK metadata remains for the table. */
  private void waitForSegmentsToBeInDeleteDirectory() throws Exception {
    long start = System.currentTimeMillis();
    long end = start + 60 * 1000;
    while (System.currentTimeMillis() < end) {
      if (ZKMetadataProvider.getOfflineSegmentZKMetadataListForTable(_propertyStore, TABLE_NAME).size() == 0) {
        // Wait for actual file to be deleted. This doesn't currently work because .tar.gz files don't get deleted.
        Thread.sleep(300);
        return;
      }
      Thread.sleep(200);
    }
    Assert.fail("Operation took too long");
  }

  @Override // Leaving this out because it is done in the superclass
  public void testGeneratedQueriesWithMultiValues() {
  }

  @Override // Leaving this out because it is done in the superclass
  public void testQueriesFromQueryFile() {
  }

  @Override // Leaving this out because it is done in the superclass
  public void testBrokerDebugOutput() {
  }

  /** Deletes one realtime segment through the GET delete endpoint and verifies rows and segment list shrink. */
  @Test
  public void deleteRealtimeSegmentFromGetAPI() throws Exception {
    long currRealtimeRows = numRowsReturned(CommonConstants.Helix.TableType.REALTIME);
    String segmentList = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.REALTIME.toString()));
    JSONArray realtimeSegmentsList =
        getSegmentsFromJsonSegmentAPI(segmentList, CommonConstants.Helix.TableType.REALTIME.toString());
    String removedSegment = realtimeSegmentsList.get(0).toString();
    long removedSegmentRows = getNumRowsFromRealtimeMetadata(removedSegment);
    // assertTrue replaces the former assertNotSame(long, long), which compared
    // boxed references rather than values.
    Assert.assertTrue(removedSegmentRows != 0L);
    sendGetRequest(_controllerRequestURLBuilder.
        forDeleteSegmentWithGetAPI(TABLE_NAME, removedSegment, CommonConstants.Helix.TableType.REALTIME.toString()));
    waitForNumRows(currRealtimeRows - removedSegmentRows, CommonConstants.Helix.TableType.REALTIME);
    String postDeleteSegmentList = sendGetRequest(
        _controllerRequestURLBuilder.forSegmentListAPIWithTableType(TABLE_NAME,
            CommonConstants.Helix.TableType.REALTIME.toString()));
    JSONArray realtimeSegmentsListReturn =
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentList, CommonConstants.Helix.TableType.REALTIME.toString());
    realtimeSegmentsList.remove(removedSegment);
    Assert.assertEquals(realtimeSegmentsListReturn, realtimeSegmentsList);
  }

  /** Deletes one realtime segment through the DELETE endpoint and verifies rows and segment list shrink. */
  @Test
  public void deleteRealtimeSegmentFromDeleteAPI() throws Exception {
    long currRealtimeRows = numRowsReturned(CommonConstants.Helix.TableType.REALTIME);
    String segmentList = sendGetRequest(_controllerRequestURLBuilder.forSegmentListAPIWithTableType(TABLE_NAME,
        CommonConstants.Helix.TableType.REALTIME.toString()));
    JSONArray realtimeSegmentsList =
        getSegmentsFromJsonSegmentAPI(segmentList, CommonConstants.Helix.TableType.REALTIME.toString());
    String removedSegment = realtimeSegmentsList.get(0).toString();
    long removedSegmentRows = getNumRowsFromRealtimeMetadata(removedSegment);
    Assert.assertTrue(removedSegmentRows != 0L);
    sendDeleteRequest(_controllerRequestURLBuilder.
        forSegmentDeleteAPI(TABLE_NAME, removedSegment, CommonConstants.Helix.TableType.REALTIME.toString()));
    waitForNumRows(currRealtimeRows - removedSegmentRows, CommonConstants.Helix.TableType.REALTIME);
    String postDeleteSegmentList = sendGetRequest(
        _controllerRequestURLBuilder.forSegmentListAPIWithTableType(TABLE_NAME,
            CommonConstants.Helix.TableType.REALTIME.toString()));
    JSONArray realtimeSegmentsListReturn =
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentList, CommonConstants.Helix.TableType.REALTIME.toString());
    realtimeSegmentsList.remove(removedSegment);
    Assert.assertEquals(realtimeSegmentsListReturn, realtimeSegmentsList);
  }

  // @Test TODO: Add back when we use LLC only
  public void deleteAllRealtimeSegmentsFromGetAPI() throws Exception {
  }

  // @Test TODO: Add back when we use LLC only
  public void deleteAllRealtimeSegmentsFromDeleteAPI() throws Exception {
  }

  /** Deletes one offline segment, then all offline segments, via the DELETE endpoints; repushes at the end. */
  @Test
  public void deleteFromDeleteAPI() throws Exception {
    String segmentList = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    JSONArray offlineSegmentsList =
        getSegmentsFromJsonSegmentAPI(segmentList, CommonConstants.Helix.TableType.OFFLINE.toString());
    Assert.assertNotNull(offlineSegmentsList);
    String removedSegment = offlineSegmentsList.get(0).toString();
    long removedSegmentRows = getNumRowsFromOfflineMetadata(removedSegment);
    Assert.assertTrue(removedSegmentRows != 0L);
    sendDeleteRequest(_controllerRequestURLBuilder.
        forSegmentDeleteAPI(TABLE_NAME, removedSegment, CommonConstants.Helix.TableType.OFFLINE.toString()));
    waitForNumRows(nOfflineRows - removedSegmentRows, CommonConstants.Helix.TableType.OFFLINE);
    String postDeleteSegmentList = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    JSONArray offlineSegmentsListReturn =
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentList, CommonConstants.Helix.TableType.OFFLINE.toString());
    offlineSegmentsList.remove(removedSegment);
    Assert.assertEquals(offlineSegmentsListReturn, offlineSegmentsList);
    // Testing Delete All API here
    sendDeleteRequest(_controllerRequestURLBuilder.
        forSegmentDeleteAllAPI(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    waitForNumRows(0, CommonConstants.Helix.TableType.OFFLINE);
    String postDeleteSegmentListAll = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    Assert.assertEquals(
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentListAll, CommonConstants.Helix.TableType.OFFLINE.toString()),
        Collections.emptyList());
    waitForSegmentsToBeInDeleteDirectory();
    repushOfflineSegments();
  }

  @Override
  @Test
  public void testSegmentListApi() {
    // Tested in HybridClusterIntegrationTest
  }

  /** Deletes one offline segment, then all offline segments, via the GET delete endpoints; repushes at the end. */
  @Test
  public void deleteFromGetAPI() throws Exception {
    String segmentList = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    JSONArray offlineSegmentsList =
        getSegmentsFromJsonSegmentAPI(segmentList, CommonConstants.Helix.TableType.OFFLINE.toString());
    String removedSegment = offlineSegmentsList.get(0).toString();
    long removedSegmentRows = getNumRowsFromOfflineMetadata(removedSegment);
    Assert.assertTrue(removedSegmentRows != 0L);
    sendGetRequest(_controllerRequestURLBuilder.
        forDeleteSegmentWithGetAPI(TABLE_NAME, removedSegment, CommonConstants.Helix.TableType.OFFLINE.toString()));
    waitForNumRows(nOfflineRows - removedSegmentRows, CommonConstants.Helix.TableType.OFFLINE);
    String postDeleteSegmentList = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    JSONArray offlineSegmentsListReturn =
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentList, CommonConstants.Helix.TableType.OFFLINE.toString());
    offlineSegmentsList.remove(removedSegment);
    Assert.assertEquals(offlineSegmentsListReturn, offlineSegmentsList);
    // Testing Delete All API here
    sendGetRequest(_controllerRequestURLBuilder.
        forDeleteAllSegmentsWithTypeWithGetAPI(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    waitForNumRows(0, CommonConstants.Helix.TableType.OFFLINE);
    String postDeleteSegmentListAll = sendGetRequest(_controllerRequestURLBuilder.
        forSegmentListAPIWithTableType(TABLE_NAME, CommonConstants.Helix.TableType.OFFLINE.toString()));
    Assert.assertEquals(
        getSegmentsFromJsonSegmentAPI(postDeleteSegmentListAll, CommonConstants.Helix.TableType.OFFLINE.toString()),
        Collections.emptyList());
    waitForSegmentsToBeInDeleteDirectory();
    repushOfflineSegments();
  }

  /** Reads the raw-doc count of an offline segment from its ZK metadata. */
  private long getNumRowsFromOfflineMetadata(String segmentName) throws Exception {
    OfflineSegmentZKMetadata segmentZKMetadata =
        ZKMetadataProvider.getOfflineSegmentZKMetadata(_propertyStore, TABLE_NAME, segmentName);
    return segmentZKMetadata.getTotalRawDocs();
  }

  /** Reads the raw-doc count of a realtime segment from its ZK metadata. */
  private long getNumRowsFromRealtimeMetadata(String segmentName) {
    RealtimeSegmentZKMetadata segmentZKMetadata =
        ZKMetadataProvider.getRealtimeSegmentZKMetadata(_propertyStore, TABLE_NAME, segmentName);
    return segmentZKMetadata.getTotalRawDocs();
  }

  /** Extracts the segment-name array for the given table type from the segment list API's JSON response. */
  private com.alibaba.fastjson.JSONArray getSegmentsFromJsonSegmentAPI(String json, String type) throws Exception {
    JSONObject tableTypeAndSegments = (JSONObject) JSON.parseArray(json).get(0);
    return (JSONArray) tableTypeAndSegments.get(type);
  }

  /** Re-uploads the offline segments and waits until the original row count is restored. */
  private void repushOfflineSegments() throws Exception {
    uploadSegments(_tarDir);
    waitForNumRows(nOfflineRows, CommonConstants.Helix.TableType.OFFLINE);
  }
}
| |
package de.plushnikov.intellij.plugin.hack;
import com.sun.jna.*;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
import org.jetbrains.annotations.Nullable;
import java.util.function.Function;
public interface JNI extends Library {
    // Binds directly against the running JVM's own native library.
    JNI INSTANCE = Native.load("jvm", JNI.class);

    /* JNI result codes */
    int
        // @formatter:off
        JNI_OK        =  0,             /* success */
        JNI_ERR       = -1,             /* unknown error */
        JNI_EDETACHED = -2,             /* thread detached from the VM */
        JNI_EVERSION  = -3,             /* JNI version error */
        JNI_ENOMEM    = -4,             /* not enough memory */
        JNI_EEXIST    = -5,             /* VM already created */
        JNI_EINVAL    = -6;             /* invalid arguments */
        // @formatter:on

    /* JNI VersionInfo */
    int
        // @formatter:off
        JNI_VERSION_1_1 = 0x00010001,
        JNI_VERSION_1_2 = 0x00010002,
        JNI_VERSION_1_4 = 0x00010004,
        JNI_VERSION_1_6 = 0x00010006,
        JNI_VERSION_1_8 = 0x00010008,
        JNI_VERSION_9   = 0x00090000,
        JNI_VERSION_10  = 0x000a0000;
        // @formatter:on

    /** Mirrors the native JNINativeInterface pointer (only a reserved slot is mapped). */
    @Structure.FieldOrder("reserved")
    class NativeInterface extends Structure {

        public static class ByReference extends NativeInterface implements Structure.ByReference { }

        public static class ByValue extends NativeInterface implements Structure.ByValue { }

        public long reserved;

        public NativeInterface() { }

        public NativeInterface(final Pointer pointer) {
            super(pointer);
            autoRead();
        }
    }

    /** Mirrors the native JNIEnv: a pointer to the function table. */
    @Structure.FieldOrder("functions")
    class Env extends Structure {

        // FIX: these previously extended NativeInterface (copy-paste error),
        // which made Env.ByReference/ByValue unrelated to Env.
        public static class ByReference extends Env implements Structure.ByReference { }

        public static class ByValue extends Env implements Structure.ByValue { }

        public @Nullable NativeInterface.ByReference functions;

        public Env() { }

        public Env(final Pointer pointer) {
            super(pointer);
            autoRead();
        }
    }

    /** Mirrors the native JavaVM: a pointer to the JNI invocation interface. */
    @SuppressWarnings("ConstantConditions")
    @Structure.FieldOrder("functions")
    class JavaVM extends Structure {

        public static class ByReference extends JavaVM implements Structure.ByReference { }

        public static class ByValue extends JavaVM implements Structure.ByValue { }

        /** The JNIInvokeInterface function table (three reserved slots, then the five invocation functions). */
        @Structure.FieldOrder({
            "reserved0",
            "reserved1",
            "reserved2",
            "DestroyJavaVM",
            "AttachCurrentThread",
            "DetachCurrentThread",
            "GetEnv",
            "AttachCurrentThreadAsDaemon"
        })
        public static class InvokeInterface extends Structure {

            public static class ByReference extends InvokeInterface implements Structure.ByReference { }

            public static class ByValue extends InvokeInterface implements Structure.ByValue { }

            public @Nullable Pointer reserved0, reserved1, reserved2;

            public interface DestroyJavaVM extends Callback {
                int invoke(Pointer p_javaVM);
            }

            public @Nullable DestroyJavaVM DestroyJavaVM;

            public interface AttachCurrentThread extends Callback {
                int invoke(Pointer p_javaVM, PointerByReference p_penv, Pointer p_args);
            }

            public @Nullable AttachCurrentThread AttachCurrentThread;

            public interface DetachCurrentThread extends Callback {
                int invoke(Pointer p_javaVM);
            }

            public @Nullable DetachCurrentThread DetachCurrentThread;

            public interface GetEnv extends Callback {
                int invoke(Pointer p_javaVM, PointerByReference p_penv, int version);
            }

            public @Nullable GetEnv GetEnv;

            public interface AttachCurrentThreadAsDaemon extends Callback {
                int invoke(Pointer p_javaVM, PointerByReference p_penv, Pointer p_args);
            }

            public @Nullable AttachCurrentThreadAsDaemon AttachCurrentThreadAsDaemon;

            public InvokeInterface() { }

            public InvokeInterface(final Pointer pointer) {
                super(pointer);
            }
        }

        public @Nullable InvokeInterface.ByReference functions;

        public JavaVM() { }

        public JavaVM(final Pointer pointer) {
            super(pointer);
            autoRead();
        }

        /** Invokes DestroyJavaVM on this VM; throws on any non-JNI_OK result. */
        public void destroyJavaVM() throws LastErrorException {
            checkJNIError(functions.DestroyJavaVM.invoke(getPointer()));
        }

        /** Attaches the current native thread to this VM; the env pointer is written into p_penv. */
        public void attachCurrentThread(final PointerByReference p_penv, final Pointer p_args) throws LastErrorException {
            checkJNIError(functions.AttachCurrentThread.invoke(getPointer(), p_penv, p_args));
        }

        /** Detaches the current native thread from this VM. */
        public void detachCurrentThread() throws LastErrorException {
            checkJNIError(functions.DetachCurrentThread.invoke(getPointer()));
        }

        /** Calls GetEnv for the given JNI version and maps the resulting env pointer through {@code mapper}. */
        public <T> T getEnv(final Function<Pointer, ? extends T> mapper, final int version) throws LastErrorException {
            final PointerByReference p_penv = new PointerByReference();
            checkJNIError(functions.GetEnv.invoke(getPointer(), p_penv, version));
            return mapper.apply(p_penv.getValue());
        }

        /** Attaches the current native thread as a daemon thread. */
        public void attachCurrentThreadAsDaemon(final PointerByReference p_penv, final Pointer p_args) throws LastErrorException {
            checkJNIError(functions.AttachCurrentThreadAsDaemon.invoke(getPointer(), p_penv, p_args));
        }

        /** Convenience: GetEnv wrapped into a JNI.Env structure. */
        public JNI.Env jniEnv(final int version) throws LastErrorException {
            return getEnv(JNI.Env::new, version);
        }

        /** Returns the JavaVM of the process this code is running in. */
        public static JavaVM contextVM() { return new JavaVM(INSTANCE.contextVM()); }
    }

    /** Binding against the JDK "instrument" library for attaching java agents in-process. */
    interface Instrument extends Library {
        Instrument INSTANCE = Native.load("instrument", Instrument.class);

        int Agent_OnAttach(Pointer p_vm, String path, @Nullable Pointer p_reserved);

        /** Attaches the agent jar at {@code path} to the current VM; throws on a non-JNI_OK result. */
        default void attachAgent(final String path, final @Nullable Pointer p_reserved) {
            checkJNIError(Agent_OnAttach(JNI.INSTANCE.contextVM(), path, p_reserved));
        }

        default void attachAgent(final String path) { attachAgent(path, null); }
    }

    /** Throws a LastErrorException carrying the JNI return code unless it is JNI_OK. */
    static void checkJNIError(final int jniReturnCode) throws LastErrorException {
        if (jniReturnCode != JNI_OK)
            throw new LastErrorException(jniReturnCode);
    }

    int JNI_GetDefaultJavaVMInitArgs(Pointer p_args);

    int JNI_CreateJavaVM(PointerByReference p_vms, PointerByReference p_penv, Pointer p_args);

    int JNI_GetCreatedJavaVMs(PointerByReference p_vms, int count, IntByReference p_found);

    /** Returns a pointer to the first created JavaVM of the current process. */
    default Pointer contextVM() throws LastErrorException {
        final PointerByReference p_vms = new PointerByReference();
        final IntByReference p_found = new IntByReference();
        checkJNIError(JNI.INSTANCE.JNI_GetCreatedJavaVMs(p_vms, 1, p_found));
        return p_vms.getValue();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.bundleresource.impl;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.osgi.framework.Bundle;
/**
* The <code>BundleResourceCache</code> implements a simple caching for
* resources provided from a bundle. Each {@link BundleResourceProvider}
* instance uses an instance of this class to access the bundle resources (or
* bundle entries) through the cache.
* <p>
* The cache on the one hand caches single entries as URLs. The other part of
* the cache is for the child entries of a given bundle entry path. This caches
* lists of strings (entry path).
* <p>
* Currently the cache limits are fixed at {@value #CACHE_SIZE} for the entries
* cache and at {@value #LIST_CACHE_SIZE} for the child entries cache.
*/
class BundleResourceCache {

    /**
     * The maximum size of the single entry cache (value is 50).
     */
    private static final int CACHE_SIZE = 50;

    /**
     * The maximum size of the child entry cache (value is 20).
     */
    private static final int LIST_CACHE_SIZE = 20;

    /**
     * Sentinel for the single entry cache representing a missing entry to
     * prevent looking for non-existing bundle entries multiple times (value is
     * "file:/not_found").
     */
    private static final URL NOT_FOUND_URL;

    /**
     * Sentinel for the child entry cache representing a missing child list for
     * a given path to prevent looking for non-existing bundle entries multiple
     * times (value is an empty list).
     */
    private static final List<String> NOT_FOUND_CHILDREN = Collections.<String> emptyList();

    /**
     * Single entry cache. This is a synchronized map with a size limit.
     */
    private final Map<String, URL> cache;

    /**
     * The child entry cache. This is a synchronized map with a size limit.
     */
    private final Map<String, List<String>> listCache;

    /**
     * The Bundle providing the resource entries.
     */
    private final Bundle bundle;

    // static initializer setting the NOT_FOUND_URL. Because the
    // constructor may throw an exception we use a static initializer
    // which fails the class initialization in the unlikely case
    // of the URL constructor failing.
    static {
        try {
            NOT_FOUND_URL = new URL("file:/not_found");
        } catch (MalformedURLException mue) {
            throw new ExceptionInInitializerError(mue);
        }
    }

    /**
     * Creates a new instance of this class providing access to the entries in
     * the given <code>bundle</code>.
     *
     * @param bundle The bundle whose entries are cached by this instance
     */
    BundleResourceCache(Bundle bundle) {
        this.bundle = bundle;

        // create the limited maps wrapping in synchronized maps
        this.cache = Collections.synchronizedMap(new BundleResourceMap<URL>(
            CACHE_SIZE));
        this.listCache = Collections.synchronizedMap(new BundleResourceMap<List<String>>(
            LIST_CACHE_SIZE));
    }

    /**
     * Returns the <code>Bundle</code> to which this instance provides access.
     */
    Bundle getBundle() {
        return bundle;
    }

    /**
     * Returns the entry in the underlying bundle at the given path. This path
     * is assumed to be an absolute path. If relative it is resolved relative to
     * the bundle root.
     * <p>
     * This method is backed by the <code>Bundle.getEntry(String)</code>
     * method. Misses are cached as {@link #NOT_FOUND_URL} so repeated lookups
     * of non-existing entries do not hit the bundle again.
     *
     * @param path The path to the bundle entry to return
     * @return The URL to access the bundle entry or <code>null</code> if the
     *         bundle does not contain the request entry.
     */
    URL getEntry(String path) {
        URL url = cache.get(path);
        if (url == null) {
            url = bundle.getEntry(path);

            if (url == null) {
                // cache the miss with the sentinel
                url = NOT_FOUND_URL;
            }

            cache.put(path, url);
        }

        return (url == NOT_FOUND_URL) ? null : url;
    }

    /**
     * Returns a list of bundle entry paths considered children of the given
     * <code>parentPath</code>. This parent path is assumed to be an absolute
     * path. If relative it is resolved relative to the bundle root.
     * <p>
     * This method is backed by the <code>Bundle.getEntryPaths(String)</code>
     * method but returns an <code>Iterator<String></code> instead of an
     * <code>Enumeration</code> of strings. Misses are cached as
     * {@link #NOT_FOUND_CHILDREN}.
     *
     * @param parentPath The path to the parent entry whose child entries are to
     *            be returned.
     * @return An <code>Iterator<String></code> providing the paths of
     *         entries considered direct children of the <code>parentPath</code>
     *         or <code>null</code> if the parent entry does not exist.
     */
    Iterator<String> getEntryPaths(String parentPath) {
        List<String> list = listCache.get(parentPath);
        if (list == null) {

            @SuppressWarnings("unchecked")
            Enumeration<String> entries = bundle.getEntryPaths(parentPath);
            if (entries != null && entries.hasMoreElements()) {
                list = new LinkedList<String>();
                while (entries.hasMoreElements()) {
                    list.add(entries.nextElement());
                }
            }

            if (list == null) {
                // cache the miss with the sentinel
                list = NOT_FOUND_CHILDREN;
            }

            listCache.put(parentPath, list);
        }

        return (list == NOT_FOUND_CHILDREN) ? null : list.iterator();
    }

    // ---------- Management API

    /**
     * Returns the current number of entries stored in the entry cache. This
     * number includes "negative" entries, which are requested entries not found
     * in the bundle.
     */
    int getEntryCacheSize() {
        return cache.size();
    }

    /**
     * Returns the maximum number of entries to be stored in the cache. This
     * number is currently fixed at {@link #CACHE_SIZE}
     */
    int getEntryCacheMaxSize() {
        return CACHE_SIZE;
    }

    /**
     * Returns the current number of list entries stored in the list cache. This
     * number includes "negative" list entries, which are requested list entries
     * not found in the bundle.
     */
    int getListCacheSize() {
        return listCache.size();
    }

    /**
     * Returns the maximum number of list entries to be stored in the cache.
     * This number is currently fixed at {@link #LIST_CACHE_SIZE}
     */
    int getListCacheMaxSize() {
        return LIST_CACHE_SIZE;
    }

    // ---------- inner class

    /**
     * The <code>BundleResourceMap</code> class extends the
     * <code>LinkedHashMap</code> class overwriting the
     * {@link #removeEldestEntry(Entry)} method to implement the size limit,
     * which is set in the constructor. Keys are always strings, so the former
     * unused <code>K</code> type parameter has been dropped.
     */
    private static class BundleResourceMap<V> extends
            LinkedHashMap<String, V> {

        private static final long serialVersionUID = 7455098291380945276L;

        /**
         * The default size of a bundle resource cache (value is 20).
         */
        private static final int DEFAULT_LIMIT = 20;

        /**
         * The limit configured for this map.
         */
        private final int limit;

        /**
         * Creates a new instance of this size limited map.
         *
         * @param limit The maximum number of entries in this map. If this value
         *            is less than or equal to zero, the default size of
         *            {@link #DEFAULT_LIMIT} is used.
         */
        BundleResourceMap(int limit) {
            // deliberately chosen initial size and load factor, but
            // we need the access-order to implement the LRU mechanism
            super(8, 0.75f, true);

            // normalize size to a positive number
            if (limit <= 0) {
                limit = DEFAULT_LIMIT;
            }

            this.limit = limit;
        }

        /**
         * Returns <code>true</code> if the current number of elements in the
         * map exceeds the configured limit.
         */
        @Override
        protected boolean removeEldestEntry(Entry<String, V> eldest) {
            return size() > limit;
        }
    }
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2016 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.clockss;
import org.apache.commons.collections.map.MultiValueMap;
import org.lockss.plugin.clockss.SourceXmlSchemaHelper;
import org.lockss.util.*;
import org.lockss.extractor.*;
import org.lockss.extractor.XmlDomMetadataExtractor.NodeValue;
import org.lockss.extractor.XmlDomMetadataExtractor.XPathValue;
import java.util.*;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* A helper class that defines a schema for XML metadata extraction for
* Onix 3 both short and long form
* @author alexohlson
*/
public class Onix3BooksSchemaHelper
implements SourceXmlSchemaHelper {
  // Shared logger for this schema helper (LOCKSS logging framework).
  static Logger log = Logger.getLogger(Onix3BooksSchemaHelper.class);
  // Separator placed between KeyNames and NamesBeforeKey when assembling an
  // inverted "Key, Before" author string.
  private static final String AUTHOR_SEPARATOR = ",";
/*
* ONIX 3.0 specific node evaluators to extract the information we want
*/
/*
* ProductIdentifier: handles ISBN13, DOI & LCCN
* NODE=<ProductIdentifier>
* ProductIDType/
* IDValue/
* xPath that gets here has already figured out which type of ID it is
*/
static private final NodeValue ONIX_ID_VALUE = new NodeValue() {
@Override
public String getValue(Node node) {
if (node == null) {
return null;
}
log.debug3("getValue of ONIX ID");
// the TYPE has already been captured by xpath search in raw key
String idVal = null;
NodeList childNodes = node.getChildNodes();
for (int m = 0; m < childNodes.getLength(); m++) {
Node infoNode = childNodes.item(m);
if ( "IDValue".equals(infoNode.getNodeName()) | "b244".equals(infoNode.getNodeName())) {
idVal = infoNode.getTextContent();
break;
}
}
if (idVal != null) {
return idVal;
} else {
log.debug3("no IDVal in this productIdentifier");
return null;
}
}
};
/*
* AUTHOR information
* NODE=<Contributor>
* ContributorRole/
* not all of these will necessarily be there...
* NamesBeforeKey/
* KeyNames/
* PersonName/
* PersonNameInverted
*/
static private final NodeValue ONIX_AUTHOR_VALUE = new NodeValue() {
@Override
public String getValue(Node node) {
if (node == null) {
return null;
}
log.debug3("getValue of ONIX contributor");
String auType = null;
String auKey = null;
String auBeforeKey = null;
String straightName = null;
String invertedName = null;
NodeList childNodes = node.getChildNodes();
for (int m = 0; m < childNodes.getLength(); m++) {
Node infoNode = childNodes.item(m);
String nodeName = infoNode.getNodeName();
if ("ContributorRole".equals(nodeName) || "b035".equals(nodeName) ) {
auType = infoNode.getTextContent();
} else if ("NamesBeforeKey".equals(nodeName) || "b039".equals(nodeName)) {
auBeforeKey = infoNode.getTextContent();
} else if ("KeyNames".equals(nodeName) || "b040".equals(nodeName)) {
auKey = infoNode.getTextContent();
} else if ("PersonName".equals(nodeName) || "b036".equals(nodeName)) {
straightName = infoNode.getTextContent();
} else if ("PersonNameInverted".equals(nodeName) || "b037".equals(nodeName)) {
invertedName = infoNode.getTextContent();
} else if ("CorporateName".equals(nodeName) || "b047".equals(nodeName)) {
straightName = infoNode.getTextContent(); // organization, not person
}
}
// We may choose to limit the type of roles, but not sure which yet
if (auType != null) {
// first choice, PersonNameInverted
if (invertedName != null) {
return invertedName;
} else if (auKey != null) { // otherwise use KeyNames, NamesBeforeKey
StringBuilder valbuilder = new StringBuilder();
valbuilder.append(auKey);
if (auBeforeKey != null) {
valbuilder.append(AUTHOR_SEPARATOR + " " + auBeforeKey);
}
return valbuilder.toString();
} else if (straightName != null) { //otherwise use PersonName
return straightName;
}
log.debug3("No valid contributor in this contributor node.");
return null;
}
return null;
}
};
/*
* TITLE INFORMATION
* TitleElementLevel= 01 is the book title
* TitleElementLevel= 03 is the chapter title and indicates this is a book chapter
* <TitleElement>
* TitleElementLevel=01 <--this is the one we want
* TitleText
* Subtitle
* or sometimes:
* TitlePrefix
* TitleWithoutPrefix
* </TitleElement>
*/
static private final NodeValue ONIX_TITLE_VALUE = new NodeValue() {
  @Override
  public String getValue(Node node) {
    log.debug3("getValue of ONIX level 01 title or level 03 chapter title ");
    NodeList children = node.getChildNodes();
    if (children == null) return null;
    // Collect the possible title pieces from the TitleElement's children.
    // Each piece matches either the ONIX reference tag or its short-form code.
    String titleText = null;
    String subtitle = null;
    String prefix = null;
    String withoutPrefix = null;
    for (int i = 0; i < children.getLength(); i++) {
      Node child = children.item(i);
      String name = child.getNodeName();
      if ("TitleText".equals(name) || "b203".equals(name)) {
        titleText = child.getTextContent();
      } else if ("Subtitle".equals(name) || "b029".equals(name)) {
        subtitle = child.getTextContent();
      } else if ("TitlePrefix".equals(name) || "b030".equals(name)) {
        prefix = child.getTextContent();
      } else if ("TitleWithoutPrefix".equals(name) || "b031".equals(name)) {
        withoutPrefix = child.getTextContent();
      }
    }
    // Prefer TitleText; otherwise assemble prefix + TitleWithoutPrefix.
    // Either form gets ": <subtitle>" appended when a subtitle exists.
    String assembled = null;
    if (titleText != null) {
      assembled = (subtitle != null) ? titleText + ": " + subtitle : titleText;
    } else if (withoutPrefix != null) {
      StringBuilder sb = new StringBuilder();
      if (prefix != null) {
        sb.append(prefix).append(" ");
      }
      sb.append(withoutPrefix);
      if (subtitle != null) {
        sb.append(": ").append(subtitle);
      }
      assembled = sb.toString();
    }
    if (assembled == null) {
      log.debug3("no title found");
      return null;
    }
    log.debug3("title found: " + assembled);
    return assembled;
  }
};
/*
* PUBLISHING DATE - could be under one of two nodes
* NODE=<PublishingDate/>
* <PublishingDateRole/>
* <Date dateformat="xx"/> // unspecified format means YYYYMMDD
*
* NODE=<MarketDate/>
* <MarketDateRole/>
* <Date dateformat="xx"/> // unspecified format means YYYYMMDD
*/
static private final NodeValue ONIX_DATE_VALUE = new NodeValue() {
  /**
   * Extract a normalized date from a PublishingDate or MarketDate node.
   * Returns YYYY-MM-DD for full YYYYMMDD dates, YYYY for year-level
   * formats (codes 01-05), the raw text for unknown formats, and null
   * for missing/irrelevant dates.
   */
  @Override
  public String getValue(Node node) {
    log.debug3("getValue of ONIX date");
    NodeList childNodes = node.getChildNodes();
    if (childNodes == null) return null;
    String dRole = null;
    String dFormat = null;
    String dDate = null;
    String RoleName = "PublishingDateRole";
    String shortRoleName = "x448";
    // short form is just all lower case
    if (!(node.getNodeName().equalsIgnoreCase("PublishingDate"))) {
      RoleName = "MarketDateRole";
      shortRoleName = "j408";
    }
    for (int m = 0; m < childNodes.getLength(); m++) {
      Node childNode = childNodes.item(m);
      if ((childNode.getNodeName().equals(RoleName)) || childNode.getNodeName().equals(shortRoleName)) {
        dRole = childNode.getTextContent();
      } else if ((childNode.getNodeName().equals("Date")) || childNode.getNodeName().equals("b306")) {
        dDate = childNode.getTextContent();
        // get the format - try both long and short attribute names...
        dFormat = ((Element) childNode).getAttribute("dateformat");
        if ((dFormat == null) || dFormat.isEmpty()) {
          dFormat = ((Element) childNode).getAttribute("j260");
          if ((dFormat == null) || dFormat.isEmpty()) {
            // default: unspecified format means YYYYMMDD
            dFormat = "00";
          }
        }
      }
    }
    // FIX: the original dereferenced dRole/dDate unconditionally and threw a
    // NullPointerException on records missing the role or date child element.
    if (dRole == null || dDate == null) {
      return null;
    }
    if (!(dRole.equals("01") || dRole.equals("11") || dRole.equals("12"))) {
      // not a type of date role we care about
      return null;
    }
    if (dFormat.equals("00") && (dDate.length() > 7)) {
      // do a length check in case the date format is mis-labeled
      // make it W3C format (YYYY-MM-DD) instead of YYYYMMDD
      StringBuilder dBuilder = new StringBuilder();
      dBuilder.append(dDate.substring(0, 4)); //YYYY
      dBuilder.append("-");
      dBuilder.append(dDate.substring(4, 6)); //MM
      dBuilder.append("-");
      dBuilder.append(dDate.substring(6, 8)); //DD
      return dBuilder.toString();
    } else if ((dFormat.equals("01") || dFormat.equals("02")
        || dFormat.equals("03")
        || dFormat.equals("04") || dFormat.equals("05"))
        && (dDate.length() >= 4)) {
      // the year is the first four chars of the string in any of these formats.
      // FIX: guard the substring so a short/malformed date cannot throw.
      return (dDate.substring(0, 4)); //YYYY
    } else {
      return dDate; // unknown format (or too short) - return it as is
    }
  }
};
/*
 * ONIX specific XPATH key definitions that we care about.
 * Each expression matches both the ONIX "reference" (long, mixed-case) tag
 * names and the equivalent "short" (lower-case code) tag names, joined by "|".
 */
public static String ONIX_RR = "RecordReference|a001";
/* Under an item node, the interesting bits live at these relative locations */
protected static String ONIX_idtype_isbn13 =
    "ProductIdentifier[ProductIDType='15'] | productidentifier[b221='15']";
private static String ONIX_idtype_lccn =
    "ProductIdentifier[ProductIDType='13'] | productidentifier[b221='13']";
public static String ONIX_idtype_doi =
    "ProductIdentifier[ProductIDType='06'] | productidentifier[b221='06']";
// this one may have different meanings for different publishers
// so just collect it by default in to the raw metadata
public static String ONIX_idtype_proprietary =
    "ProductIdentifier[ProductIDType='01'] | productidentifier[b221='01']";
/* components under DescriptiveDetail */
private static String ONIX_product_form =
    "DescriptiveDetail/ProductFormDetail | descriptivedetail/b333";
/* only pick up level01 title element - allow for no leading 0...(bradypus) */
private static String ONIX_product_title =
    "DescriptiveDetail/TitleDetail[TitleType = '01' or TitleType = '1']/TitleElement[TitleElementLevel = '01'] | descriptivedetail/titledetail[b202 = '01' or b202 = '1']/titleelement[x409 = '01']";
// chapter titles live in a level-04 TitleElement of the same TitleDetail
private static String ONIX_chapter_title =
    "DescriptiveDetail/TitleDetail[TitleType = '01' or TitleType = '1']/TitleElement[TitleElementLevel = '04'] | descriptivedetail/titledetail[b202 = '01' or b202 = '1']/titleelement[x409 = '04']";
private static String ONIX_product_contrib =
    "DescriptiveDetail/Contributor | descriptivedetail/contributor";
private static String ONIX_product_comp =
    "DescriptiveDetail/ProductComposition | descriptivedetail/x314";
/* components under DescriptiveDetail if this is part of series */
private static String ONIX_product_seriestitle =
    "DescriptiveDetail/Collection/TitleDetail/TitleElement[TitleElementLevel = '01'] | descriptivedetail/collection/titledetail/titleelement[x409 = '01']";
private static String ONIX_product_seriesISSN =
    "DescriptiveDetail/Collection/CollectionIdentifier[CollectionIDType = '02'] | descriptivedetail/collection/collectionidentifier[x344= '02']";
/* components under PublishingDetail */
private static String ONIX_pub_name =
    "PublishingDetail/Publisher/PublisherName | publishingdetail/publisher/b081";
private static String ONIX_pub_date =
    "PublishingDetail/PublishingDate | publishingdetail/publishingdate";
// expose this for access from post-cook
public static String ONIX_copy_date =
    "PublishingDetail/CopyrightStatement/CopyrightYear | publishingdetail/copyrightstatement/b087";
/* components under MarketPublishingDetail */
private static String ONIX_mkt_date =
    "ProductSupply/MarketPublishingDetail/MarketDate | productsupply/marketpublishingdetail/marketdate";
/*
 * The following 3 variables are needed to construct the XPathXmlMetadataParser
 */
/* 1. MAP associating xpath expression with the evaluator used to extract its value */
static private final Map<String,XPathValue> ONIX_articleMap =
    new HashMap<String,XPathValue>();
static {
  // identifiers
  ONIX_articleMap.put(ONIX_RR, XmlDomMetadataExtractor.TEXT_VALUE);
  ONIX_articleMap.put(ONIX_idtype_isbn13, ONIX_ID_VALUE);
  ONIX_articleMap.put(ONIX_idtype_lccn, ONIX_ID_VALUE);
  ONIX_articleMap.put(ONIX_idtype_doi, ONIX_ID_VALUE);
  ONIX_articleMap.put(ONIX_idtype_proprietary, ONIX_ID_VALUE);
  // descriptive detail: form, titles, contributors, composition
  ONIX_articleMap.put(ONIX_product_form, XmlDomMetadataExtractor.TEXT_VALUE);
  ONIX_articleMap.put(ONIX_product_title, ONIX_TITLE_VALUE);
  ONIX_articleMap.put(ONIX_chapter_title, ONIX_TITLE_VALUE);
  ONIX_articleMap.put(ONIX_product_contrib, ONIX_AUTHOR_VALUE);
  ONIX_articleMap.put(ONIX_product_comp, XmlDomMetadataExtractor.TEXT_VALUE);
  // publishing detail: publisher and dates
  ONIX_articleMap.put(ONIX_pub_name, XmlDomMetadataExtractor.TEXT_VALUE);
  ONIX_articleMap.put(ONIX_pub_date, ONIX_DATE_VALUE);
  ONIX_articleMap.put(ONIX_mkt_date, ONIX_DATE_VALUE);
  ONIX_articleMap.put(ONIX_copy_date, XmlDomMetadataExtractor.TEXT_VALUE);
  // series information (raw only; see cookMap TODOs)
  ONIX_articleMap.put(ONIX_product_seriestitle, ONIX_TITLE_VALUE);
  ONIX_articleMap.put(ONIX_product_seriesISSN, ONIX_ID_VALUE);
}
/* 2. Each item (book) has its own subNode */
static private final String ONIX_articleNode = "//Product|//product";
/* 3. in ONIX, there is no global information we care about, it is repeated per article */
static private final Map<String,XPathValue> ONIX_globalMap = null;
/*
 * The emitter will need a map to know how to cook ONIX raw values
 * (i.e. which raw xpath value maps to which standard MetadataField).
 */
private static final MultiValueMap cookMap = new MultiValueMap();
static {
  // do NOT cook publisher_name; get from TDB file for consistency
  cookMap.put(ONIX_idtype_isbn13, MetadataField.FIELD_ISBN);
  cookMap.put(ONIX_idtype_doi, MetadataField.FIELD_DOI);
  cookMap.put(ONIX_product_title, MetadataField.FIELD_PUBLICATION_TITLE);
  cookMap.put(ONIX_chapter_title, MetadataField.FIELD_ARTICLE_TITLE);
  cookMap.put(ONIX_product_contrib, MetadataField.FIELD_AUTHOR);
  cookMap.put(ONIX_pub_date, MetadataField.FIELD_DATE);
  cookMap.put(ONIX_pub_name, MetadataField.FIELD_PUBLISHER);
  // TODO - after priority setting is allowed in cooking
  //cookMap.put(ONIX_mkt_date, MetadataField.FIELD_DATE);
  //cookMap.put(ONIX_copy_date, MetadataField.FIELD_DATE);
  //TODO: If book is part of series, currently no way to store title,issn
  //cookMap.put(ONIX_product_seriestitle, MetadataField.FIELD_SERIES_TITLE);
  //cookMap.put(ONIX_product_seriesISSN, MetadataField.FIELD_SERIES_ISSN);
  //TODO: Book, BookSeries...currently no key field to put the information in to
  //cookMap.put(ONIX_product_pub + "/PublishingComposition", ONIX_FIELD_TYPE);
  //TODO: currently no way to store multiple formats in MetadataField (FIELD_FORMAT is a single);
}
/**
 * ONIX3 does not carry global (file-level) metadata outside of the
 * per-article records, so there is nothing to extract at this level.
 *
 * @return always null (ONIX_globalMap is declared null)
 */
@Override
public Map<String, XPathValue> getGlobalMetaMap() {
  return ONIX_globalMap;
}
/**
 * Return ONIX3 article paths representing metadata of interest.
 *
 * @return map of xpath expression to value evaluator, applied per Product node
 */
@Override
public Map<String, XPathValue> getArticleMetaMap() {
  return ONIX_articleMap;
}
/**
 * Return the article node path; each Product (long or short form)
 * element is treated as one item record.
 *
 * @return xpath selecting every Product/product node in the document
 */
@Override
public String getArticleNode() {
  return ONIX_articleNode;
}
/**
 * Return a map to translate raw xpath-keyed values to cooked
 * (standard MetadataField) values.
 *
 * @return the raw-to-cooked translation map
 */
@Override
public MultiValueMap getCookMap() {
  return cookMap;
}
/**
 * Return the path for isbn13 so multiple records for the same item
 * can be combined.
 *
 * @return the isbn13 xpath key used for de-duplication
 */
@Override
public String getDeDuplicationXPathKey() {
  return ONIX_idtype_isbn13;
}
/**
 * Return the path for product form so when multiple records for the same
 * item are combined, the product forms are combined together.
 *
 * @return the product-form xpath key used for consolidation
 */
@Override
public String getConsolidationXPathKey() {
  return ONIX_product_form;
}
/**
 * The filenames are based on the isbn13 value.
 *
 * @return the isbn13 xpath key used to derive filenames
 */
@Override
public String getFilenameXPathKey() {
  return ONIX_idtype_isbn13;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.table.record;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.config.SiddhiQueryContext;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventCloner;
import io.siddhi.core.event.stream.StreamEventFactory;
import io.siddhi.core.exception.ConnectionUnavailableException;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.executor.VariableExpressionExecutor;
import io.siddhi.core.table.CompiledUpdateSet;
import io.siddhi.core.table.Table;
import io.siddhi.core.util.collection.AddingStreamEventExtractor;
import io.siddhi.core.util.collection.operator.CompiledCondition;
import io.siddhi.core.util.collection.operator.CompiledExpression;
import io.siddhi.core.util.collection.operator.MatchingMetaInfoHolder;
import io.siddhi.core.util.config.ConfigReader;
import io.siddhi.query.api.definition.TableDefinition;
import io.siddhi.query.api.execution.query.output.stream.UpdateSet;
import io.siddhi.query.api.expression.Expression;
import org.apache.log4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * An abstract implementation of table. Abstract implementation will handle {@link ComplexEventChunk} so that
 * developer can directly work with event data.
 * <p>
 * When a {@link RecordTableHandler} is configured, every store operation (add, find, contains,
 * delete, update, updateOrAdd) is routed through it; otherwise the corresponding abstract
 * primitive of the concrete store implementation is invoked directly.
 */
public abstract class AbstractRecordTable extends Table {
    private static final Logger log = Logger.getLogger(AbstractRecordTable.class);
    // Factory for StreamEvents used to materialize records read back from the store.
    protected StreamEventFactory storeEventPool;
    // Optional interceptor wrapping all store operations; null when none is configured.
    protected RecordTableHandler recordTableHandler;
    /**
     * Initializes the record table: wires up the optional handler, keeps the event
     * factory, then initializes the concrete store and its cache.
     */
    @Override
    public void init(TableDefinition tableDefinition, StreamEventFactory storeEventPool,
                     StreamEventCloner storeEventCloner, ConfigReader configReader, SiddhiAppContext
                             siddhiAppContext, RecordTableHandler recordTableHandler) {
        if (recordTableHandler != null) {
            recordTableHandler.init(tableDefinition, new RecordTableHandlerCallback(this),
                    siddhiAppContext);
        }
        this.recordTableHandler = recordTableHandler;
        this.storeEventPool = storeEventPool;
        init(tableDefinition, configReader);
        initCache(tableDefinition, siddhiAppContext, storeEventCloner, configReader);
    }
    // Initializes the table's caching layer; implemented by subclasses.
    protected abstract void initCache(TableDefinition tableDefinition, SiddhiAppContext siddhiAppContext,
                                      StreamEventCloner storeEventCloner, ConfigReader configReader);
    /**
     * Initializing the Record Table
     *
     * @param tableDefinition definition of the table with annotations if any
     * @param configReader this hold the {@link AbstractRecordTable} configuration reader.
     */
    protected abstract void init(TableDefinition tableDefinition, ConfigReader configReader);
    @Override
    public TableDefinition getTableDefinition() {
        return tableDefinition;
    }
    /**
     * Adds all events in the chunk to the store as raw records.
     * The timestamp forwarded to the handler is that of the last event in the chunk.
     */
    @Override
    public void add(ComplexEventChunk<StreamEvent> addingEventChunk) throws ConnectionUnavailableException {
        List<Object[]> records = new ArrayList<>();
        addingEventChunk.reset();
        long timestamp = 0L;
        while (addingEventChunk.hasNext()) {
            StreamEvent event = addingEventChunk.next();
            records.add(event.getOutputData());
            timestamp = event.getTimestamp();
        }
        if (recordTableHandler != null) {
            recordTableHandler.add(timestamp, records);
        } else {
            add(records);
        }
    }
    /**
     * Add records to the Table
     *
     * @param records records that need to be added to the table, each Object[] represent a record and it will match
     *                the attributes of the Table Definition.
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract void add(List<Object[]> records) throws ConnectionUnavailableException;
    /**
     * Finds the records matching the compiled condition and materializes them
     * as a chain of StreamEvents built from the event pool.
     */
    @Override
    public StreamEvent find(CompiledCondition compiledCondition, StateEvent matchingEvent)
            throws ConnectionUnavailableException {
        RecordStoreCompiledCondition recordStoreCompiledCondition =
                ((RecordStoreCompiledCondition) compiledCondition);
        // Resolve each stream variable in the condition against the matching event.
        Map<String, Object> findConditionParameterMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry : recordStoreCompiledCondition.variableExpressionExecutorMap
                .entrySet()) {
            findConditionParameterMap.put(entry.getKey(), entry.getValue().execute(matchingEvent));
        }
        Iterator<Object[]> records;
        if (recordTableHandler != null) {
            records = recordTableHandler.find(matchingEvent.getTimestamp(), findConditionParameterMap,
                    recordStoreCompiledCondition.compiledCondition);
        } else {
            records = find(findConditionParameterMap, recordStoreCompiledCondition.compiledCondition);
        }
        // Copy each raw record's values into a pooled StreamEvent.
        ComplexEventChunk<StreamEvent> streamEventComplexEventChunk = new ComplexEventChunk<>(true);
        if (records != null) {
            while (records.hasNext()) {
                Object[] record = records.next();
                StreamEvent streamEvent = storeEventPool.newInstance();
                System.arraycopy(record, 0, streamEvent.getOutputData(), 0, record.length);
                streamEventComplexEventChunk.add(streamEvent);
            }
        }
        return streamEventComplexEventChunk.getFirst();
    }
    /**
     * Find records matching the compiled condition
     *
     * @param findConditionParameterMap map of matching StreamVariable Ids and their values
     *                                  corresponding to the compiled condition
     * @param compiledCondition         the compiledCondition against which records should be matched
     * @return RecordIterator of matching records
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract RecordIterator<Object[]> find(Map<String, Object> findConditionParameterMap,
                                                     CompiledCondition compiledCondition)
            throws ConnectionUnavailableException;
    /**
     * Checks whether any stored record matches the given event under the compiled condition.
     */
    @Override
    public boolean contains(StateEvent matchingEvent, CompiledCondition compiledCondition)
            throws ConnectionUnavailableException {
        RecordStoreCompiledCondition recordStoreCompiledCondition =
                ((RecordStoreCompiledCondition) compiledCondition);
        Map<String, Object> containsConditionParameterMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry :
                recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
            containsConditionParameterMap.put(entry.getKey(), entry.getValue().execute(matchingEvent));
        }
        if (recordTableHandler != null) {
            return recordTableHandler.contains(matchingEvent.getTimestamp(), containsConditionParameterMap,
                    recordStoreCompiledCondition.compiledCondition);
        } else {
            return contains(containsConditionParameterMap, recordStoreCompiledCondition.compiledCondition);
        }
    }
    /**
     * Check if matching record exist
     *
     * @param containsConditionParameterMap map of matching StreamVariable Ids and their values corresponding to the
     *                                      compiled condition
     * @param compiledCondition             the compiledCondition against which records should be matched
     * @return if matching record found or not
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract boolean contains(Map<String, Object> containsConditionParameterMap,
                                        CompiledCondition compiledCondition)
            throws ConnectionUnavailableException;
    /**
     * Deletes all records matching any of the deleting events under the compiled condition.
     * The timestamp forwarded to the handler is that of the last event in the chunk.
     */
    @Override
    public void delete(ComplexEventChunk<StateEvent> deletingEventChunk, CompiledCondition compiledCondition)
            throws ConnectionUnavailableException {
        RecordStoreCompiledCondition recordStoreCompiledCondition =
                ((RecordStoreCompiledCondition) compiledCondition);
        // One condition-parameter map per deleting event.
        List<Map<String, Object>> deleteConditionParameterMaps = new ArrayList<>();
        deletingEventChunk.reset();
        long timestamp = 0L;
        while (deletingEventChunk.hasNext()) {
            StateEvent stateEvent = deletingEventChunk.next();
            Map<String, Object> variableMap = new HashMap<>();
            for (Map.Entry<String, ExpressionExecutor> entry :
                    recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
                variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
            }
            deleteConditionParameterMaps.add(variableMap);
            timestamp = stateEvent.getTimestamp();
        }
        if (recordTableHandler != null) {
            recordTableHandler.delete(timestamp, deleteConditionParameterMaps, recordStoreCompiledCondition.
                    compiledCondition);
        } else {
            delete(deleteConditionParameterMaps, recordStoreCompiledCondition.compiledCondition);
        }
    }
    /**
     * Delete all matching records
     *
     * @param deleteConditionParameterMaps map of matching StreamVariable Ids and their values corresponding to the
     *                                     compiled condition
     * @param compiledCondition            the compiledCondition against which records should be matched for deletion
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract void delete(List<Map<String, Object>> deleteConditionParameterMaps,
                                   CompiledCondition compiledCondition)
            throws ConnectionUnavailableException;
    /**
     * Updates all records matching each updating event, applying the compiled update set.
     * The timestamp forwarded to the handler is that of the last event in the chunk.
     */
    @Override
    public void update(ComplexEventChunk<StateEvent> updatingEventChunk, CompiledCondition compiledCondition,
                       CompiledUpdateSet compiledUpdateSet) throws ConnectionUnavailableException {
        RecordStoreCompiledCondition recordStoreCompiledCondition =
                ((RecordStoreCompiledCondition) compiledCondition);
        RecordTableCompiledUpdateSet recordTableCompiledUpdateSet = (RecordTableCompiledUpdateSet) compiledUpdateSet;
        // Parallel lists: entry i holds the condition params and set params for event i.
        List<Map<String, Object>> updateConditionParameterMaps = new ArrayList<>();
        List<Map<String, Object>> updateSetParameterMaps = new ArrayList<>();
        updatingEventChunk.reset();
        long timestamp = 0L;
        while (updatingEventChunk.hasNext()) {
            StateEvent stateEvent = updatingEventChunk.next();
            Map<String, Object> variableMap = new HashMap<>();
            for (Map.Entry<String, ExpressionExecutor> entry :
                    recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
                variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
            }
            updateConditionParameterMaps.add(variableMap);
            Map<String, Object> variableMapForUpdateSet = new HashMap<>();
            for (Map.Entry<String, ExpressionExecutor> entry :
                    recordTableCompiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                variableMapForUpdateSet.put(entry.getKey(), entry.getValue().execute(stateEvent));
            }
            updateSetParameterMaps.add(variableMapForUpdateSet);
            timestamp = stateEvent.getTimestamp();
        }
        if (recordTableHandler != null) {
            recordTableHandler.update(timestamp, recordStoreCompiledCondition.compiledCondition,
                    updateConditionParameterMaps, recordTableCompiledUpdateSet.getUpdateSetMap(),
                    updateSetParameterMaps);
        } else {
            update(recordStoreCompiledCondition.compiledCondition, updateConditionParameterMaps,
                    recordTableCompiledUpdateSet.getUpdateSetMap(), updateSetParameterMaps);
        }
    }
    /**
     * Update all matching records
     *
     * @param updateCondition              the compiledCondition against which records should be matched for update
     * @param updateConditionParameterMaps map of matching StreamVariable Ids and their values corresponding to the
     *                                     compiled condition based on which the records will be updated
     * @param updateSetExpressions         the set of updates mappings and related compiled expressions
     * @param updateSetParameterMaps       map of matching StreamVariable Ids and their values corresponding to the
     *                                     update set
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract void update(CompiledCondition updateCondition,
                                   List<Map<String, Object>> updateConditionParameterMaps,
                                   Map<String, CompiledExpression> updateSetExpressions,
                                   List<Map<String, Object>> updateSetParameterMaps)
            throws ConnectionUnavailableException;
    /**
     * Updates matching records per event, or inserts the event's output data as a new
     * record if nothing matched. The timestamp forwarded is that of the last event.
     */
    @Override
    public void updateOrAdd(ComplexEventChunk<StateEvent> updateOrAddingEventChunk,
                            CompiledCondition compiledCondition, CompiledUpdateSet compiledUpdateSet,
                            AddingStreamEventExtractor addingStreamEventExtractor)
            throws ConnectionUnavailableException {
        RecordStoreCompiledCondition recordStoreCompiledCondition =
                ((RecordStoreCompiledCondition) compiledCondition);
        RecordTableCompiledUpdateSet recordTableCompiledUpdateSet = (RecordTableCompiledUpdateSet) compiledUpdateSet;
        // Parallel lists: entry i holds the condition params, set params and fallback
        // record to insert for event i.
        List<Map<String, Object>> updateConditionParameterMaps = new ArrayList<>();
        List<Map<String, Object>> updateSetParameterMaps = new ArrayList<>();
        List<Object[]> addingRecords = new ArrayList<>();
        updateOrAddingEventChunk.reset();
        long timestamp = 0L;
        while (updateOrAddingEventChunk.hasNext()) {
            StateEvent stateEvent = updateOrAddingEventChunk.next();
            Map<String, Object> variableMap = new HashMap<>();
            for (Map.Entry<String, ExpressionExecutor> entry :
                    recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
                variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
            }
            updateConditionParameterMaps.add(variableMap);
            Map<String, Object> variableMapForUpdateSet = new HashMap<>();
            for (Map.Entry<String, ExpressionExecutor> entry :
                    recordTableCompiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                variableMapForUpdateSet.put(entry.getKey(), entry.getValue().execute(stateEvent));
            }
            updateSetParameterMaps.add(variableMapForUpdateSet);
            addingRecords.add(stateEvent.getStreamEvent(0).getOutputData());
            timestamp = stateEvent.getTimestamp();
        }
        if (recordTableHandler != null) {
            recordTableHandler.updateOrAdd(timestamp, recordStoreCompiledCondition.compiledCondition,
                    updateConditionParameterMaps, recordTableCompiledUpdateSet.getUpdateSetMap(),
                    updateSetParameterMaps, addingRecords);
        } else {
            updateOrAdd(recordStoreCompiledCondition.compiledCondition, updateConditionParameterMaps,
                    recordTableCompiledUpdateSet.getUpdateSetMap(), updateSetParameterMaps, addingRecords);
        }
    }
    /**
     * Try updating the records if they exist else add the records
     *
     * @param updateCondition              the compiledCondition against which records should be matched for update
     * @param updateConditionParameterMaps map of matching StreamVariable Ids and their values corresponding to the
     *                                     compiled condition based on which the records will be updated
     * @param updateSetExpressions         the set of updates mappings and related compiled expressions
     * @param updateSetParameterMaps       map of matching StreamVariable Ids and their values corresponding to the
     *                                     update set
     * @param addingRecords                the values for adding new records if the update condition did not match
     * @throws ConnectionUnavailableException if the connection to the backing store is unavailable
     */
    protected abstract void updateOrAdd(CompiledCondition updateCondition,
                                        List<Map<String, Object>> updateConditionParameterMaps,
                                        Map<String, CompiledExpression> updateSetExpressions,
                                        List<Map<String, Object>> updateSetParameterMaps,
                                        List<Object[]> addingRecords)
            throws ConnectionUnavailableException;
    /**
     * Compiles a condition expression into a {@link RecordStoreCompiledCondition}, pairing
     * the store-specific compiled form with the variable executors it depends on.
     */
    @Override
    public CompiledCondition compileCondition(Expression condition, MatchingMetaInfoHolder matchingMetaInfoHolder,
                                              List<VariableExpressionExecutor> variableExpressionExecutors,
                                              Map<String, Table> tableMap, SiddhiQueryContext siddhiQueryContext) {
        ExpressionBuilder expressionBuilder = new ExpressionBuilder(condition, matchingMetaInfoHolder,
                variableExpressionExecutors, tableMap, siddhiQueryContext);
        CompiledCondition compileCondition = compileCondition(expressionBuilder);
        Map<String, ExpressionExecutor> expressionExecutorMap = expressionBuilder.getVariableExpressionExecutorMap();
        return new RecordStoreCompiledCondition(expressionExecutorMap, compileCondition);
    }
    /**
     * Compiles every assignment in an update SET clause, collecting each attribute's
     * compiled expression and the union of the variable executors they depend on.
     */
    public CompiledUpdateSet compileUpdateSet(UpdateSet updateSet,
                                              MatchingMetaInfoHolder matchingMetaInfoHolder,
                                              List<VariableExpressionExecutor> variableExpressionExecutors,
                                              Map<String, Table> tableMap, SiddhiQueryContext siddhiQueryContext) {
        RecordTableCompiledUpdateSet recordTableCompiledUpdateSet = new RecordTableCompiledUpdateSet();
        Map<String, ExpressionExecutor> parentExecutorMap = new HashMap<>();
        for (UpdateSet.SetAttribute setAttribute : updateSet.getSetAttributeList()) {
            ExpressionBuilder expressionBuilder = new ExpressionBuilder(setAttribute.getAssignmentExpression(),
                    matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
            CompiledExpression compiledExpression = compileSetAttribute(expressionBuilder);
            recordTableCompiledUpdateSet.put(setAttribute.getTableVariable().getAttributeName(), compiledExpression);
            Map<String, ExpressionExecutor> expressionExecutorMap =
                    expressionBuilder.getVariableExpressionExecutorMap();
            parentExecutorMap.putAll(expressionExecutorMap);
        }
        recordTableCompiledUpdateSet.setExpressionExecutorMap(parentExecutorMap);
        return recordTableCompiledUpdateSet;
    }
    /**
     * Compile the matching expression
     *
     * @param expressionBuilder helps visiting the conditions in order to compile the condition
     * @return compiled expression that can be used for matching events in find, contains, delete, update and
     * updateOrAdd
     */
    protected abstract CompiledCondition compileCondition(ExpressionBuilder expressionBuilder);
    /**
     * Compiles the expression in a set clause
     *
     * @param expressionBuilder helps visiting the conditions in order to compile the condition
     * @return compiled expression that can be used for matching events in find, contains, delete, update and
     * updateOrAdd
     */
    protected abstract CompiledExpression compileSetAttribute(ExpressionBuilder expressionBuilder);
    /**
     * Compiled condition of the {@link AbstractRecordTable}: pairs the store-specific
     * compiled condition with the executors that resolve its stream variables.
     */
    protected class RecordStoreCompiledCondition implements CompiledCondition {
        protected Map<String, ExpressionExecutor> variableExpressionExecutorMap;
        protected CompiledCondition compiledCondition;
        RecordStoreCompiledCondition(Map<String, ExpressionExecutor> variableExpressionExecutorMap,
                                     CompiledCondition compiledCondition) {
            this.variableExpressionExecutorMap = variableExpressionExecutorMap;
            this.compiledCondition = compiledCondition;
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.ExpirableRunnable;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.FocusCommand;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.util.containers.WeakKeyWeakValueHashMap;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.util.*;
import java.util.List;
public class FocusTrackback {
private static final Logger LOG = Logger.getInstance("FocusTrackback");
// Per-root-window stacks of live trackbacks, newest last; window keys held weakly.
private static final Map<Window, List<FocusTrackback>> ourRootWindowToParentsStack = new WeakHashMap<>();
// Last known focused component per root window; keys and values both held weakly.
private static final Map<Window, Component> ourRootWindowToFocusedMap = new WeakKeyWeakValueHashMap<>();
private Window myParentWindow;
// Ultimate parent window of myParentWindow; computed in register().
private Window myRoot;
// Focus owners held weakly so a trackback never pins components in memory.
private WeakReference<Component> myFocusOwner = new WeakReference<>(null);
private WeakReference<Component> myLocalFocusOwner = new WeakReference<>(null);
private final String myRequestorName;
private ComponentQuery myFocusedComponentQuery;
private boolean myMustBeShown;
private boolean myConsumed;
// NOTE(review): raw WeakReference — presumably WeakReference<Object>; confirm
// all assignment sites before adding the type argument.
private final WeakReference myRequestor;
private boolean myScheduledForRestore;
private boolean myWillBeScheduledForRestore;
private boolean myForcedRestore;
/** Convenience overload: resolves the parent component's window before delegating. */
public FocusTrackback(@NotNull Object requestor, Component parent, boolean mustBeShown) {
  this(requestor, parent == null ? null : UIUtil.getWindow(parent), mustBeShown);
}
/**
 * Creates a trackback for the given requestor, registers it on its root window's
 * stack and snapshots the currently focused component so it can be restored later.
 *
 * @param requestor   object requesting focus tracking; held weakly, its toString() kept for diagnostics
 * @param parent      parent window (may be null)
 * @param mustBeShown whether the requestor's UI must be shown for the trackback to apply
 */
public FocusTrackback(@NotNull Object requestor, Window parent, boolean mustBeShown) {
  myRequestor = new WeakReference<>(requestor);
  myRequestorName = requestor.toString();
  myParentWindow = parent;
  myMustBeShown = mustBeShown;
  // No focus tracking in headless mode or on excluded platforms.
  if (isHeadlessOrWrongOS()) return;
  register(parent);
  final List<FocusTrackback> stack = getStackForRoot(myRoot);
  // register() just appended this trackback, so it must be present in the stack.
  final int index = stack.indexOf(this);
  //todo [kirillk] diagnostics for IDEADEV-28766
  assert index >= 0 : myRequestorName;
  final KeyboardFocusManager manager = KeyboardFocusManager.getCurrentKeyboardFocusManager();
  setLocalFocusOwner(manager.getPermanentFocusOwner());
  final IdeFocusManager fm = IdeFocusManager.getGlobalInstance();
  // If focus is mid-transfer and nothing owns it right now, inherit the local
  // focus owner from the nearest unconsumed predecessor on the stack.
  if (myLocalFocusOwner.get() == null && fm.isFocusBeingTransferred()) {
    if (index > 0) {
      int eachIndex = index - 1;
      // NOTE(review): the scan stops before index 0 (condition is eachIndex > 0),
      // so the bottom-most trackback is never consulted — confirm intentional.
      while (eachIndex > 0) {
        final FocusTrackback each = stack.get(eachIndex);
        if (!each.isConsumed()) {
          Component component = each.myLocalFocusOwner.get();
          if (component != null) {
            setLocalFocusOwner(component);
            break;
          }
        }
        eachIndex--;
      }
    }
  }
  if (index == 0) {
    // Bottom of the stack: capture the real focus owner, falling back to the
    // active window's own trackback when the permanent owner is unknown.
    setFocusOwner(manager.getPermanentFocusOwner());
    if (getFocusOwner() == null) {
      final Window window = manager.getActiveWindow();
      if (window instanceof Provider) {
        final FocusTrackback other = ((Provider)window).getFocusTrackback();
        if (other != null) {
          setFocusOwner(other.getFocusOwner());
        }
      }
    }
  }
  else {
    // Nested trackbacks share the bottom-most entry's focus owner.
    setFocusOwner(stack.get(0).getFocusOwner());
  }
  // Keep the per-root focus cache in sync with the stack's base owner.
  if (stack.size() == 1 && getFocusOwner() == null) {
    setFocusOwner(getFocusFor(myRoot));
  }
  else if (index == 0 && getFocusOwner() != null) {
    setFocusFor(myRoot, getFocusOwner());
  }
}
/**
 * True when focus tracking cannot work at all: no application instance yet,
 * a headless environment, or an OS where this mechanism is disabled.
 */
private static boolean isHeadlessOrWrongOS() {
  final Application app = ApplicationManager.getApplication();
  if (app == null) {
    return true;
  }
  return app.isHeadlessEnvironment() || wrongOS();
}
/** Remembers the permanent focus owner at creation time; held weakly to avoid leaks. */
private void setLocalFocusOwner(Component component) {
  myLocalFocusOwner = new WeakReference<>(component);
}
/** Returns the last focus owner remembered for the given root window, or null. */
public static Component getFocusFor(Window parent) {
  return ourRootWindowToFocusedMap.get(parent);
}

/** Records the focus owner for the given root window. */
private static void setFocusFor(Window parent, Component focus) {
  ourRootWindowToFocusedMap.put(parent, focus);
}

// Historically disabled tracking on some OSes; currently tracking is enabled everywhere.
private static boolean wrongOS() {
  return false;
}
/**
 * Convenience overload: registers a fixed component as the restore target by
 * wrapping it in a {@link ComponentQuery} that always returns it.
 */
public void registerFocusComponent(@NotNull final Component focusedComponent) {
  // equivalent to the former anonymous ComponentQuery implementation
  registerFocusComponent(() -> focusedComponent);
}
/** Registers a deferred query for the component that should receive focus on restore. */
public void registerFocusComponent(@NotNull ComponentQuery query) {
  myFocusedComponentQuery = query;
}
/**
 * Resolves the ultimate root window of {@code parent} and pushes this trackback
 * onto that root's stack (moving it to the top if it was already present).
 */
private void register(final Window parent) {
  myRoot = findUltimateParent(parent);
  List<FocusTrackback> stack = getCleanStackForRoot();
  // remove-then-add guarantees this entry ends up on top of the stack
  stack.remove(this);
  stack.add(this);
}
/**
 * Stack for this trackback's root window, with consumed/null entries purged.
 * Returns an immutable empty list when the root is gone (e.g. after dispose) --
 * callers must not mutate the result in that case.
 */
private List<FocusTrackback> getCleanStackForRoot() {
  return myRoot == null ? Collections.emptyList() : getCleanStackForRoot(myRoot);
}
/**
 * Purges the stack for {@code root}: disposes consumed entries (dispose() removes
 * them from the stack itself) and drops null slots. Iterates over a snapshot array
 * because the cleanup mutates the live stack.
 */
private static List<FocusTrackback> getCleanStackForRoot(@NotNull Window root) {
  List<FocusTrackback> stack = getStackForRoot(root);
  final FocusTrackback[] stackArray = stack.toArray(new FocusTrackback[stack.size()]);
  for (FocusTrackback eachExisting : stackArray) {
    if (eachExisting != null && eachExisting.isConsumed()) {
      // dispose() also removes the entry from the live stack
      eachExisting.dispose();
    }
    else if (eachExisting == null) {
      stack.remove(eachExisting);
    }
  }
  return stack;
}
/**
 * Schedules restoration of the focus state captured in the constructor.
 * If an IdeFocusManager is available for the current project the restore goes
 * through it (respecting the focus command queue); otherwise it falls back to a
 * plain {@code invokeLater}. No-op if already consumed or already scheduled.
 */
public void restoreFocus() {
  if (isHeadlessOrWrongOS() || myConsumed || isScheduledForRestore()) return;
  Project project = null;
  DataManager dataManager = DataManager.getInstance();
  if (dataManager != null) {
    DataContext context = myParentWindow == null ? dataManager.getDataContext() : dataManager.getDataContext(myParentWindow);
    if (context != null) {
      project = CommonDataKeys.PROJECT.getData(context);
    }
  }
  myScheduledForRestore = true;
  final List<FocusTrackback> stack = getCleanStackForRoot();
  final int index = stack.indexOf(this);
  // if an entry below us is already scheduled, its restore supersedes ours
  for (int i = index - 1; i >=0; i--) {
    if (stack.get(i).isScheduledForRestore()) {
      dispose();
      return;
    }
  }
  if (project != null && !project.isDisposed()) {
    final IdeFocusManager focusManager = IdeFocusManager.getInstance(project);
    cleanParentWindow();
    final Project finalProject = project;
    // run the restore as a focus command; if rejected, revalidate focus so the IDE
    // does not end up with a meaningless focus owner
    focusManager.requestFocus(new MyFocusCommand(), myForcedRestore).doWhenProcessed(() -> dispose()).doWhenRejected(() -> focusManager.revalidateFocus(new ExpirableRunnable.ForProject(finalProject) {
      @Override
      public void run() {
        if (UIUtil.isMeaninglessFocusOwner(focusManager.getFocusOwner())) {
          focusManager.requestDefaultFocus(false);
        }
      }
    }));
  }
  else {
    // no ide focus manager, so no way -- do just later
    //noinspection SSBasedInspection
    SwingUtilities.invokeLater(() -> {
      _restoreFocus();
      dispose();
    });
  }
}
/**
 * Performs the actual focus restore. Returns a done callback when a focus request
 * was issued, a rejected one otherwise. Always removes this entry from the stack
 * and disposes it.
 */
private ActionCallback _restoreFocus() {
  if (isConsumed()) return ActionCallback.REJECTED;
  List<FocusTrackback> stack = getCleanStack();
  if (!stack.contains(this)) return ActionCallback.REJECTED;
  Component toFocus = queryToFocus(stack, this, true);
  final ActionCallback result = new ActionCallback();
  if (toFocus != null) {
    // if Swing already gave focus to a meaningful component in the same window,
    // prefer that component over our recorded target
    final Component ownerBySwing = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
    if (ownerBySwing != null) {
      final Window ownerBySwingWindow = SwingUtilities.getWindowAncestor(ownerBySwing);
      if (ownerBySwingWindow != null && ownerBySwingWindow == SwingUtilities.getWindowAncestor(toFocus)) {
        if (!UIUtil.isMeaninglessFocusOwner(ownerBySwing)) {
          toFocus = ownerBySwing;
        }
      }
    }
    if (myParentWindow != null) {
      final Window to = UIUtil.getWindow(toFocus);
      // only restore within the same ultimate window hierarchy
      if (to != null && UIUtil.findUltimateParent(to) == UIUtil.findUltimateParent(myParentWindow)) { // IDEADEV-34537
        requestFocus(toFocus);
        result.setDone();
      }
    } else {
      requestFocus(toFocus);
      result.setDone();
    }
  }
  if (!result.isDone()) {
    result.setRejected();
  }
  stack.remove(this);
  dispose();
  return result;
}
/**
 * Hands focus to {@code toFocus}: a cross-window request when a forced restore
 * was configured, an in-window request otherwise.
 */
private void requestFocus(Component toFocus) {
  if (!myForcedRestore) {
    toFocus.requestFocusInWindow();
  }
  else {
    toFocus.requestFocus();
  }
}
/**
 * Determines which component {@code trackback} should focus on restore:
 * its recorded local focus owner if still meaningful, else the registered focus
 * component of the entry below it on the stack, else its captured focus owner.
 * With {@code mustBeLastInStack}, returns null if a later stack entry will handle
 * (or is handling) the restore itself.
 */
private static Component queryToFocus(final List<FocusTrackback> stack, final FocusTrackback trackback, boolean mustBeLastInStack) {
  final int index = stack.indexOf(trackback);
  Component toFocus = null;
  Component focusOwner = trackback.myLocalFocusOwner.get();
  if (focusOwner != null) {
    toFocus = focusOwner;
    if (UIUtil.isMeaninglessFocusOwner(toFocus)) {
      toFocus = null;
    }
  }
  if (toFocus == null) {
    if (index > 0) {
      // fall back to the focus component registered by the entry below us
      final ComponentQuery query = stack.get(index - 1).myFocusedComponentQuery;
      toFocus = query != null ? query.getComponent() : null;
    }
    else {
      toFocus = trackback.getFocusOwner();
    }
  }
  if (mustBeLastInStack) {
    // yield to any later entry that is (or will be) restoring focus itself
    for (int i = index + 1; i < stack.size(); i++) {
      if (!stack.get(i).isMustBeShown()) {
        if ((stack.get(i).isScheduledForRestore() || stack.get(i).isWillBeScheduledForRestore()) && !stack.get(i).isConsumed()) {
          toFocus = null;
          break;
        }
      } else if (!stack.get(i).isConsumed()) {
        toFocus = null;
        break;
      }
    }
  }
  return toFocus;
}
/**
 * Stack for this entry's root with null and consumed entries removed -- except
 * this entry itself, which is kept even if consumed so the caller can locate it.
 * Iterates over a snapshot array because removal mutates the live stack.
 */
private List<FocusTrackback> getCleanStack() {
  final List<FocusTrackback> stack = getStackForRoot(myRoot);
  final FocusTrackback[] all = stack.toArray(new FocusTrackback[stack.size()]);
  for (FocusTrackback each : all) {
    if (each == null || each != this && each.isConsumed()) {
      stack.remove(each);
    }
  }
  return stack;
}
/**
 * Returns the (mutable, live) trackback stack for the given root window, creating
 * and registering an empty one on first access.
 *
 * @param root the ultimate parent window; must not be null
 * @return the stack stored in {@code ourRootWindowToParentsStack}
 */
private static List<FocusTrackback> getStackForRoot(@NotNull Window root) {
  // computeIfAbsent replaces the get / null-check / put sequence of the original
  return ourRootWindowToParentsStack.computeIfAbsent(root, k -> new ArrayList<>());
}
/**
 * Climbs from {@code parent} to the top-most window of its hierarchy, following
 * both container parents and window-ancestor links. A null {@code parent} starts
 * from the shared Swing root frame.
 */
@Nullable
private static Window findUltimateParent(final Window parent) {
  Window root = parent == null ? JOptionPane.getRootFrame() : parent;
  while (root != null) {
    final Container next = root.getParent();
    if (next == null) break;
    if (next instanceof Window) {
      root = (Window)next;
    }
    // even when next is itself a Window we keep climbing via its window ancestor
    final Window nextWindow = SwingUtilities.getWindowAncestor(next);
    if (nextWindow == null) break;
    root = nextWindow;
  }
  return root;
}
/** The focus owner captured when this trackback was created; null if collected or never set. */
@Nullable
public Component getFocusOwner() {
  return myFocusOwner.get();
}
/** Human-readable description used in logs and assertions. */
@SuppressWarnings({"HardCodedStringLiteral"})
@Override
public String toString() {
  return String.format("%s requestor: %s parent=%s", getClass().getName(), myRequestorName, myParentWindow);
}
/**
 * Consumes this trackback, removes it from its root's stack and clears all
 * references so nothing is leaked. Safe to call repeatedly: a second call is a
 * no-op because {@code myRoot} is nulled here.
 */
public void dispose() {
  if (myRoot == null) return;
  consume();
  getStackForRoot(myRoot).remove(this);
  myScheduledForRestore = false;
  if (myParentWindow != null) {
    // re-enable the default focus component we may have suppressed in cleanParentWindow()
    FocusTraversalPolicy policy = myParentWindow.getFocusTraversalPolicy();
    if (policy instanceof LayoutFocusTraversalPolicyExt) {
      ((LayoutFocusTraversalPolicyExt)policy).setNoDefaultComponent(false, this);
    }
  }
  myParentWindow = null;
  myRoot = null;
  myFocusOwner.clear();
  myLocalFocusOwner.clear();
}
/**
 * A trackback is "consumed" when it can no longer meaningfully restore focus:
 * it was explicitly consumed/disposed, or -- for must-be-shown entries -- its
 * registered focus component is no longer showing, or otherwise its parent
 * window is gone or hidden.
 */
private boolean isConsumed() {
  if (myConsumed) return true;
  if (myMustBeShown) {
    if (isScheduledForRestore() || myFocusedComponentQuery == null) return false;
    // query the component exactly once: the original called getComponent() twice,
    // and a non-trivial query could return different components between calls
    final Component component = myFocusedComponentQuery.getComponent();
    return component != null && !component.isShowing();
  }
  else {
    return myParentWindow == null || !myParentWindow.isShowing();
  }
}
/** Marks this trackback as used up; it will be purged from its stack on next cleanup. */
public void consume() {
  myConsumed = true;
}

/** Stores the focus owner to restore to; held weakly to avoid leaking components. */
private void setFocusOwner(final Component focusOwner) {
  myFocusOwner = new WeakReference<>(focusOwner);
}

/** See {@link #isConsumed()} for how this flag changes the entry's lifetime. */
public void setMustBeShown(final boolean mustBeShown) {
  myMustBeShown = mustBeShown;
}

public boolean isMustBeShown() {
  return myMustBeShown;
}
/**
 * Drops all tracking state attached to {@code frame}: every stack whose root is the
 * frame (or a descendant window of it) and the remembered focus owner. Iterates
 * over a snapshot array because entries are removed from the live map.
 */
public static void release(@NotNull final JFrame frame) {
  final Window[] all = ourRootWindowToParentsStack.keySet().toArray(new Window[ourRootWindowToParentsStack.size()]);
  for (Window each : all) {
    if (each == null) continue;
    if (each == frame || SwingUtilities.isDescendingFrom(each, frame)) {
      ourRootWindowToParentsStack.remove(each);
    }
  }
  ourRootWindowToFocusedMap.remove(frame);
}
/** The object this trackback was created for, or null if it was garbage-collected. */
public Object getRequestor() {
  return myRequestor.get();
}

/** Announces that restoreFocus() is about to be scheduled (consulted by queryToFocus). */
public void setWillBeScheduledForRestore() {
  myWillBeScheduledForRestore = true;
}

public boolean isScheduledForRestore() {
  return myScheduledForRestore;
}

public boolean isWillBeScheduledForRestore() {
  return myWillBeScheduledForRestore;
}

/** When true, restore uses requestFocus() (cross-window) instead of requestFocusInWindow(). */
public void setForcedRestore(boolean forcedRestore) {
  myForcedRestore = forcedRestore;
}
/**
 * Workaround guarded by the "focus.fix.lost.cursor" registry key: uses reflection
 * to clear AWT's private "temporary lost" / "most recent focus owner" records for
 * the parent window and suppresses its default focus component, so AWT cannot
 * re-focus a stale component while we restore focus ourselves.
 * NOTE(review): relies on private JDK methods -- may break on newer/modular JDKs;
 * failures are deliberately reduced to a debug log.
 */
public void cleanParentWindow() {
  if (!Registry.is("focus.fix.lost.cursor")) return;
  if (myParentWindow != null) {
    try {
      Method tmpLost = Window.class.getDeclaredMethod("setTemporaryLostComponent", Component.class);
      tmpLost.setAccessible(true);
      tmpLost.invoke(myParentWindow, new Object[] {null});
      Method owner =
        KeyboardFocusManager.class.getDeclaredMethod("setMostRecentFocusOwner", Window.class, Component.class);
      owner.setAccessible(true);
      owner.invoke(null, myParentWindow, null);
      FocusTraversalPolicy policy = myParentWindow.getFocusTraversalPolicy();
      if (policy instanceof LayoutFocusTraversalPolicyExt) {
        // undone in dispose()
        ((LayoutFocusTraversalPolicyExt)policy).setNoDefaultComponent(true, this);
      }
    }
    catch (Exception e) {
      LOG.debug(e);
    }
  }
}
/** Implemented by windows that own a FocusTrackback (used to chain focus owners). */
public interface Provider {
  FocusTrackback getFocusTrackback();
}
/**
 * Deferred supplier of the component that should receive focus on restore.
 * Declared as a functional interface so callers can register a lambda.
 */
@FunctionalInterface
public interface ComponentQuery {
  Component getComponent();
}
/**
 * Collects every popup whose trackback would (directly or transitively) restore
 * focus into {@code component} -- i.e. the popups logically "owned" by it.
 */
@NotNull
public static List<JBPopup> getChildPopups(@NotNull final Component component) {
  List<JBPopup> result = new ArrayList<>();
  final Window window = UIUtil.getWindow(component);
  if (window == null) return result;
  final List<FocusTrackback> stack = getCleanStackForRoot(findUltimateParent(window));
  for (FocusTrackback each : stack) {
    if (each.isChildFor(component) && each.getRequestor() instanceof JBPopup) {
      result.add((JBPopup)each.getRequestor());
    }
  }
  return result;
}
/**
 * True when this trackback would restore focus into {@code parent}'s hierarchy,
 * either directly or through a chain of popup trackbacks (popup A restores into
 * popup B which restores into parent, etc.).
 */
private boolean isChildFor(final Component parent) {
  final Component toFocus = queryToFocus(getCleanStack(), this, false);
  if (toFocus == null) return false;
  if (parent == toFocus) return true;
  if (SwingUtilities.isDescendingFrom(toFocus, parent)) return true;
  // walk the chain of popup trackbacks via their captured focus owners
  Component eachToFocus = getFocusOwner();
  FocusTrackback eachTrackback = this;
  while (true) {
    if (eachToFocus == null) {
      break;
    }
    if (SwingUtilities.isDescendingFrom(eachToFocus, parent)) return true;
    if (eachTrackback.getRequestor() instanceof AbstractPopup) {
      FocusTrackback newTrackback = ((AbstractPopup)eachTrackback.getRequestor()).getFocusTrackback();
      if (newTrackback == null || eachTrackback == newTrackback) break;
      // NOTE(review): eachTrackback cannot be null here (checked implicitly above);
      // only the isConsumed() part of this condition can trigger
      if (eachTrackback == null || eachTrackback.isConsumed()) break;
      eachTrackback = newTrackback;
      eachToFocus = eachTrackback.getFocusOwner();
    } else {
      break;
    }
  }
  return false;
}
/**
 * Adapts {@link #_restoreFocus()} to the IdeFocusManager command queue.
 * The command expires once the trackback is consumed, so a stale restore is skipped.
 */
private class MyFocusCommand extends FocusCommand {
  @NotNull
  public ActionCallback run() {
    return _restoreFocus();
  }

  @Override
  public boolean isExpired() {
    return isConsumed();
  }

  public String toString() {
    return "focus trackback requestor";
  }
}
}
| |
package de.gishmo.gwt.editor.processor;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.StringJoiner;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.lang.model.element.Element;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import com.google.auto.common.BasicAnnotationProcessor.ProcessingStep;
import com.google.auto.common.MoreElements;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.SetMultimap;
import com.google.gwt.editor.client.Editor;
import com.google.gwt.editor.client.Editor.Ignore;
import com.google.gwt.editor.client.EditorVisitor;
import com.google.gwt.editor.client.impl.AbstractEditorContext;
import com.google.gwt.editor.client.impl.AbstractSimpleBeanEditorDriver;
import com.google.gwt.editor.client.impl.RootEditorContext;
import com.google.gwt.editor.client.impl.SimpleBeanEditorDelegate;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import de.gishmo.gwt.editor.client.annotation.IsEditor;
class EditorProcessingStep
implements ProcessingStep {
private Messager messager;    // reports generation errors to the compiler
private Filer filer;          // sink for generated source files
private Types types;          // javax.lang.model type utilities
private Elements elements;    // javax.lang.model element utilities
// simple names of delegate stubs already emitted, to avoid generating a class twice
private List<String> alreadyGeneratedEditorDelegates;
/** Instances are created via {@link #builder()} only; copies the processing environment. */
private EditorProcessingStep(Builder builder) {
  this.messager = builder.messager;
  this.filer = builder.filer;
  this.types = builder.types;
  this.elements = builder.elements;
  this.alreadyGeneratedEditorDelegates = new ArrayList<>();
}
/** @return a fresh builder for wiring up the processing environment */
static Builder builder() {
  return new Builder();
}
/** This processing step handles exactly one annotation: {@link IsEditor}. */
@Override
public Set<? extends Class<? extends Annotation>> annotations() {
  return Collections.singleton(IsEditor.class);
}
/**
 * For every element annotated with {@link IsEditor}: builds a processing context
 * and, if the context could be created (the element passed validation), generates
 * all editor artifacts.
 *
 * @param elementsByAnnotation elements grouped by annotation, supplied by the framework
 * @return always empty -- no elements are deferred to a later processing round
 */
@Override
public Set<Element> process(SetMultimap<Class<? extends Annotation>, Element> elementsByAnnotation) {
  for (Element element : elementsByAnnotation.get(IsEditor.class)) {
    EditorProcessingContext context = EditorProcessingContext.builder()
        .withElements(this.elements)
        .withFiler(this.filer)
        .withMessenger(this.messager)
        .withTypes(this.types)
        .forElement(element)
        .build();
    // null context = the builder rejected the element; presumably it already
    // reported the error itself -- TODO confirm
    if (context != null) {
      generate(context);
    }
  }
  return ImmutableSet.of();
}
/**
 * Runs all code generation steps for one {@code @IsEditor} annotated driver:
 * delegate stubs for nested editor types, one EditorContext per attribute, the
 * top-level SimpleBeanEditorDelegate, and finally the driver implementation.
 */
private void generate(EditorProcessingContext context) {
  // Iterable.forEach is sufficient here -- the original .stream().forEach(...) added
  // a pointless stream creation for a pure side-effecting iteration.
  // create SimpleBeanEditorDelegates
  context.getEditorModels().forEach(this::generateEditorClass);
  // create Context
  context.getEditorModels().forEach(editorModel -> generateEditorContextClass(context, editorModel));
  // create SimpleBeanEditorDelegate
  generateSimpleBeanEditorDelegateClass(context);
  // create implementation
  generateImplClass(context);
}
/**
 * <p>Create an EditorContext implementation that will provide access to
 * data owned by parent. In other words, given the EditorData
 * for a {@code PersonEditor} and the EditorData for a {@code AddressEditor}
 * nested in the {@code PersonEditor}, create an EditorContext that will
 * describe the relationship.</p>
 *
 * <p>The generated class extends {@code AbstractEditorContext<T>} (T = the type
 * edited by the sub-editor) and implements canSetInModel / checkAssignment /
 * getEditedType / getFromModel / setInModel by turning the dotted attribute path
 * into chained getter/setter calls on the parent model.</p>
 */
private void generateEditorContextClass(EditorProcessingContext context,
                                        EditorModel editorModel) {
  // @Ignore ==> nothing to do here ... leave
  if (editorModel.getModelElement()
                 .getAnnotation(Ignore.class) != null) {
    return;
  }
  // ToDo
  TypeElement returnType = context.getModelReturnTypeForAttribute(editorModel.getAttibuteName());
  if (returnType == null) {
    return;
  }
  TypeSpec.Builder typeSpec = TypeSpec.classBuilder(editorModel.getContextName())
      .addModifiers(Modifier.PUBLIC)
      .superclass(ParameterizedTypeName.get(ClassName.get(AbstractEditorContext.class),
                                            ClassName.get(MoreElements.getPackage(returnType)
                                                                      .toString(),
                                                          returnType.getSimpleName()
                                                                    .toString())));
  // backing field holding the parent model instance
  FieldSpec parentField = FieldSpec.builder(TypeName.get(context.getModelElement()
                                                                .asType()),
                                            "parent")
                                   .addModifiers(Modifier.PRIVATE,
                                                 Modifier.FINAL)
                                   .build();
  typeSpec.addField(parentField);
  // constructor
  MethodSpec.Builder constructor = MethodSpec.constructorBuilder()
      .addModifiers(Modifier.PUBLIC)
      .addParameter(ParameterSpec.builder(ClassName.get(MoreElements.getPackage(context.getModelElement())
                                                                    .toString(),
                                                        context.getModelElement()
                                                               .getSimpleName()
                                                               .toString()),
                                          "parent")
                                 .build())
      .addParameter(ParameterSpec.builder(ParameterizedTypeName.get(ClassName.get(Editor.class),
                                                                    ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName()))),
                                          "editor")
                                 .build())
      .addParameter(ClassName.get(String.class),
                    "path");
  constructor.addStatement("super(editor, path)");
  constructor.addStatement("this.parent = parent");
  typeSpec.addMethod(constructor.build());
  // sj01 accumulates the null-safety condition for canSetInModel(),
  // sj02 the getter chain "parent.getA().getB()" over the intermediate path segments
  StringJoiner sj01 = new StringJoiner("");
  sj01.add("return parent != null");
  StringJoiner sj02 = new StringJoiner("");
  sj02.add("parent");
  if (editorModel.getPath()
                 .indexOf(".") > 0) {
    Pattern.compile(Pattern.quote("."))
           .splitAsStream(editorModel.getPath())
           .collect(Collectors.toList())
           .forEach(attribute -> {
             // the final path segment is the attribute being set and is skipped here
             String lastAttribute = editorModel.getPath()
                                               .substring(editorModel.getPath()
                                                                     .lastIndexOf(".") + 1);
             if (!lastAttribute.equals(attribute)) {
               sj02.add(".")
                   .add(createGetterMethodName(attribute))
                   .add("()");
               sj01.add(" && ")
                   .add(sj02.toString())
                   .add(" != null");
             }
           });
  }
  typeSpec.addMethod(MethodSpec.methodBuilder("canSetInModel")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(boolean.class)
                               .addStatement(sj01.toString())
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("checkAssignment")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .addParameter(ClassName.get(Object.class),
                                             "value")
                               .returns(ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName())))
                               .addStatement("return ($T) value",
                                             ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName())))
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("getEditedType")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(ClassName.get(Class.class))
                               .addStatement("return $T.class",
                                             ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName())))
                               .build());
  // getFromModel(): "(parent != null && ...) ? parent.getA().getB() : null"
  StringJoiner sj06 = new StringJoiner("");
  sj06.add("return (parent != null");
  StringJoiner sj04 = new StringJoiner("");
  sj04.add("parent");
  if (editorModel.getPath()
                 .indexOf(".") > 0) {
    Pattern.compile(Pattern.quote("."))
           .splitAsStream(editorModel.getPath())
           .collect(Collectors.toList())
           .forEach(attribute -> {
             String lastAttribute = editorModel.getPath()
                                               .substring(editorModel.getPath()
                                                                     .lastIndexOf(".") + 1);
             if (lastAttribute.equals(attribute)) {
               // last segment: getter only, no extra null-check
               sj04.add(".")
                   .add(createGetterMethodName(attribute))
                   .add("()");
             } else {
               sj04.add(".")
                   .add(createGetterMethodName(attribute))
                   .add("()");
               sj06.add(" && ")
                   .add(sj04.toString())
                   .add(" != null");
             }
           });
    sj06.add(") ? ")
        .add(sj04.toString())
        .add(" : null");
  } else {
    sj06.add(") ? ")
        .add(sj04.toString())
        .add(".")
        .add(createGetterMethodName(editorModel.getPath()))
        .add("() : null");
  }
  typeSpec.addMethod(MethodSpec.methodBuilder("getFromModel")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName())))
                               .addStatement(sj06.toString())
                               .build());
  // setInModel(): "parent.getA().setB(data)"
  StringJoiner sj05 = new StringJoiner("");
  sj05.add("parent.");
  if (editorModel.getPath()
                 .indexOf(".") > 0) {
    Pattern.compile(Pattern.quote("."))
           .splitAsStream(editorModel.getPath())
           .collect(Collectors.toList())
           .forEach(attribute -> {
             String lastAttribute = editorModel.getPath()
                                               .substring(editorModel.getPath()
                                                                     .lastIndexOf(".") + 1);
             if (lastAttribute.equals(attribute)) {
               sj05.add(createSetterMethodName(attribute));
             } else {
               sj05.add(createGetterMethodName(attribute))
                   .add("().");
             }
           });
  } else {
    sj05.add(createSetterMethodName(editorModel.getPath()));
  }
  sj05.add("(data)");
  typeSpec.addMethod(MethodSpec.methodBuilder("setInModel")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .addParameter(ClassName.get(context.getModelReturnTypeForAttribute(editorModel.getAttibuteName())),
                                             "data")
                               .addStatement(sj05.toString())
                               .build());
  JavaFile javaFile = JavaFile.builder(context.getEditorPackageName(),
                                       typeSpec.build())
                              .build();
  // System.out.println(javaFile.toString());
  try {
    javaFile.writeTo(filer);
  } catch (IOException e) {
    e.printStackTrace();
    // NOTE(review): message concatenates package and class name without a '.' separator
    messager.printMessage(Diagnostic.Kind.ERROR,
                          "Error generating source file for type: " + MoreElements.getPackage(editorModel.getEditorTypeElement())
                                                                                  .toString() + editorModel.getContextName());
  }
}
/**
 * Generates {@code <EditorSimpleName>_SimpleBeanEditorDelegate}: the delegate for
 * the top-level editor. It holds editor/object fields with accessors, one delegate
 * field per nested editor attribute, initializeSubDelegates() wiring and an
 * accept(EditorVisitor) that traverses every sub-delegate through its generated
 * EditorContext.
 */
private void generateSimpleBeanEditorDelegateClass(EditorProcessingContext context) {
  TypeSpec.Builder typeSpec = TypeSpec.classBuilder(context.getEditorSimpleName() + "_SimpleBeanEditorDelegate")
      .addModifiers(Modifier.PUBLIC)
      .superclass(ClassName.get(SimpleBeanEditorDelegate.class));
  FieldSpec editorField = FieldSpec.builder(TypeName.get(context.getEditorElement()
                                                                .asType()),
                                            "editor")
                                   .addModifiers(Modifier.PRIVATE)
                                   .build();
  typeSpec.addField(editorField);
  FieldSpec objectField = FieldSpec.builder(TypeName.get(context.getModelElement()
                                                                .asType()),
                                            "object")
                                   .addModifiers(Modifier.PRIVATE)
                                   .build();
  typeSpec.addField(objectField);
  // accessors required by the SimpleBeanEditorDelegate contract
  typeSpec.addMethod(MethodSpec.methodBuilder("getEditor")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .returns(TypeName.get(context.getEditorElement()
                                                            .asType()))
                               .addStatement("return editor")
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("setEditor")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .addParameter(Editor.class,
                                             "editor")
                               .addStatement("this.editor = ($T) editor",
                                             TypeName.get(context.getEditorElement()
                                                                 .asType()))
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("getObject")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(TypeName.get(context.getModelElement()
                                                            .asType()))
                               .addStatement("return object")
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("setObject")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .addParameter(Object.class,
                                             "object")
                               .addStatement("this.object = ($T) object",
                                             TypeName.get(context.getModelElement()
                                                                 .asType()))
                               .build());
  // one delegate field per nested editor attribute, e.g. "addressDelegate"
  context.getEditorModels()
         .stream()
         .forEach(editorModel -> typeSpec.addField(FieldSpec.builder(SimpleBeanEditorDelegate.class,
                                                                     editorModel.getSimpleAttibuteName() + "Delegate")
                                                            .build()));
  MethodSpec.Builder initlializeSubDelegatesMethod = MethodSpec.methodBuilder("initializeSubDelegates")
                                                               .addAnnotation(Override.class)
                                                               .addModifiers(Modifier.PROTECTED);
  // instantiate and register each sub-delegate, guarded against absent editors
  context.getEditorModels()
         .stream()
         .forEach(editorModel -> {
           initlializeSubDelegatesMethod.beginControlFlow("if (editor.$L.asEditor() != null)",
                                                          editorModel.getSimpleEditorAttibuteName());
           initlializeSubDelegatesMethod.addStatement("$LDelegate = new $T()",
                                                      editorModel.getSimpleAttibuteName(),
                                                      ClassName.get(MoreElements.getPackage(editorModel.getEditorTypeElement())
                                                                                .toString(),
                                                                    editorModel.getEditorTypeSimpleName()));
           initlializeSubDelegatesMethod.addStatement("addSubDelegate($LDelegate, appendPath($S), editor.$L.asEditor())",
                                                      editorModel.getSimpleAttibuteName(),
                                                      editorModel.getSimpleAttibuteName(),
                                                      editorModel.getSimpleEditorAttibuteName());
           initlializeSubDelegatesMethod.endControlFlow();
         });
  typeSpec.addMethod(initlializeSubDelegatesMethod.build());
  // accept(visitor): traverse every initialized sub-delegate through its context
  MethodSpec.Builder acceptMethod = MethodSpec.methodBuilder("accept")
                                              .addAnnotation(Override.class)
                                              .addModifiers(Modifier.PUBLIC)
                                              .addParameter(TypeName.get(EditorVisitor.class),
                                                            "visitor");
  context.getEditorModels()
         .stream()
         .forEach(editorModel -> {
           acceptMethod.beginControlFlow("if ($LDelegate != null)",
                                         editorModel.getSimpleAttibuteName());
           acceptMethod.addStatement("$T ctx = new $T(getObject(), editor.$L.asEditor(), appendPath($S))",
                                     ClassName.get(context.getEditorPackageName(),
                                                   editorModel.getContextName()),
                                     ClassName.get(context.getEditorPackageName(),
                                                   editorModel.getContextName()),
                                     editorModel.getSimpleEditorAttibuteName(),
                                     editorModel.getSimpleAttibuteName());
           acceptMethod.addStatement("ctx.setEditorDelegate($LDelegate)",
                                     editorModel.getSimpleAttibuteName());
           acceptMethod.addStatement("ctx.traverse(visitor, $LDelegate)",
                                     editorModel.getSimpleAttibuteName());
           acceptMethod.endControlFlow();
         });
  typeSpec.addMethod(acceptMethod.build());
  JavaFile javaFile = JavaFile.builder(context.getEditorPackageName(),
                                       typeSpec.build())
                              .build();
  // System.out.println(javaFile.toString());
  try {
    javaFile.writeTo(filer);
  } catch (IOException e) {
    e.printStackTrace();
    messager.printMessage(Diagnostic.Kind.ERROR,
                          "Error generating source file for type: " + context.getConsumerPackageName() + "." + context.getConsumerSimpleName() + "_SimpleBeanEditorDelegate");
  }
}
/**
 * Generates the driver implementation {@code <DriverInterface>Impl}: extends
 * AbstractSimpleBeanEditorDriver, implements the annotated interface, overrides
 * accept() to traverse from a RootEditorContext and createDelegate() to return the
 * generated top-level delegate.
 */
private void generateImplClass(EditorProcessingContext context) {
  TypeSpec.Builder typeSpec = TypeSpec.classBuilder(context.getElement()
                                                           .getSimpleName()
                                                           .toString() + "Impl")
      .addModifiers(Modifier.PUBLIC)
      .superclass(ParameterizedTypeName.get(ClassName.get(AbstractSimpleBeanEditorDriver.class),
                                            ClassName.get(context.getModelElement()),
                                            ClassName.get(context.getEditorElement())))
      .addSuperinterface(ClassName.get((TypeElement) context.getElement()));
  ParameterSpec visitorParameter = ParameterSpec.builder(ClassName.get(EditorVisitor.class),
                                                         "visitor")
                                                .build();
  typeSpec.addMethod(MethodSpec.methodBuilder("accept")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(void.class)
                               .addParameter(visitorParameter)
                               .addStatement("$T ctx = new $T(getDelegate(), $T.class, getObject())",
                                             ClassName.get(RootEditorContext.class),
                                             ClassName.get(RootEditorContext.class),
                                             ClassName.get(context.getModelElement()))
                               .addStatement("ctx.traverse($N, getDelegate())",
                                             visitorParameter)
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("createDelegate")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .returns(SimpleBeanEditorDelegate.class)
                               .addStatement("return new $T()",
                                             ClassName.get(context.getEditorPackageName(),
                                                           context.getEditorSimpleName() + "_SimpleBeanEditorDelegate"))
                               .build());
  JavaFile javaFile = JavaFile.builder(context.getConsumerPackageName(),
                                       typeSpec.build())
                              .build();
  // System.out.println(javaFile.toString());
  try {
    javaFile.writeTo(filer);
  } catch (IOException e) {
    e.printStackTrace();
    // NOTE(review): message says "...EditorDriverImpl" but the generated class is "...Impl"
    messager.printMessage(Diagnostic.Kind.ERROR,
                          "Error generating source file for type: " + context.getElement()
                                                                             .getSimpleName()
                                                                             .toString() + "EditorDriverImpl");
  }
}
/** Builds the getter name for an attribute path, e.g. "name" -> "getName". */
private String createGetterMethodName(String path) {
  return createGetterSetterMethodName("get",
                                      path);
}

/** Builds the setter name for an attribute path, e.g. "name" -> "setName". */
private String createSetterMethodName(String path) {
  return createGetterSetterMethodName("set",
                                      path);
}
/**
 * Builds a JavaBean accessor name from a prefix ("get"/"set") and an attribute
 * path. For a dotted path only the last segment is used, e.g. ("get",
 * "address.street") yields "getStreet".
 *
 * @param prefix "get" or "set"
 * @param path   attribute name, optionally a dotted path
 * @return the accessor method name
 */
private String createGetterSetterMethodName(String prefix,
                                            String path) {
  String name = path;
  if (name.indexOf(".") > 0) {
    // Bug fix: the previous substring(indexOf(".") - 1) kept the dot and a stray
    // character (e.g. "address.street" -> "s.street"). Take the text after the
    // LAST dot so a nested path resolves to its final attribute.
    name = name.substring(name.lastIndexOf(".") + 1);
  }
  name = prefix + name.substring(0,
                                 1)
                      .toUpperCase() + name.substring(1);
  return name;
}
/**
 * Generates a minimal SimpleBeanEditorDelegate stub for a nested editor type:
 * editor/object fields with accessors plus empty initializeSubDelegates() and
 * accept() overrides. Each editor type is generated at most once per run.
 */
private void generateEditorClass(EditorModel editorModel) {
  // check whether we did already generate the class
  if (alreadyGeneratedEditorDelegates.contains(editorModel.getEditorTypeSimpleName())) {
    return;
  }
  TypeSpec.Builder typeSpec = TypeSpec.classBuilder(editorModel.getEditorTypeSimpleName())
      .addModifiers(Modifier.PUBLIC)
      .superclass(ClassName.get(SimpleBeanEditorDelegate.class));
  FieldSpec editorField = FieldSpec.builder(editorModel.getEditorTypeAsClassName(),
                                            "editor",
                                            Modifier.PRIVATE)
                                   .build();
  typeSpec.addField(editorField);
  FieldSpec objectField = FieldSpec.builder(editorModel.getDataTypeAsClassName(),
                                            "object",
                                            Modifier.PRIVATE)
                                   .build();
  typeSpec.addField(objectField);
  typeSpec.addMethod(MethodSpec.methodBuilder("getEditor")
                               .returns(editorModel.getEditorTypeAsClassName())
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .addStatement("return $N",
                                             editorField)
                               .build());
  ParameterSpec editorParameter = ParameterSpec.builder(Editor.class,
                                                        "editor")
                                               .build();
  // NOTE(review): unlike the other overrides, setEditor carries no @Override
  // annotation in the generated code -- confirm whether that is intentional
  typeSpec.addMethod(MethodSpec.methodBuilder("setEditor")
                               .addParameter(editorParameter)
                               .addStatement("this.$N = ($T) $N",
                                             editorField,
                                             editorModel.getEditorTypeAsClassName(),
                                             editorParameter)
                               .addModifiers(Modifier.PROTECTED)
                               .build());
  typeSpec.addMethod(MethodSpec.methodBuilder("getObject")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .returns(editorModel.getDataTypeAsClassName())
                               .addStatement("return $N",
                                             objectField)
                               .build());
  ParameterSpec objectParameter = ParameterSpec.builder(Object.class,
                                                        "object")
                                               .build();
  typeSpec.addMethod(MethodSpec.methodBuilder("setObject")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .addParameter(objectParameter)
                               .addStatement("this.$N = ($T) $N",
                                             objectField,
                                             editorModel.getDataTypeAsClassName(),
                                             objectParameter)
                               .build());
  // stub overrides: nested delegates have no own sub-delegates or traversal
  typeSpec.addMethod(MethodSpec.methodBuilder("initializeSubDelegates")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PROTECTED)
                               .build());
  ParameterSpec visitorParameter = ParameterSpec.builder(EditorVisitor.class,
                                                         "visitor")
                                                .build();
  typeSpec.addMethod(MethodSpec.methodBuilder("accept")
                               .addAnnotation(Override.class)
                               .addModifiers(Modifier.PUBLIC)
                               .addParameter(visitorParameter)
                               .build());
  JavaFile javaFile = JavaFile.builder(MoreElements.getPackage(editorModel.getEditorTypeElement())
                                                   .toString(),
                                       typeSpec.build())
                              .build();
  // System.out.println(javaFile.toString());
  try {
    javaFile.writeTo(filer);
    // add file name to the list of already generated files to avoid a second generation
    alreadyGeneratedEditorDelegates.add(editorModel.getEditorTypeSimpleName());
  } catch (IOException e) {
    e.printStackTrace();
    messager.printMessage(Diagnostic.Kind.ERROR,
                          "Error generating source file for type: " + MoreElements.getPackage(editorModel.getEditorTypeElement())
                                                                                  .toString() + editorModel.getEditorTypeSimpleName());
  }
}
/**
 * Fluent builder for {@link EditorProcessingStep}. Fields are package-private by
 * design: the step's constructor reads them directly.
 */
public static final class Builder {
  Messager messager;
  Filer filer;
  Types types;
  Elements elements;

  public Builder withMessenger(Messager messager) {
    this.messager = messager;
    return this;
  }

  public Builder withFiler(Filer filer) {
    this.filer = filer;
    return this;
  }

  public Builder withTypes(Types types) {
    this.types = types;
    return this;
  }

  public Builder withElements(Elements elements) {
    this.elements = elements;
    return this;
  }

  /** @return the configured step; no validation is performed on missing fields */
  public EditorProcessingStep build() {
    return new EditorProcessingStep(this);
  }
}
}
| |
/*******************************************************************************
* Open Behavioral Health Information Technology Architecture (OBHITA.org)
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package gov.samhsa.consent2share.domain.clinicaldata;
import flexjson.JSONDeserializer;
import flexjson.JSONSerializer;
import gov.samhsa.consent2share.domain.patient.Patient;
import gov.samhsa.consent2share.domain.reference.AdverseEventTypeCode;
import gov.samhsa.consent2share.domain.reference.AllergyReactionCode;
import gov.samhsa.consent2share.domain.reference.AllergySeverityCode;
import gov.samhsa.consent2share.domain.reference.AllergyStatusCode;
import gov.samhsa.consent2share.domain.valueobject.CodedConcept;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Version;
import javax.validation.constraints.NotNull;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.springframework.format.annotation.DateTimeFormat;
/**
 * The Class Allergy.
 *
 * <p>JPA entity representing a single patient allergy record: the allergen,
 * its reaction/severity/status codes, the adverse-event type, and the period
 * during which the allergy is (was) active. Instances are persisted via JPA
 * and serialized to/from JSON with flexjson.</p>
 *
 * <p>NOTE: field declaration order is deliberately preserved — both
 * {@link ReflectionToStringBuilder} and flexjson walk fields reflectively,
 * so reordering declarations would change {@code toString()}/JSON output.</p>
 */
@Entity
public class Allergy {

    /** The adverse event type code. */
    @ManyToOne
    private AdverseEventTypeCode adverseEventTypeCode;

    /** The allergen (embedded coded concept, stored in this entity's table). */
    @Embedded
    private CodedConcept allergen;

    /** The allergy reaction. */
    @ManyToOne
    private AllergyReactionCode allergyReaction;

    /** The allergy status code. */
    @ManyToOne
    private AllergyStatusCode allergyStatusCode;

    /** The allergy severity code. */
    @ManyToOne
    private AllergySeverityCode allergySeverityCode;

    /** The allergy start date. Required (NotNull); stored as a timestamp. */
    @NotNull
    @Temporal(TemporalType.TIMESTAMP)
    @DateTimeFormat(style = "M-")
    private Date allergyStartDate;

    /** The allergy end date. Optional — null means the allergy is ongoing. */
    @Temporal(TemporalType.TIMESTAMP)
    @DateTimeFormat(style = "M-")
    private Date allergyEndDate;

    /** The patient this allergy belongs to. */
    @ManyToOne
    private Patient patient;

    /**
     * Gets the adverse event type code.
     *
     * @return the adverse event type code
     */
    public AdverseEventTypeCode getAdverseEventTypeCode() {
        return this.adverseEventTypeCode;
    }

    /**
     * Sets the adverse event type code.
     *
     * @param adverseEventTypeCode the new adverse event type code
     */
    public void setAdverseEventTypeCode(AdverseEventTypeCode adverseEventTypeCode) {
        this.adverseEventTypeCode = adverseEventTypeCode;
    }

    /**
     * Gets the allergen.
     *
     * @return the allergen
     */
    public CodedConcept getAllergen() {
        return this.allergen;
    }

    /**
     * Sets the allergen.
     *
     * @param allergen the new allergen
     */
    public void setAllergen(CodedConcept allergen) {
        this.allergen = allergen;
    }

    /**
     * Gets the allergy reaction.
     *
     * @return the allergy reaction
     */
    public AllergyReactionCode getAllergyReaction() {
        return this.allergyReaction;
    }

    /**
     * Sets the allergy reaction.
     *
     * @param allergyReaction the new allergy reaction
     */
    public void setAllergyReaction(AllergyReactionCode allergyReaction) {
        this.allergyReaction = allergyReaction;
    }

    /**
     * Gets the allergy status code.
     *
     * @return the allergy status code
     */
    public AllergyStatusCode getAllergyStatusCode() {
        return this.allergyStatusCode;
    }

    /**
     * Sets the allergy status code.
     *
     * @param allergyStatusCode the new allergy status code
     */
    public void setAllergyStatusCode(AllergyStatusCode allergyStatusCode) {
        this.allergyStatusCode = allergyStatusCode;
    }

    /**
     * Gets the allergy severity code.
     *
     * @return the allergy severity code
     */
    public AllergySeverityCode getAllergySeverityCode() {
        return this.allergySeverityCode;
    }

    /**
     * Sets the allergy severity code.
     *
     * @param allergySeverityCode the new allergy severity code
     */
    public void setAllergySeverityCode(AllergySeverityCode allergySeverityCode) {
        this.allergySeverityCode = allergySeverityCode;
    }

    /**
     * Gets the allergy start date.
     *
     * @return the allergy start date
     */
    public Date getAllergyStartDate() {
        return this.allergyStartDate;
    }

    /**
     * Sets the allergy start date.
     *
     * @param allergyStartDate the new allergy start date
     */
    public void setAllergyStartDate(Date allergyStartDate) {
        this.allergyStartDate = allergyStartDate;
    }

    /**
     * Gets the allergy end date.
     *
     * @return the allergy end date
     */
    public Date getAllergyEndDate() {
        return this.allergyEndDate;
    }

    /**
     * Sets the allergy end date.
     *
     * @param allergyEndDate the new allergy end date
     */
    public void setAllergyEndDate(Date allergyEndDate) {
        this.allergyEndDate = allergyEndDate;
    }

    /**
     * Gets the patient.
     *
     * @return the patient
     */
    public Patient getPatient() {
        return this.patient;
    }

    /**
     * Sets the patient.
     *
     * @param patient the new patient
     */
    public void setPatient(Patient patient) {
        this.patient = patient;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Reflective dump of all fields in declaration order; SHORT_PREFIX_STYLE
        // omits the package name from the class prefix.
        return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }

    /**
     * To json.
     *
     * @return this entity deeply serialized as JSON, with {@code class}
     *         metadata properties excluded at every level
     */
    public String toJson() {
        return new JSONSerializer().exclude("*.class").deepSerialize(this);
    }

    /**
     * From json to allergy.
     *
     * @param json the json
     * @return the deserialized allergy
     */
    public static Allergy fromJsonToAllergy(String json) {
        return new JSONDeserializer<Allergy>().use(null, Allergy.class).deserialize(json);
    }

    /**
     * To json array.
     *
     * @param collection the collection
     * @return the collection deeply serialized as a JSON array, with
     *         {@code class} metadata properties excluded
     */
    public static String toJsonArray(Collection<Allergy> collection) {
        return new JSONSerializer().exclude("*.class").deepSerialize(collection);
    }

    /**
     * From json array to allergys.
     *
     * <p>Method name (misspelled plural) is kept as-is: it is generated and
     * part of the public API used by callers.</p>
     *
     * @param json the json
     * @return the collection of deserialized allergies
     */
    public static Collection<Allergy> fromJsonArrayToAllergys(String json) {
        return new JSONDeserializer<List<Allergy>>().use(null, ArrayList.class).use("values", Allergy.class).deserialize(json);
    }

    /** The database-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column(name = "id")
    private Long id;

    /** The version, used by JPA for optimistic locking. */
    @Version
    @Column(name = "version")
    private Integer version;

    /**
     * Gets the id.
     *
     * @return the id, or null if the entity has not been persisted yet
     */
    public Long getId() {
        return this.id;
    }

    /**
     * Sets the id.
     *
     * @param id the new id
     */
    public void setId(Long id) {
        this.id = id;
    }

    /**
     * Gets the version.
     *
     * @return the optimistic-locking version
     */
    public Integer getVersion() {
        return this.version;
    }

    /**
     * Sets the version.
     *
     * @param version the new version
     */
    public void setVersion(Integer version) {
        this.version = version;
    }
}
| |
package redis.clients.jedis;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Common interface for sharded and non-sharded BinaryJedis.
 *
 * <p>All commands take and return raw {@code byte[]} keys/values (the binary
 * counterpart of the String-based JedisCommands). Methods are grouped below by
 * the Redis data type they operate on.</p>
 */
public interface BinaryJedisCommands {

    // ---- String / generic key commands ----

    String set(byte[] key, byte[] value);

    byte[] get(byte[] key);

    Boolean exists(byte[] key);

    /** Removes the TTL from a key, making it persistent. */
    Long persist(byte[] key);

    String type(byte[] key);

    Long expire(byte[] key, int seconds);

    Long expireAt(byte[] key, long unixTime);

    Long ttl(byte[] key);

    // ---- Bit operations ----

    Boolean setbit(byte[] key, long offset, boolean value);

    Boolean setbit(byte[] key, long offset, byte[] value);

    Boolean getbit(byte[] key, long offset);

    Long setrange(byte[] key, long offset, byte[] value);

    byte[] getrange(byte[] key, long startOffset, long endOffset);

    byte[] getSet(byte[] key, byte[] value);

    /** SET if Not eXists; returns 1 when the key was set, 0 otherwise. */
    Long setnx(byte[] key, byte[] value);

    String setex(byte[] key, int seconds, byte[] value);

    // ---- Counters ----

    Long decrBy(byte[] key, long integer);

    Long decr(byte[] key);

    Long incrBy(byte[] key, long integer);

    Double incrByFloat(byte[] key, double value);

    Long incr(byte[] key);

    Long append(byte[] key, byte[] value);

    byte[] substr(byte[] key, int start, int end);

    // ---- Hash commands ----

    Long hset(byte[] key, byte[] field, byte[] value);

    byte[] hget(byte[] key, byte[] field);

    Long hsetnx(byte[] key, byte[] field, byte[] value);

    String hmset(byte[] key, Map<byte[], byte[]> hash);

    List<byte[]> hmget(byte[] key, byte[]... fields);

    Long hincrBy(byte[] key, byte[] field, long value);

    Double hincrByFloat(byte[] key, byte[] field, double value);

    Boolean hexists(byte[] key, byte[] field);

    Long hdel(byte[] key, byte[]... field);

    Long hlen(byte[] key);

    Set<byte[]> hkeys(byte[] key);

    Collection<byte[]> hvals(byte[] key);

    Map<byte[], byte[]> hgetAll(byte[] key);

    // ---- List commands ----

    Long rpush(byte[] key, byte[]... args);

    Long lpush(byte[] key, byte[]... args);

    Long llen(byte[] key);

    List<byte[]> lrange(byte[] key, long start, long end);

    String ltrim(byte[] key, long start, long end);

    byte[] lindex(byte[] key, long index);

    String lset(byte[] key, long index, byte[] value);

    Long lrem(byte[] key, long count, byte[] value);

    byte[] lpop(byte[] key);

    byte[] rpop(byte[] key);

    // ---- Set commands ----

    Long sadd(byte[] key, byte[]... member);

    Set<byte[]> smembers(byte[] key);

    Long srem(byte[] key, byte[]... member);

    byte[] spop(byte[] key);

    Long scard(byte[] key);

    Boolean sismember(byte[] key, byte[] member);

    byte[] srandmember(byte[] key);

    List<byte[]> srandmember(final byte[] key, final int count);

    Long strlen(byte[] key);

    // ---- Sorted-set (zset) commands ----

    Long zadd(byte[] key, double score, byte[] member);

    Long zadd(byte[] key, Map<byte[], Double> scoreMembers);

    Set<byte[]> zrange(byte[] key, long start, long end);

    Long zrem(byte[] key, byte[]... member);

    Double zincrby(byte[] key, double score, byte[] member);

    Long zrank(byte[] key, byte[] member);

    Long zrevrank(byte[] key, byte[] member);

    Set<byte[]> zrevrange(byte[] key, long start, long end);

    Set<Tuple> zrangeWithScores(byte[] key, long start, long end);

    Set<Tuple> zrevrangeWithScores(byte[] key, long start, long end);

    Long zcard(byte[] key);

    Double zscore(byte[] key, byte[] member);

    List<byte[]> sort(byte[] key);

    List<byte[]> sort(byte[] key, SortingParams sortingParameters);

    Long zcount(byte[] key, double min, double max);

    Long zcount(byte[] key, byte[] min, byte[] max);

    // Range-by-score variants: byte[] min/max accept Redis interval syntax
    // such as "(1.5", "-inf", "+inf"; the offset/count overloads page results.

    Set<byte[]> zrangeByScore(byte[] key, double min, double max);

    Set<byte[]> zrangeByScore(byte[] key, byte[] min, byte[] max);

    Set<byte[]> zrevrangeByScore(byte[] key, double max, double min);

    Set<byte[]> zrangeByScore(byte[] key, double min, double max, int offset,
            int count);

    Set<byte[]> zrevrangeByScore(byte[] key, byte[] max, byte[] min);

    Set<byte[]> zrangeByScore(byte[] key, byte[] min, byte[] max, int offset,
            int count);

    Set<byte[]> zrevrangeByScore(byte[] key, double max, double min,
            int offset, int count);

    Set<Tuple> zrangeByScoreWithScores(byte[] key, double min, double max);

    Set<Tuple> zrevrangeByScoreWithScores(byte[] key, double max, double min);

    Set<Tuple> zrangeByScoreWithScores(byte[] key, double min, double max,
            int offset, int count);

    Set<byte[]> zrevrangeByScore(byte[] key, byte[] max, byte[] min,
            int offset, int count);

    Set<Tuple> zrangeByScoreWithScores(byte[] key, byte[] min, byte[] max);

    Set<Tuple> zrevrangeByScoreWithScores(byte[] key, byte[] max, byte[] min);

    Set<Tuple> zrangeByScoreWithScores(byte[] key, byte[] min, byte[] max,
            int offset, int count);

    Set<Tuple> zrevrangeByScoreWithScores(byte[] key, double max, double min,
            int offset, int count);

    Set<Tuple> zrevrangeByScoreWithScores(byte[] key, byte[] max, byte[] min,
            int offset, int count);

    Long zremrangeByRank(byte[] key, long start, long end);

    Long zremrangeByScore(byte[] key, double start, double end);

    Long zremrangeByScore(byte[] key, byte[] start, byte[] end);

    // ---- Lexicographic sorted-set commands ----

    Long zlexcount(final byte[] key, final byte[] min, final byte[] max);

    Set<byte[]> zrangeByLex(final byte[] key, final byte[] min, final byte[] max);

    Set<byte[]> zrangeByLex(final byte[] key, final byte[] min, final byte[] max,
            int offset, int count);

    Long zremrangeByLex(final byte[] key, final byte[] min, final byte[] max);

    // ---- List insert / push-if-exists / blocking commands ----

    Long linsert(byte[] key, Client.LIST_POSITION where, byte[] pivot,
            byte[] value);

    Long lpushx(byte[] key, byte[]... arg);

    Long rpushx(byte[] key, byte[]... arg);

    List<byte[]> blpop(byte[] arg);

    List<byte[]> brpop(byte[] arg);

    // ---- Misc key commands ----

    Long del(byte[] key);

    byte[] echo(byte[] arg);

    Long move(byte[] key, int dbIndex);

    Long bitcount(final byte[] key);

    Long bitcount(final byte[] key, long start, long end);

    // ---- HyperLogLog commands ----

    Long pfadd(final byte[] key, final byte[]... elements);

    // NOTE(review): returns primitive long, unlike every other command here
    // which returns a boxed type. Changing it now would break all
    // implementors, so the inconsistency is documented rather than fixed.
    long pfcount(final byte[] key);
}
| |
package org.zstack.appliancevm;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.zstack.appliancevm.ApplianceVmCommands.InitCmd;
import org.zstack.appliancevm.ApplianceVmCommands.InitRsp;
import org.zstack.appliancevm.ApplianceVmConstant.Params;
import org.zstack.core.CoreGlobalProperty;
import org.zstack.core.ansible.AnsibleFacade;
import org.zstack.core.ansible.AnsibleGlobalProperty;
import org.zstack.core.ansible.AnsibleRunner;
import org.zstack.core.ansible.SshFileMd5Checker;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.timeout.ApiTimeoutManager;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.message.MessageReply;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.vm.VmInstanceConstant;
import org.zstack.header.vm.VmInstanceConstant.VmOperation;
import org.zstack.header.vm.VmInstanceSpec;
import org.zstack.header.vm.VmNicInventory;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.function.Function;
import org.zstack.utils.path.PathUtil;
import java.util.Map;
import static org.zstack.core.Platform.operr;
/**
 * Flow that deploys the zstack agent onto an appliance VM over SSH/Ansible
 * and then verifies the agent is reachable (echo) and initialized (init).
 *
 * <p>No rollback is needed: deploying/initializing the agent is idempotent
 * from the flow chain's point of view (hence {@link NoRollbackFlow}).</p>
 */
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class ApplianceVmDeployAgentFlow extends NoRollbackFlow {
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private RESTFacade restf;
    @Autowired
    private CloudBus bus;
    @Autowired
    private ErrorFacade errf;
    @Autowired
    private ApiTimeoutManager apiTimeoutManager;
    @Autowired
    private AnsibleFacade asf;

    /**
     * Runs the post-deployment handshake with the appliance VM agent as a
     * two-step sub-flow: (1) echo the agent URL until it answers, then
     * (2) send an init command through the cloud bus.
     *
     * @param echoUrl      agent echo endpoint URL on the VM's management NIC
     * @param apvmUuid     uuid of the appliance VM being connected
     * @param outerTrigger trigger of the enclosing flow; completed/failed when
     *                     this sub-chain finishes
     */
    private void continueConnect(final String echoUrl, final String apvmUuid, final FlowTrigger outerTrigger) {
        FlowChain chain = FlowChainBuilder.newShareFlowChain();
        chain.setName("continue-connect-appliance-vm");
        chain.then(new ShareFlow() {
            @Override
            public void setup() {
                // Step 1: wait for the agent HTTP endpoint to respond.
                flow(new NoRollbackFlow() {
                    String __name__ = "echo";
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        restf.echo(echoUrl, new Completion(trigger) {
                            @Override
                            public void success() {
                                trigger.next();
                            }
                            @Override
                            public void fail(ErrorCode errorCode) {
                                trigger.fail(errorCode);
                            }
                        });
                    }
                });
                // Step 2: ask the agent to initialize itself via the bus.
                flow(new NoRollbackFlow() {
                    String __name__ = "init";
                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        InitCmd cmd = new InitCmd();
                        ApplianceVmAsyncHttpCallMsg msg = new ApplianceVmAsyncHttpCallMsg();
                        msg.setVmInstanceUuid(apvmUuid);
                        msg.setCommand(cmd);
                        // The VM may not be marked Connected yet, so skip the
                        // usual status check on the message.
                        msg.setCheckStatus(false);
                        msg.setPath(ApplianceVmConstant.INIT_PATH);
                        bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, apvmUuid);
                        bus.send(msg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                // Two failure layers: the bus reply itself,
                                // then the agent's init response inside it.
                                if (!reply.isSuccess()) {
                                    trigger.fail(reply.getError());
                                    return;
                                }
                                ApplianceVmAsyncHttpCallReply ar = reply.castReply();
                                InitRsp rsp = ar.toResponse(InitRsp.class);
                                if (!rsp.isSuccess()) {
                                    trigger.fail(operr("operation error, because:%s", rsp.getError()));
                                    return;
                                }
                                trigger.next();
                            }
                        });
                    }
                });
                // Propagate the sub-chain's outcome to the enclosing flow.
                done(new FlowDoneHandler(outerTrigger) {
                    @Override
                    public void handle(Map data) {
                        outerTrigger.next();
                    }
                });
                error(new FlowErrorHandler(outerTrigger) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        outerTrigger.fail(errCode);
                    }
                });
            }
        }).start();
    }

    /**
     * Entry point. Resolves the appliance VM's uuid and management NIC IP
     * (either from the in-flight {@code VmInstanceSpec} for a fresh
     * create/start, or from the database / flow data for a reconnect), then
     * either skips straight to the agent handshake or first pushes the agent
     * package to the VM with Ansible.
     *
     * @param trigger flow trigger to complete or fail
     * @param data    flow chain data map (see {@code Params} keys used below)
     */
    @Override
    public void run(final FlowTrigger trigger, Map data) {
        boolean isReconnect = Boolean.valueOf((String) data.get(Params.isReconnect.toString()));
        final String apvmUuid;
        String mgmtNicIp;
        if (!isReconnect) {
            VmNicInventory mgmtNic;
            final VmInstanceSpec spec = (VmInstanceSpec) data.get(VmInstanceConstant.Params.VmInstanceSpec.toString());
            if (spec.getCurrentVmOperation() == VmOperation.NewCreate) {
                // New VM: the management NIC is the destination NIC on the
                // L3 network named in the appliance VM spec.
                final ApplianceVmSpec aspec = spec.getExtensionData(ApplianceVmConstant.Params.applianceVmSpec.toString(), ApplianceVmSpec.class);
                mgmtNic = CollectionUtils.find(spec.getDestNics(), new Function<VmNicInventory, VmNicInventory>() {
                    @Override
                    public VmNicInventory call(VmNicInventory arg) {
                        return arg.getL3NetworkUuid().equals(aspec.getManagementNic().getL3NetworkUuid()) ? arg : null;
                    }
                });
                apvmUuid = spec.getVmInventory().getUuid();
            } else {
                // Existing VM (e.g. restart): load the management NIC from DB.
                ApplianceVmVO avo = dbf.findByUuid(spec.getVmInventory().getUuid(), ApplianceVmVO.class);
                ApplianceVmInventory ainv = ApplianceVmInventory.valueOf(avo);
                mgmtNic = ainv.getManagementNic();
                apvmUuid = avo.getUuid();
            }
            mgmtNicIp = mgmtNic.getIp();
        } else {
            // Reconnect: uuid and IP were placed into the data map upstream.
            mgmtNicIp = (String) data.get(Params.managementNicIp.toString());
            apvmUuid = (String) data.get(Params.applianceVmUuid.toString());
        }
        final String mgmtIp = mgmtNicIp;
        final String url = ApplianceVmBase.buildAgentUrl(mgmtIp, ApplianceVmConstant.ECHO_PATH, ApplianceVmGlobalProperty.AGENT_PORT);
        // Skip the Ansible deployment entirely in unit tests, or on a
        // non-reconnect start when agent deployment on start is disabled.
        if (CoreGlobalProperty.UNIT_TEST_ON) {
            continueConnect(url, apvmUuid, trigger);
            return;
        } else if (!isReconnect && !ApplianceVmGlobalConfig.DEPLOY_AGENT_ON_START.value(Boolean.class)) {
            continueConnect(url, apvmUuid, trigger);
            return;
        }
        final String username = "root";
        final String privKey = asf.getPrivateKey();
        // MD5 checker lets Ansible skip re-uploading packages already present
        // on the VM with matching checksums.
        SshFileMd5Checker checker = new SshFileMd5Checker();
        checker.setTargetIp(mgmtIp);
        checker.setUsername(username);
        checker.setPrivateKey(privKey);
        checker.addSrcDestPair(SshFileMd5Checker.ZSTACKLIB_SRC_PATH, String.format("/var/lib/zstack/appliancevm/package/%s", AnsibleGlobalProperty.ZSTACKLIB_PACKAGE_NAME));
        checker.addSrcDestPair(PathUtil.findFileOnClassPath(String.format("ansible/appliancevm/%s", ApplianceVmGlobalProperty.AGENT_PACKAGE_NAME), true).getAbsolutePath(),
                String.format("/var/lib/zstack/appliancevm/package/%s", ApplianceVmGlobalProperty.AGENT_PACKAGE_NAME));
        AnsibleRunner runner = new AnsibleRunner();
        runner.installChecker(checker);
        runner.setUsername(username);
        runner.setPlayBookName(ApplianceVmConstant.ANSIBLE_PLAYBOOK_NAME);
        runner.setPrivateKey(privKey);
        runner.setAgentPort(ApplianceVmGlobalProperty.AGENT_PORT);
        runner.setTargetIp(mgmtIp);
        runner.putArgument("pkg_appliancevm", ApplianceVmGlobalProperty.AGENT_PACKAGE_NAME);
        if (CoreGlobalProperty.SYNC_NODE_TIME) {
            // Time sync requested but no chrony servers configured is a hard
            // error rather than a silent skip.
            if (CoreGlobalProperty.CHRONY_SERVERS == null || CoreGlobalProperty.CHRONY_SERVERS.isEmpty()) {
                trigger.fail(operr("chrony server not configured!"));
                return;
            }
            runner.putArgument("chrony_servers", String.join(",", CoreGlobalProperty.CHRONY_SERVERS));
        }
        runner.run(new ReturnValueCompletion<Boolean>(trigger) {
            @Override
            public void success(Boolean deployed) {
                // Agent package in place — proceed to the echo/init handshake.
                continueConnect(url, apvmUuid, trigger);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.dossiermgt.service.base;
import com.liferay.portal.kernel.bean.BeanReference;
import com.liferay.portal.kernel.bean.IdentifiableBean;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdate;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdateFactoryUtil;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.service.BaseServiceImpl;
import com.liferay.portal.service.persistence.UserPersistence;
import org.oep.dossiermgt.model.DossierProcAgent;
import org.oep.dossiermgt.service.DossierProcAgentService;
import org.oep.dossiermgt.service.persistence.DocFilePersistence;
import org.oep.dossiermgt.service.persistence.DocFileVersionPersistence;
import org.oep.dossiermgt.service.persistence.DocTemplateFinder;
import org.oep.dossiermgt.service.persistence.DocTemplatePersistence;
import org.oep.dossiermgt.service.persistence.DossierDocPersistence;
import org.oep.dossiermgt.service.persistence.DossierFolder2RolePersistence;
import org.oep.dossiermgt.service.persistence.DossierFolderPersistence;
import org.oep.dossiermgt.service.persistence.DossierPersistence;
import org.oep.dossiermgt.service.persistence.DossierProcAgentPersistence;
import org.oep.dossiermgt.service.persistence.DossierProcBookmarkPersistence;
import org.oep.dossiermgt.service.persistence.DossierProcFinder;
import org.oep.dossiermgt.service.persistence.DossierProcPersistence;
import org.oep.dossiermgt.service.persistence.DossierTagPersistence;
import org.oep.dossiermgt.service.persistence.EbMessagePersistence;
import org.oep.dossiermgt.service.persistence.EbPartnerShipFinder;
import org.oep.dossiermgt.service.persistence.EbPartnerShipPersistence;
import org.oep.dossiermgt.service.persistence.PaymentConfigPersistence;
import org.oep.dossiermgt.service.persistence.PaymentFilePersistence;
import org.oep.dossiermgt.service.persistence.PaymentRequestPersistence;
import org.oep.dossiermgt.service.persistence.ProfileDataPersistence;
import org.oep.dossiermgt.service.persistence.StatisticByAgencyPersistence;
import org.oep.dossiermgt.service.persistence.StatisticByDayPersistence;
import org.oep.dossiermgt.service.persistence.StatisticByDomainPersistence;
import javax.sql.DataSource;
/**
* Provides the base implementation for the dossier proc agent remote service.
*
* <p>
* This implementation exists only as a container for the default service methods generated by ServiceBuilder. All custom service methods should be put in {@link org.oep.dossiermgt.service.impl.DossierProcAgentServiceImpl}.
* </p>
*
* @author trungdk
* @see org.oep.dossiermgt.service.impl.DossierProcAgentServiceImpl
* @see org.oep.dossiermgt.service.DossierProcAgentServiceUtil
* @generated
*/
public abstract class DossierProcAgentServiceBaseImpl extends BaseServiceImpl
implements DossierProcAgentService, IdentifiableBean {
/*
* NOTE FOR DEVELOPERS:
*
* Never modify or reference this class directly. Always use {@link org.oep.dossiermgt.service.DossierProcAgentServiceUtil} to access the dossier proc agent remote service.
*/
/**
* Returns the doc file local service.
*
* @return the doc file local service
*/
public org.oep.dossiermgt.service.DocFileLocalService getDocFileLocalService() {
return docFileLocalService;
}
/**
* Sets the doc file local service.
*
* @param docFileLocalService the doc file local service
*/
public void setDocFileLocalService(
org.oep.dossiermgt.service.DocFileLocalService docFileLocalService) {
this.docFileLocalService = docFileLocalService;
}
/**
* Returns the doc file remote service.
*
* @return the doc file remote service
*/
public org.oep.dossiermgt.service.DocFileService getDocFileService() {
return docFileService;
}
/**
* Sets the doc file remote service.
*
* @param docFileService the doc file remote service
*/
public void setDocFileService(
org.oep.dossiermgt.service.DocFileService docFileService) {
this.docFileService = docFileService;
}
/**
* Returns the doc file persistence.
*
* @return the doc file persistence
*/
public DocFilePersistence getDocFilePersistence() {
return docFilePersistence;
}
/**
* Sets the doc file persistence.
*
* @param docFilePersistence the doc file persistence
*/
public void setDocFilePersistence(DocFilePersistence docFilePersistence) {
this.docFilePersistence = docFilePersistence;
}
/**
* Returns the doc file version local service.
*
* @return the doc file version local service
*/
public org.oep.dossiermgt.service.DocFileVersionLocalService getDocFileVersionLocalService() {
return docFileVersionLocalService;
}
/**
* Sets the doc file version local service.
*
* @param docFileVersionLocalService the doc file version local service
*/
public void setDocFileVersionLocalService(
org.oep.dossiermgt.service.DocFileVersionLocalService docFileVersionLocalService) {
this.docFileVersionLocalService = docFileVersionLocalService;
}
/**
* Returns the doc file version remote service.
*
* @return the doc file version remote service
*/
public org.oep.dossiermgt.service.DocFileVersionService getDocFileVersionService() {
return docFileVersionService;
}
/**
* Sets the doc file version remote service.
*
* @param docFileVersionService the doc file version remote service
*/
public void setDocFileVersionService(
org.oep.dossiermgt.service.DocFileVersionService docFileVersionService) {
this.docFileVersionService = docFileVersionService;
}
/**
* Returns the doc file version persistence.
*
* @return the doc file version persistence
*/
public DocFileVersionPersistence getDocFileVersionPersistence() {
return docFileVersionPersistence;
}
/**
* Sets the doc file version persistence.
*
* @param docFileVersionPersistence the doc file version persistence
*/
public void setDocFileVersionPersistence(
DocFileVersionPersistence docFileVersionPersistence) {
this.docFileVersionPersistence = docFileVersionPersistence;
}
/**
* Returns the doc template local service.
*
* @return the doc template local service
*/
public org.oep.dossiermgt.service.DocTemplateLocalService getDocTemplateLocalService() {
return docTemplateLocalService;
}
/**
* Sets the doc template local service.
*
* @param docTemplateLocalService the doc template local service
*/
public void setDocTemplateLocalService(
org.oep.dossiermgt.service.DocTemplateLocalService docTemplateLocalService) {
this.docTemplateLocalService = docTemplateLocalService;
}
/**
* Returns the doc template remote service.
*
* @return the doc template remote service
*/
public org.oep.dossiermgt.service.DocTemplateService getDocTemplateService() {
return docTemplateService;
}
/**
* Sets the doc template remote service.
*
* @param docTemplateService the doc template remote service
*/
public void setDocTemplateService(
org.oep.dossiermgt.service.DocTemplateService docTemplateService) {
this.docTemplateService = docTemplateService;
}
/**
* Returns the doc template persistence.
*
* @return the doc template persistence
*/
public DocTemplatePersistence getDocTemplatePersistence() {
return docTemplatePersistence;
}
/**
* Sets the doc template persistence.
*
* @param docTemplatePersistence the doc template persistence
*/
public void setDocTemplatePersistence(
DocTemplatePersistence docTemplatePersistence) {
this.docTemplatePersistence = docTemplatePersistence;
}
/**
* Returns the doc template finder.
*
* @return the doc template finder
*/
public DocTemplateFinder getDocTemplateFinder() {
return docTemplateFinder;
}
/**
* Sets the doc template finder.
*
* @param docTemplateFinder the doc template finder
*/
public void setDocTemplateFinder(DocTemplateFinder docTemplateFinder) {
this.docTemplateFinder = docTemplateFinder;
}
/**
* Returns the dossier local service.
*
* @return the dossier local service
*/
public org.oep.dossiermgt.service.DossierLocalService getDossierLocalService() {
return dossierLocalService;
}
/**
* Sets the dossier local service.
*
* @param dossierLocalService the dossier local service
*/
public void setDossierLocalService(
org.oep.dossiermgt.service.DossierLocalService dossierLocalService) {
this.dossierLocalService = dossierLocalService;
}
/**
* Returns the dossier persistence.
*
* @return the dossier persistence
*/
public DossierPersistence getDossierPersistence() {
return dossierPersistence;
}
/**
* Sets the dossier persistence.
*
* @param dossierPersistence the dossier persistence
*/
public void setDossierPersistence(DossierPersistence dossierPersistence) {
this.dossierPersistence = dossierPersistence;
}
/**
* Returns the dossier doc local service.
*
* @return the dossier doc local service
*/
public org.oep.dossiermgt.service.DossierDocLocalService getDossierDocLocalService() {
return dossierDocLocalService;
}
/**
* Sets the dossier doc local service.
*
* @param dossierDocLocalService the dossier doc local service
*/
public void setDossierDocLocalService(
org.oep.dossiermgt.service.DossierDocLocalService dossierDocLocalService) {
this.dossierDocLocalService = dossierDocLocalService;
}
/**
* Returns the dossier doc remote service.
*
* @return the dossier doc remote service
*/
public org.oep.dossiermgt.service.DossierDocService getDossierDocService() {
return dossierDocService;
}
/**
* Sets the dossier doc remote service.
*
* @param dossierDocService the dossier doc remote service
*/
public void setDossierDocService(
org.oep.dossiermgt.service.DossierDocService dossierDocService) {
this.dossierDocService = dossierDocService;
}
/**
* Returns the dossier doc persistence.
*
* @return the dossier doc persistence
*/
public DossierDocPersistence getDossierDocPersistence() {
return dossierDocPersistence;
}
/**
* Sets the dossier doc persistence.
*
* @param dossierDocPersistence the dossier doc persistence
*/
public void setDossierDocPersistence(
DossierDocPersistence dossierDocPersistence) {
this.dossierDocPersistence = dossierDocPersistence;
}
/**
* Returns the dossier folder local service.
*
* @return the dossier folder local service
*/
public org.oep.dossiermgt.service.DossierFolderLocalService getDossierFolderLocalService() {
return dossierFolderLocalService;
}
/**
* Sets the dossier folder local service.
*
* @param dossierFolderLocalService the dossier folder local service
*/
public void setDossierFolderLocalService(
org.oep.dossiermgt.service.DossierFolderLocalService dossierFolderLocalService) {
this.dossierFolderLocalService = dossierFolderLocalService;
}
/**
* Returns the dossier folder remote service.
*
* @return the dossier folder remote service
*/
public org.oep.dossiermgt.service.DossierFolderService getDossierFolderService() {
return dossierFolderService;
}
/**
* Sets the dossier folder remote service.
*
* @param dossierFolderService the dossier folder remote service
*/
public void setDossierFolderService(
org.oep.dossiermgt.service.DossierFolderService dossierFolderService) {
this.dossierFolderService = dossierFolderService;
}
/**
* Returns the dossier folder persistence.
*
* @return the dossier folder persistence
*/
public DossierFolderPersistence getDossierFolderPersistence() {
return dossierFolderPersistence;
}
/**
* Sets the dossier folder persistence.
*
* @param dossierFolderPersistence the dossier folder persistence
*/
public void setDossierFolderPersistence(
DossierFolderPersistence dossierFolderPersistence) {
this.dossierFolderPersistence = dossierFolderPersistence;
}
/**
* Returns the dossier folder2 role local service.
*
* @return the dossier folder2 role local service
*/
public org.oep.dossiermgt.service.DossierFolder2RoleLocalService getDossierFolder2RoleLocalService() {
return dossierFolder2RoleLocalService;
}
/**
* Sets the dossier folder2 role local service.
*
* @param dossierFolder2RoleLocalService the dossier folder2 role local service
*/
public void setDossierFolder2RoleLocalService(
org.oep.dossiermgt.service.DossierFolder2RoleLocalService dossierFolder2RoleLocalService) {
this.dossierFolder2RoleLocalService = dossierFolder2RoleLocalService;
}
/**
* Returns the dossier folder2 role remote service.
*
* @return the dossier folder2 role remote service
*/
// NOTE(review): plain getter/setter pairs for Spring-injected service,
// persistence, and finder beans; the backing fields are declared with
// @BeanReference near the end of this class. This section appears to be
// Liferay ServiceBuilder-generated -- confirm before hand-editing, since
// regeneration would overwrite manual changes.
public org.oep.dossiermgt.service.DossierFolder2RoleService getDossierFolder2RoleService() {
return dossierFolder2RoleService;
}
/**
* Sets the dossier folder2 role remote service.
*
* @param dossierFolder2RoleService the dossier folder2 role remote service
*/
public void setDossierFolder2RoleService(
org.oep.dossiermgt.service.DossierFolder2RoleService dossierFolder2RoleService) {
this.dossierFolder2RoleService = dossierFolder2RoleService;
}
/**
* Returns the dossier folder2 role persistence.
*
* @return the dossier folder2 role persistence
*/
public DossierFolder2RolePersistence getDossierFolder2RolePersistence() {
return dossierFolder2RolePersistence;
}
/**
* Sets the dossier folder2 role persistence.
*
* @param dossierFolder2RolePersistence the dossier folder2 role persistence
*/
public void setDossierFolder2RolePersistence(
DossierFolder2RolePersistence dossierFolder2RolePersistence) {
this.dossierFolder2RolePersistence = dossierFolder2RolePersistence;
}
/**
* Returns the dossier proc local service.
*
* @return the dossier proc local service
*/
public org.oep.dossiermgt.service.DossierProcLocalService getDossierProcLocalService() {
return dossierProcLocalService;
}
/**
* Sets the dossier proc local service.
*
* @param dossierProcLocalService the dossier proc local service
*/
public void setDossierProcLocalService(
org.oep.dossiermgt.service.DossierProcLocalService dossierProcLocalService) {
this.dossierProcLocalService = dossierProcLocalService;
}
/**
* Returns the dossier proc remote service.
*
* @return the dossier proc remote service
*/
public org.oep.dossiermgt.service.DossierProcService getDossierProcService() {
return dossierProcService;
}
/**
* Sets the dossier proc remote service.
*
* @param dossierProcService the dossier proc remote service
*/
public void setDossierProcService(
org.oep.dossiermgt.service.DossierProcService dossierProcService) {
this.dossierProcService = dossierProcService;
}
/**
* Returns the dossier proc persistence.
*
* @return the dossier proc persistence
*/
public DossierProcPersistence getDossierProcPersistence() {
return dossierProcPersistence;
}
/**
* Sets the dossier proc persistence.
*
* @param dossierProcPersistence the dossier proc persistence
*/
public void setDossierProcPersistence(
DossierProcPersistence dossierProcPersistence) {
this.dossierProcPersistence = dossierProcPersistence;
}
/**
* Returns the dossier proc finder.
*
* @return the dossier proc finder
*/
public DossierProcFinder getDossierProcFinder() {
return dossierProcFinder;
}
/**
* Sets the dossier proc finder.
*
* @param dossierProcFinder the dossier proc finder
*/
public void setDossierProcFinder(DossierProcFinder dossierProcFinder) {
this.dossierProcFinder = dossierProcFinder;
}
// NOTE(review): generated accessors for the DossierProcAgent and
// DossierProcBookmark service/persistence beans (injected via the
// @BeanReference fields declared below).
/**
* Returns the dossier proc agent local service.
*
* @return the dossier proc agent local service
*/
public org.oep.dossiermgt.service.DossierProcAgentLocalService getDossierProcAgentLocalService() {
return dossierProcAgentLocalService;
}
/**
* Sets the dossier proc agent local service.
*
* @param dossierProcAgentLocalService the dossier proc agent local service
*/
public void setDossierProcAgentLocalService(
org.oep.dossiermgt.service.DossierProcAgentLocalService dossierProcAgentLocalService) {
this.dossierProcAgentLocalService = dossierProcAgentLocalService;
}
/**
* Returns the dossier proc agent remote service.
*
* @return the dossier proc agent remote service
*/
public org.oep.dossiermgt.service.DossierProcAgentService getDossierProcAgentService() {
return dossierProcAgentService;
}
/**
* Sets the dossier proc agent remote service.
*
* @param dossierProcAgentService the dossier proc agent remote service
*/
public void setDossierProcAgentService(
org.oep.dossiermgt.service.DossierProcAgentService dossierProcAgentService) {
this.dossierProcAgentService = dossierProcAgentService;
}
/**
* Returns the dossier proc agent persistence.
*
* @return the dossier proc agent persistence
*/
public DossierProcAgentPersistence getDossierProcAgentPersistence() {
return dossierProcAgentPersistence;
}
/**
* Sets the dossier proc agent persistence.
*
* @param dossierProcAgentPersistence the dossier proc agent persistence
*/
public void setDossierProcAgentPersistence(
DossierProcAgentPersistence dossierProcAgentPersistence) {
this.dossierProcAgentPersistence = dossierProcAgentPersistence;
}
/**
* Returns the dossier proc bookmark local service.
*
* @return the dossier proc bookmark local service
*/
public org.oep.dossiermgt.service.DossierProcBookmarkLocalService getDossierProcBookmarkLocalService() {
return dossierProcBookmarkLocalService;
}
/**
* Sets the dossier proc bookmark local service.
*
* @param dossierProcBookmarkLocalService the dossier proc bookmark local service
*/
public void setDossierProcBookmarkLocalService(
org.oep.dossiermgt.service.DossierProcBookmarkLocalService dossierProcBookmarkLocalService) {
this.dossierProcBookmarkLocalService = dossierProcBookmarkLocalService;
}
/**
* Returns the dossier proc bookmark remote service.
*
* @return the dossier proc bookmark remote service
*/
public org.oep.dossiermgt.service.DossierProcBookmarkService getDossierProcBookmarkService() {
return dossierProcBookmarkService;
}
/**
* Sets the dossier proc bookmark remote service.
*
* @param dossierProcBookmarkService the dossier proc bookmark remote service
*/
public void setDossierProcBookmarkService(
org.oep.dossiermgt.service.DossierProcBookmarkService dossierProcBookmarkService) {
this.dossierProcBookmarkService = dossierProcBookmarkService;
}
/**
* Returns the dossier proc bookmark persistence.
*
* @return the dossier proc bookmark persistence
*/
public DossierProcBookmarkPersistence getDossierProcBookmarkPersistence() {
return dossierProcBookmarkPersistence;
}
/**
* Sets the dossier proc bookmark persistence.
*
* @param dossierProcBookmarkPersistence the dossier proc bookmark persistence
*/
public void setDossierProcBookmarkPersistence(
DossierProcBookmarkPersistence dossierProcBookmarkPersistence) {
this.dossierProcBookmarkPersistence = dossierProcBookmarkPersistence;
}
// NOTE(review): generated accessors for the DossierTag and EbMessage
// service/persistence beans (injected via the @BeanReference fields
// declared below).
/**
* Returns the dossier tag local service.
*
* @return the dossier tag local service
*/
public org.oep.dossiermgt.service.DossierTagLocalService getDossierTagLocalService() {
return dossierTagLocalService;
}
/**
* Sets the dossier tag local service.
*
* @param dossierTagLocalService the dossier tag local service
*/
public void setDossierTagLocalService(
org.oep.dossiermgt.service.DossierTagLocalService dossierTagLocalService) {
this.dossierTagLocalService = dossierTagLocalService;
}
/**
* Returns the dossier tag remote service.
*
* @return the dossier tag remote service
*/
public org.oep.dossiermgt.service.DossierTagService getDossierTagService() {
return dossierTagService;
}
/**
* Sets the dossier tag remote service.
*
* @param dossierTagService the dossier tag remote service
*/
public void setDossierTagService(
org.oep.dossiermgt.service.DossierTagService dossierTagService) {
this.dossierTagService = dossierTagService;
}
/**
* Returns the dossier tag persistence.
*
* @return the dossier tag persistence
*/
public DossierTagPersistence getDossierTagPersistence() {
return dossierTagPersistence;
}
/**
* Sets the dossier tag persistence.
*
* @param dossierTagPersistence the dossier tag persistence
*/
public void setDossierTagPersistence(
DossierTagPersistence dossierTagPersistence) {
this.dossierTagPersistence = dossierTagPersistence;
}
/**
* Returns the eb message local service.
*
* @return the eb message local service
*/
public org.oep.dossiermgt.service.EbMessageLocalService getEbMessageLocalService() {
return ebMessageLocalService;
}
/**
* Sets the eb message local service.
*
* @param ebMessageLocalService the eb message local service
*/
public void setEbMessageLocalService(
org.oep.dossiermgt.service.EbMessageLocalService ebMessageLocalService) {
this.ebMessageLocalService = ebMessageLocalService;
}
/**
* Returns the eb message remote service.
*
* @return the eb message remote service
*/
public org.oep.dossiermgt.service.EbMessageService getEbMessageService() {
return ebMessageService;
}
/**
* Sets the eb message remote service.
*
* @param ebMessageService the eb message remote service
*/
public void setEbMessageService(
org.oep.dossiermgt.service.EbMessageService ebMessageService) {
this.ebMessageService = ebMessageService;
}
/**
* Returns the eb message persistence.
*
* @return the eb message persistence
*/
public EbMessagePersistence getEbMessagePersistence() {
return ebMessagePersistence;
}
/**
* Sets the eb message persistence.
*
* @param ebMessagePersistence the eb message persistence
*/
public void setEbMessagePersistence(
EbMessagePersistence ebMessagePersistence) {
this.ebMessagePersistence = ebMessagePersistence;
}
// NOTE(review): generated accessors for the EbPartnerShip (including its
// finder) and PaymentConfig beans (injected via the @BeanReference fields
// declared below).
/**
* Returns the eb partner ship local service.
*
* @return the eb partner ship local service
*/
public org.oep.dossiermgt.service.EbPartnerShipLocalService getEbPartnerShipLocalService() {
return ebPartnerShipLocalService;
}
/**
* Sets the eb partner ship local service.
*
* @param ebPartnerShipLocalService the eb partner ship local service
*/
public void setEbPartnerShipLocalService(
org.oep.dossiermgt.service.EbPartnerShipLocalService ebPartnerShipLocalService) {
this.ebPartnerShipLocalService = ebPartnerShipLocalService;
}
/**
* Returns the eb partner ship remote service.
*
* @return the eb partner ship remote service
*/
public org.oep.dossiermgt.service.EbPartnerShipService getEbPartnerShipService() {
return ebPartnerShipService;
}
/**
* Sets the eb partner ship remote service.
*
* @param ebPartnerShipService the eb partner ship remote service
*/
public void setEbPartnerShipService(
org.oep.dossiermgt.service.EbPartnerShipService ebPartnerShipService) {
this.ebPartnerShipService = ebPartnerShipService;
}
/**
* Returns the eb partner ship persistence.
*
* @return the eb partner ship persistence
*/
public EbPartnerShipPersistence getEbPartnerShipPersistence() {
return ebPartnerShipPersistence;
}
/**
* Sets the eb partner ship persistence.
*
* @param ebPartnerShipPersistence the eb partner ship persistence
*/
public void setEbPartnerShipPersistence(
EbPartnerShipPersistence ebPartnerShipPersistence) {
this.ebPartnerShipPersistence = ebPartnerShipPersistence;
}
/**
* Returns the eb partner ship finder.
*
* @return the eb partner ship finder
*/
public EbPartnerShipFinder getEbPartnerShipFinder() {
return ebPartnerShipFinder;
}
/**
* Sets the eb partner ship finder.
*
* @param ebPartnerShipFinder the eb partner ship finder
*/
public void setEbPartnerShipFinder(EbPartnerShipFinder ebPartnerShipFinder) {
this.ebPartnerShipFinder = ebPartnerShipFinder;
}
/**
* Returns the payment config local service.
*
* @return the payment config local service
*/
public org.oep.dossiermgt.service.PaymentConfigLocalService getPaymentConfigLocalService() {
return paymentConfigLocalService;
}
/**
* Sets the payment config local service.
*
* @param paymentConfigLocalService the payment config local service
*/
public void setPaymentConfigLocalService(
org.oep.dossiermgt.service.PaymentConfigLocalService paymentConfigLocalService) {
this.paymentConfigLocalService = paymentConfigLocalService;
}
/**
* Returns the payment config remote service.
*
* @return the payment config remote service
*/
public org.oep.dossiermgt.service.PaymentConfigService getPaymentConfigService() {
return paymentConfigService;
}
/**
* Sets the payment config remote service.
*
* @param paymentConfigService the payment config remote service
*/
public void setPaymentConfigService(
org.oep.dossiermgt.service.PaymentConfigService paymentConfigService) {
this.paymentConfigService = paymentConfigService;
}
/**
* Returns the payment config persistence.
*
* @return the payment config persistence
*/
public PaymentConfigPersistence getPaymentConfigPersistence() {
return paymentConfigPersistence;
}
/**
* Sets the payment config persistence.
*
* @param paymentConfigPersistence the payment config persistence
*/
public void setPaymentConfigPersistence(
PaymentConfigPersistence paymentConfigPersistence) {
this.paymentConfigPersistence = paymentConfigPersistence;
}
// NOTE(review): generated accessors for the PaymentFile and PaymentRequest
// beans (injected via the @BeanReference fields declared below).
/**
* Returns the payment file local service.
*
* @return the payment file local service
*/
public org.oep.dossiermgt.service.PaymentFileLocalService getPaymentFileLocalService() {
return paymentFileLocalService;
}
/**
* Sets the payment file local service.
*
* @param paymentFileLocalService the payment file local service
*/
public void setPaymentFileLocalService(
org.oep.dossiermgt.service.PaymentFileLocalService paymentFileLocalService) {
this.paymentFileLocalService = paymentFileLocalService;
}
/**
* Returns the payment file remote service.
*
* @return the payment file remote service
*/
public org.oep.dossiermgt.service.PaymentFileService getPaymentFileService() {
return paymentFileService;
}
/**
* Sets the payment file remote service.
*
* @param paymentFileService the payment file remote service
*/
public void setPaymentFileService(
org.oep.dossiermgt.service.PaymentFileService paymentFileService) {
this.paymentFileService = paymentFileService;
}
/**
* Returns the payment file persistence.
*
* @return the payment file persistence
*/
public PaymentFilePersistence getPaymentFilePersistence() {
return paymentFilePersistence;
}
/**
* Sets the payment file persistence.
*
* @param paymentFilePersistence the payment file persistence
*/
public void setPaymentFilePersistence(
PaymentFilePersistence paymentFilePersistence) {
this.paymentFilePersistence = paymentFilePersistence;
}
/**
* Returns the payment request local service.
*
* @return the payment request local service
*/
public org.oep.dossiermgt.service.PaymentRequestLocalService getPaymentRequestLocalService() {
return paymentRequestLocalService;
}
/**
* Sets the payment request local service.
*
* @param paymentRequestLocalService the payment request local service
*/
public void setPaymentRequestLocalService(
org.oep.dossiermgt.service.PaymentRequestLocalService paymentRequestLocalService) {
this.paymentRequestLocalService = paymentRequestLocalService;
}
/**
* Returns the payment request remote service.
*
* @return the payment request remote service
*/
public org.oep.dossiermgt.service.PaymentRequestService getPaymentRequestService() {
return paymentRequestService;
}
/**
* Sets the payment request remote service.
*
* @param paymentRequestService the payment request remote service
*/
public void setPaymentRequestService(
org.oep.dossiermgt.service.PaymentRequestService paymentRequestService) {
this.paymentRequestService = paymentRequestService;
}
/**
* Returns the payment request persistence.
*
* @return the payment request persistence
*/
public PaymentRequestPersistence getPaymentRequestPersistence() {
return paymentRequestPersistence;
}
/**
* Sets the payment request persistence.
*
* @param paymentRequestPersistence the payment request persistence
*/
public void setPaymentRequestPersistence(
PaymentRequestPersistence paymentRequestPersistence) {
this.paymentRequestPersistence = paymentRequestPersistence;
}
// NOTE(review): generated accessors for the ProfileData, StatisticByAgency,
// and StatisticByDay beans (injected via the @BeanReference fields declared
// below).
/**
* Returns the profile data local service.
*
* @return the profile data local service
*/
public org.oep.dossiermgt.service.ProfileDataLocalService getProfileDataLocalService() {
return profileDataLocalService;
}
/**
* Sets the profile data local service.
*
* @param profileDataLocalService the profile data local service
*/
public void setProfileDataLocalService(
org.oep.dossiermgt.service.ProfileDataLocalService profileDataLocalService) {
this.profileDataLocalService = profileDataLocalService;
}
/**
* Returns the profile data remote service.
*
* @return the profile data remote service
*/
public org.oep.dossiermgt.service.ProfileDataService getProfileDataService() {
return profileDataService;
}
/**
* Sets the profile data remote service.
*
* @param profileDataService the profile data remote service
*/
public void setProfileDataService(
org.oep.dossiermgt.service.ProfileDataService profileDataService) {
this.profileDataService = profileDataService;
}
/**
* Returns the profile data persistence.
*
* @return the profile data persistence
*/
public ProfileDataPersistence getProfileDataPersistence() {
return profileDataPersistence;
}
/**
* Sets the profile data persistence.
*
* @param profileDataPersistence the profile data persistence
*/
public void setProfileDataPersistence(
ProfileDataPersistence profileDataPersistence) {
this.profileDataPersistence = profileDataPersistence;
}
/**
* Returns the statistic by agency local service.
*
* @return the statistic by agency local service
*/
public org.oep.dossiermgt.service.StatisticByAgencyLocalService getStatisticByAgencyLocalService() {
return statisticByAgencyLocalService;
}
/**
* Sets the statistic by agency local service.
*
* @param statisticByAgencyLocalService the statistic by agency local service
*/
public void setStatisticByAgencyLocalService(
org.oep.dossiermgt.service.StatisticByAgencyLocalService statisticByAgencyLocalService) {
this.statisticByAgencyLocalService = statisticByAgencyLocalService;
}
/**
* Returns the statistic by agency remote service.
*
* @return the statistic by agency remote service
*/
public org.oep.dossiermgt.service.StatisticByAgencyService getStatisticByAgencyService() {
return statisticByAgencyService;
}
/**
* Sets the statistic by agency remote service.
*
* @param statisticByAgencyService the statistic by agency remote service
*/
public void setStatisticByAgencyService(
org.oep.dossiermgt.service.StatisticByAgencyService statisticByAgencyService) {
this.statisticByAgencyService = statisticByAgencyService;
}
/**
* Returns the statistic by agency persistence.
*
* @return the statistic by agency persistence
*/
public StatisticByAgencyPersistence getStatisticByAgencyPersistence() {
return statisticByAgencyPersistence;
}
/**
* Sets the statistic by agency persistence.
*
* @param statisticByAgencyPersistence the statistic by agency persistence
*/
public void setStatisticByAgencyPersistence(
StatisticByAgencyPersistence statisticByAgencyPersistence) {
this.statisticByAgencyPersistence = statisticByAgencyPersistence;
}
/**
* Returns the statistic by day local service.
*
* @return the statistic by day local service
*/
public org.oep.dossiermgt.service.StatisticByDayLocalService getStatisticByDayLocalService() {
return statisticByDayLocalService;
}
/**
* Sets the statistic by day local service.
*
* @param statisticByDayLocalService the statistic by day local service
*/
public void setStatisticByDayLocalService(
org.oep.dossiermgt.service.StatisticByDayLocalService statisticByDayLocalService) {
this.statisticByDayLocalService = statisticByDayLocalService;
}
/**
* Returns the statistic by day remote service.
*
* @return the statistic by day remote service
*/
public org.oep.dossiermgt.service.StatisticByDayService getStatisticByDayService() {
return statisticByDayService;
}
/**
* Sets the statistic by day remote service.
*
* @param statisticByDayService the statistic by day remote service
*/
public void setStatisticByDayService(
org.oep.dossiermgt.service.StatisticByDayService statisticByDayService) {
this.statisticByDayService = statisticByDayService;
}
/**
* Returns the statistic by day persistence.
*
* @return the statistic by day persistence
*/
public StatisticByDayPersistence getStatisticByDayPersistence() {
return statisticByDayPersistence;
}
/**
* Sets the statistic by day persistence.
*
* @param statisticByDayPersistence the statistic by day persistence
*/
public void setStatisticByDayPersistence(
StatisticByDayPersistence statisticByDayPersistence) {
this.statisticByDayPersistence = statisticByDayPersistence;
}
// NOTE(review): generated accessors for StatisticByDomain plus the Liferay
// platform beans (counter, resource, user) this service depends on
// (injected via the @BeanReference fields declared below).
/**
* Returns the statistic by domain local service.
*
* @return the statistic by domain local service
*/
public org.oep.dossiermgt.service.StatisticByDomainLocalService getStatisticByDomainLocalService() {
return statisticByDomainLocalService;
}
/**
* Sets the statistic by domain local service.
*
* @param statisticByDomainLocalService the statistic by domain local service
*/
public void setStatisticByDomainLocalService(
org.oep.dossiermgt.service.StatisticByDomainLocalService statisticByDomainLocalService) {
this.statisticByDomainLocalService = statisticByDomainLocalService;
}
/**
* Returns the statistic by domain remote service.
*
* @return the statistic by domain remote service
*/
public org.oep.dossiermgt.service.StatisticByDomainService getStatisticByDomainService() {
return statisticByDomainService;
}
/**
* Sets the statistic by domain remote service.
*
* @param statisticByDomainService the statistic by domain remote service
*/
public void setStatisticByDomainService(
org.oep.dossiermgt.service.StatisticByDomainService statisticByDomainService) {
this.statisticByDomainService = statisticByDomainService;
}
/**
* Returns the statistic by domain persistence.
*
* @return the statistic by domain persistence
*/
public StatisticByDomainPersistence getStatisticByDomainPersistence() {
return statisticByDomainPersistence;
}
/**
* Sets the statistic by domain persistence.
*
* @param statisticByDomainPersistence the statistic by domain persistence
*/
public void setStatisticByDomainPersistence(
StatisticByDomainPersistence statisticByDomainPersistence) {
this.statisticByDomainPersistence = statisticByDomainPersistence;
}
/**
* Returns the counter local service.
*
* @return the counter local service
*/
public com.liferay.counter.service.CounterLocalService getCounterLocalService() {
return counterLocalService;
}
/**
* Sets the counter local service.
*
* @param counterLocalService the counter local service
*/
public void setCounterLocalService(
com.liferay.counter.service.CounterLocalService counterLocalService) {
this.counterLocalService = counterLocalService;
}
/**
* Returns the resource local service.
*
* @return the resource local service
*/
public com.liferay.portal.service.ResourceLocalService getResourceLocalService() {
return resourceLocalService;
}
/**
* Sets the resource local service.
*
* @param resourceLocalService the resource local service
*/
public void setResourceLocalService(
com.liferay.portal.service.ResourceLocalService resourceLocalService) {
this.resourceLocalService = resourceLocalService;
}
/**
* Returns the user local service.
*
* @return the user local service
*/
public com.liferay.portal.service.UserLocalService getUserLocalService() {
return userLocalService;
}
/**
* Sets the user local service.
*
* @param userLocalService the user local service
*/
public void setUserLocalService(
com.liferay.portal.service.UserLocalService userLocalService) {
this.userLocalService = userLocalService;
}
/**
* Returns the user remote service.
*
* @return the user remote service
*/
public com.liferay.portal.service.UserService getUserService() {
return userService;
}
/**
* Sets the user remote service.
*
* @param userService the user remote service
*/
public void setUserService(
com.liferay.portal.service.UserService userService) {
this.userService = userService;
}
/**
* Returns the user persistence.
*
* @return the user persistence
*/
public UserPersistence getUserPersistence() {
return userPersistence;
}
/**
* Sets the user persistence.
*
* @param userPersistence the user persistence
*/
public void setUserPersistence(UserPersistence userPersistence) {
this.userPersistence = userPersistence;
}
/**
 * Spring initialization callback: once all dependencies are injected,
 * remember this bean's own class loader so that invokeMethod() can install
 * it as the thread context class loader.
 */
public void afterPropertiesSet() {
_classLoader = getClass().getClassLoader();
}
/**
 * Spring destruction callback; intentionally a no-op -- no cleanup is
 * performed here.
 */
public void destroy() {
}
/**
 * Returns the Spring bean ID for this bean.
 *
 * @return the Spring bean ID for this bean (as last assigned via
 *         {@link #setBeanIdentifier(String)}; {@code null} if never set)
 */
@Override
public String getBeanIdentifier() {
return _beanIdentifier;
}
/**
 * Sets the Spring bean ID for this bean.
 *
 * @param beanIdentifier the Spring bean ID for this bean; stored as-is
 *        (no validation) and later returned by {@link #getBeanIdentifier()}
 */
@Override
public void setBeanIdentifier(String beanIdentifier) {
_beanIdentifier = beanIdentifier;
}
/**
 * Dispatches the named method through the CLP invoker, temporarily
 * installing this service's class loader (captured in afterPropertiesSet())
 * as the thread context class loader and restoring the caller's loader
 * afterwards, even if the invocation throws.
 *
 * @param name the method name to invoke
 * @param parameterTypes the fully-qualified parameter type names
 * @param arguments the arguments to pass
 * @return whatever the invoked method returns
 * @throws Throwable any throwable raised by the invoked method
 */
@Override
public Object invokeMethod(String name, String[] parameterTypes,
Object[] arguments) throws Throwable {
Thread thread = Thread.currentThread();
ClassLoader callerClassLoader = thread.getContextClassLoader();
// Only swap (and later restore) when the caller's loader differs from ours.
boolean swapped = (callerClassLoader != _classLoader);
if (swapped) {
thread.setContextClassLoader(_classLoader);
}
try {
return _clpInvoker.invokeMethod(name, parameterTypes, arguments);
}
finally {
if (swapped) {
thread.setContextClassLoader(callerClassLoader);
}
}
}
/**
 * Returns the model class this service operates on.
 *
 * @return {@code DossierProcAgent.class}
 */
protected Class<?> getModelClass() {
return DossierProcAgent.class;
}
/**
 * Returns the fully-qualified name of the model class this service
 * operates on.
 *
 * @return the fully-qualified class name of {@code DossierProcAgent}
 */
protected String getModelClassName() {
Class<?> modelClass = DossierProcAgent.class;
return modelClass.getName();
}
/**
 * Performs an SQL update against the data source backing the
 * DossierProcAgent persistence.
 *
 * @param sql the sql query
 * @throws SystemException if executing the update fails for any reason
 *         (the original exception is preserved as the cause)
 */
protected void runSQL(String sql) throws SystemException {
try {
// Build the update directly against the persistence layer's data source;
// new int[0] means no parameter types are declared.
SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dossierProcAgentPersistence.getDataSource(),
sql, new int[0]);
sqlUpdate.update();
}
catch (Exception e) {
throw new SystemException(e);
}
}
// --- Spring-injected dependencies -------------------------------------------
// Each field below is populated by Liferay's BeanReference mechanism and is
// exposed through the generated getter/setter pairs above.
@BeanReference(type = org.oep.dossiermgt.service.DocFileLocalService.class)
protected org.oep.dossiermgt.service.DocFileLocalService docFileLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DocFileService.class)
protected org.oep.dossiermgt.service.DocFileService docFileService;
@BeanReference(type = DocFilePersistence.class)
protected DocFilePersistence docFilePersistence;
@BeanReference(type = org.oep.dossiermgt.service.DocFileVersionLocalService.class)
protected org.oep.dossiermgt.service.DocFileVersionLocalService docFileVersionLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DocFileVersionService.class)
protected org.oep.dossiermgt.service.DocFileVersionService docFileVersionService;
@BeanReference(type = DocFileVersionPersistence.class)
protected DocFileVersionPersistence docFileVersionPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DocTemplateLocalService.class)
protected org.oep.dossiermgt.service.DocTemplateLocalService docTemplateLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DocTemplateService.class)
protected org.oep.dossiermgt.service.DocTemplateService docTemplateService;
@BeanReference(type = DocTemplatePersistence.class)
protected DocTemplatePersistence docTemplatePersistence;
@BeanReference(type = DocTemplateFinder.class)
protected DocTemplateFinder docTemplateFinder;
@BeanReference(type = org.oep.dossiermgt.service.DossierLocalService.class)
protected org.oep.dossiermgt.service.DossierLocalService dossierLocalService;
@BeanReference(type = DossierPersistence.class)
protected DossierPersistence dossierPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierDocLocalService.class)
protected org.oep.dossiermgt.service.DossierDocLocalService dossierDocLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierDocService.class)
protected org.oep.dossiermgt.service.DossierDocService dossierDocService;
@BeanReference(type = DossierDocPersistence.class)
protected DossierDocPersistence dossierDocPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierFolderLocalService.class)
protected org.oep.dossiermgt.service.DossierFolderLocalService dossierFolderLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierFolderService.class)
protected org.oep.dossiermgt.service.DossierFolderService dossierFolderService;
@BeanReference(type = DossierFolderPersistence.class)
protected DossierFolderPersistence dossierFolderPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierFolder2RoleLocalService.class)
protected org.oep.dossiermgt.service.DossierFolder2RoleLocalService dossierFolder2RoleLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierFolder2RoleService.class)
protected org.oep.dossiermgt.service.DossierFolder2RoleService dossierFolder2RoleService;
@BeanReference(type = DossierFolder2RolePersistence.class)
protected DossierFolder2RolePersistence dossierFolder2RolePersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcLocalService.class)
protected org.oep.dossiermgt.service.DossierProcLocalService dossierProcLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcService.class)
protected org.oep.dossiermgt.service.DossierProcService dossierProcService;
@BeanReference(type = DossierProcPersistence.class)
protected DossierProcPersistence dossierProcPersistence;
@BeanReference(type = DossierProcFinder.class)
protected DossierProcFinder dossierProcFinder;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcAgentLocalService.class)
protected org.oep.dossiermgt.service.DossierProcAgentLocalService dossierProcAgentLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcAgentService.class)
protected org.oep.dossiermgt.service.DossierProcAgentService dossierProcAgentService;
@BeanReference(type = DossierProcAgentPersistence.class)
protected DossierProcAgentPersistence dossierProcAgentPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcBookmarkLocalService.class)
protected org.oep.dossiermgt.service.DossierProcBookmarkLocalService dossierProcBookmarkLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierProcBookmarkService.class)
protected org.oep.dossiermgt.service.DossierProcBookmarkService dossierProcBookmarkService;
@BeanReference(type = DossierProcBookmarkPersistence.class)
protected DossierProcBookmarkPersistence dossierProcBookmarkPersistence;
@BeanReference(type = org.oep.dossiermgt.service.DossierTagLocalService.class)
protected org.oep.dossiermgt.service.DossierTagLocalService dossierTagLocalService;
@BeanReference(type = org.oep.dossiermgt.service.DossierTagService.class)
protected org.oep.dossiermgt.service.DossierTagService dossierTagService;
@BeanReference(type = DossierTagPersistence.class)
protected DossierTagPersistence dossierTagPersistence;
@BeanReference(type = org.oep.dossiermgt.service.EbMessageLocalService.class)
protected org.oep.dossiermgt.service.EbMessageLocalService ebMessageLocalService;
@BeanReference(type = org.oep.dossiermgt.service.EbMessageService.class)
protected org.oep.dossiermgt.service.EbMessageService ebMessageService;
@BeanReference(type = EbMessagePersistence.class)
protected EbMessagePersistence ebMessagePersistence;
@BeanReference(type = org.oep.dossiermgt.service.EbPartnerShipLocalService.class)
protected org.oep.dossiermgt.service.EbPartnerShipLocalService ebPartnerShipLocalService;
@BeanReference(type = org.oep.dossiermgt.service.EbPartnerShipService.class)
protected org.oep.dossiermgt.service.EbPartnerShipService ebPartnerShipService;
@BeanReference(type = EbPartnerShipPersistence.class)
protected EbPartnerShipPersistence ebPartnerShipPersistence;
@BeanReference(type = EbPartnerShipFinder.class)
protected EbPartnerShipFinder ebPartnerShipFinder;
@BeanReference(type = org.oep.dossiermgt.service.PaymentConfigLocalService.class)
protected org.oep.dossiermgt.service.PaymentConfigLocalService paymentConfigLocalService;
@BeanReference(type = org.oep.dossiermgt.service.PaymentConfigService.class)
protected org.oep.dossiermgt.service.PaymentConfigService paymentConfigService;
@BeanReference(type = PaymentConfigPersistence.class)
protected PaymentConfigPersistence paymentConfigPersistence;
@BeanReference(type = org.oep.dossiermgt.service.PaymentFileLocalService.class)
protected org.oep.dossiermgt.service.PaymentFileLocalService paymentFileLocalService;
@BeanReference(type = org.oep.dossiermgt.service.PaymentFileService.class)
protected org.oep.dossiermgt.service.PaymentFileService paymentFileService;
@BeanReference(type = PaymentFilePersistence.class)
protected PaymentFilePersistence paymentFilePersistence;
@BeanReference(type = org.oep.dossiermgt.service.PaymentRequestLocalService.class)
protected org.oep.dossiermgt.service.PaymentRequestLocalService paymentRequestLocalService;
@BeanReference(type = org.oep.dossiermgt.service.PaymentRequestService.class)
protected org.oep.dossiermgt.service.PaymentRequestService paymentRequestService;
@BeanReference(type = PaymentRequestPersistence.class)
protected PaymentRequestPersistence paymentRequestPersistence;
@BeanReference(type = org.oep.dossiermgt.service.ProfileDataLocalService.class)
protected org.oep.dossiermgt.service.ProfileDataLocalService profileDataLocalService;
@BeanReference(type = org.oep.dossiermgt.service.ProfileDataService.class)
protected org.oep.dossiermgt.service.ProfileDataService profileDataService;
@BeanReference(type = ProfileDataPersistence.class)
protected ProfileDataPersistence profileDataPersistence;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByAgencyLocalService.class)
protected org.oep.dossiermgt.service.StatisticByAgencyLocalService statisticByAgencyLocalService;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByAgencyService.class)
protected org.oep.dossiermgt.service.StatisticByAgencyService statisticByAgencyService;
@BeanReference(type = StatisticByAgencyPersistence.class)
protected StatisticByAgencyPersistence statisticByAgencyPersistence;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByDayLocalService.class)
protected org.oep.dossiermgt.service.StatisticByDayLocalService statisticByDayLocalService;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByDayService.class)
protected org.oep.dossiermgt.service.StatisticByDayService statisticByDayService;
@BeanReference(type = StatisticByDayPersistence.class)
protected StatisticByDayPersistence statisticByDayPersistence;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByDomainLocalService.class)
protected org.oep.dossiermgt.service.StatisticByDomainLocalService statisticByDomainLocalService;
@BeanReference(type = org.oep.dossiermgt.service.StatisticByDomainService.class)
protected org.oep.dossiermgt.service.StatisticByDomainService statisticByDomainService;
@BeanReference(type = StatisticByDomainPersistence.class)
protected StatisticByDomainPersistence statisticByDomainPersistence;
@BeanReference(type = com.liferay.counter.service.CounterLocalService.class)
protected com.liferay.counter.service.CounterLocalService counterLocalService;
@BeanReference(type = com.liferay.portal.service.ResourceLocalService.class)
protected com.liferay.portal.service.ResourceLocalService resourceLocalService;
@BeanReference(type = com.liferay.portal.service.UserLocalService.class)
protected com.liferay.portal.service.UserLocalService userLocalService;
@BeanReference(type = com.liferay.portal.service.UserService.class)
protected com.liferay.portal.service.UserService userService;
@BeanReference(type = UserPersistence.class)
protected UserPersistence userPersistence;
// Spring bean ID, assigned via setBeanIdentifier().
private String _beanIdentifier;
// This bean's class loader, captured in afterPropertiesSet() and installed
// as the thread context class loader inside invokeMethod().
private ClassLoader _classLoader;
// Delegate used by invokeMethod() to dispatch reflective CLP calls.
private DossierProcAgentServiceClpInvoker _clpInvoker = new DossierProcAgentServiceClpInvoker();
}
| |
/*
* Copyright 2014 - 2016 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.driver;
import io.aeron.Aeron;
import io.aeron.Image;
import io.aeron.Publication;
import io.aeron.driver.exceptions.ConfigurationException;
import io.aeron.driver.media.ReceiveChannelEndpoint;
import io.aeron.driver.media.SendChannelEndpoint;
import io.aeron.logbuffer.FrameDescriptor;
import io.aeron.protocol.DataHeaderFlyweight;
import org.agrona.BitUtil;
import org.agrona.LangUtil;
import org.agrona.concurrent.BackoffIdleStrategy;
import org.agrona.concurrent.ControllableIdleStrategy;
import org.agrona.concurrent.IdleStrategy;
import org.agrona.concurrent.broadcast.BroadcastBufferDescriptor;
import org.agrona.concurrent.ringbuffer.RingBufferDescriptor;
import org.agrona.concurrent.status.StatusIndicator;
import java.util.concurrent.TimeUnit;
import static io.aeron.driver.ThreadingMode.DEDICATED;
import static java.lang.Integer.getInteger;
import static java.lang.Long.getLong;
import static java.lang.System.getProperty;
import static org.agrona.BitUtil.fromHex;
/**
* Configuration options for the {@link MediaDriver}.
*/
public class Configuration
{
/**
 * Property name for the boolean value controlling whether term buffers should be created as sparse files.
*/
public static final String TERM_BUFFER_SPARSE_FILE_PROP_NAME = "aeron.term.buffer.sparse.file";
/**
* Should term buffers be created as sparse files. Defaults to false.
*
* If a platform supports sparse files then log buffer creation is faster with pages being allocated as
* needed. This can help for large numbers of channels/streams but can result in latency pauses.
*/
public static final String TERM_BUFFER_SPARSE_FILE = getProperty(TERM_BUFFER_SPARSE_FILE_PROP_NAME);
/**
* Length (in bytes) of the log buffers for terms.
*/
public static final String TERM_BUFFER_MAX_LENGTH_PROP_NAME = "aeron.term.buffer.max.length";
/**
* Default term max buffer length. The maximum possible term length is 1GB.
*/
public static final int TERM_BUFFER_LENGTH_MAX_DEFAULT = 1024 * 1024 * 1024;
/**
* Length (in bytes) of the log buffers for publication terms.
*/
public static final String TERM_BUFFER_LENGTH_PROP_NAME = "aeron.term.buffer.length";
/**
* Default term buffer length.
*/
public static final int TERM_BUFFER_LENGTH_DEFAULT = 16 * 1024 * 1024;
/**
* Property name for term buffer length (in bytes) for IPC buffers.
*/
public static final String IPC_TERM_BUFFER_LENGTH_PROP_NAME = "aeron.ipc.term.buffer.length";
/**
* Default IPC term buffer length.
*/
public static final int TERM_BUFFER_IPC_LENGTH_DEFAULT = 64 * 1024 * 1024;
/**
* IPC Term buffer length in bytes.
*/
public static final int IPC_TERM_BUFFER_LENGTH = getInteger(
IPC_TERM_BUFFER_LENGTH_PROP_NAME, TERM_BUFFER_IPC_LENGTH_DEFAULT);
/**
 * Property name for the low file store warning threshold.
*/
public static final String LOW_FILE_STORE_WARNING_THRESHOLD_PROP_NAME = "aeron.low.file.store.warning.threshold";
/**
* Default value for low file storage warning threshold.
*/
public static final long LOW_FILE_STORE_WARNING_THRESHOLD_DEFAULT = TERM_BUFFER_LENGTH_DEFAULT * 10L;
/**
 * Resolved value for the low file store warning threshold.
*/
public static final long LOW_FILE_STORE_WARNING_THRESHOLD =
getLong(LOW_FILE_STORE_WARNING_THRESHOLD_PROP_NAME, LOW_FILE_STORE_WARNING_THRESHOLD_DEFAULT);
/**
* Length (in bytes) of the conductor buffer for control commands from the clients to the media driver conductor.
*/
public static final String CONDUCTOR_BUFFER_LENGTH_PROP_NAME = "aeron.conductor.buffer.length";
/**
* Default buffer length for conductor buffers between the client and the media driver conductor.
*/
public static final int CONDUCTOR_BUFFER_LENGTH_DEFAULT = (1024 * 1024) + RingBufferDescriptor.TRAILER_LENGTH;
/**
* Conductor buffer length in bytes.
*/
public static final int CONDUCTOR_BUFFER_LENGTH = getInteger(
CONDUCTOR_BUFFER_LENGTH_PROP_NAME, CONDUCTOR_BUFFER_LENGTH_DEFAULT);
/**
* Length (in bytes) of the broadcast buffers from the media driver to the clients.
*/
public static final String TO_CLIENTS_BUFFER_LENGTH_PROP_NAME = "aeron.clients.buffer.length";
/**
* Default buffer length for broadcast buffers from the media driver and the clients.
*/
public static final int TO_CLIENTS_BUFFER_LENGTH_DEFAULT = (1024 * 1024) + BroadcastBufferDescriptor.TRAILER_LENGTH;
/**
* Length for broadcast buffers from the media driver and the clients.
*/
public static final int TO_CLIENTS_BUFFER_LENGTH = getInteger(
TO_CLIENTS_BUFFER_LENGTH_PROP_NAME, TO_CLIENTS_BUFFER_LENGTH_DEFAULT);
/**
 * Property name for length of the memory mapped buffers for the system counters.
*/
public static final String COUNTERS_VALUES_BUFFER_LENGTH_PROP_NAME = "aeron.counters.buffer.length";
/**
* Default length of the memory mapped buffers for the system counters file.
*/
public static final int COUNTERS_VALUES_BUFFER_LENGTH_DEFAULT = 1024 * 1024;
/**
* Length of the memory mapped buffers for the system counters file.
*/
public static final int COUNTERS_VALUES_BUFFER_LENGTH = getInteger(
COUNTERS_VALUES_BUFFER_LENGTH_PROP_NAME, COUNTERS_VALUES_BUFFER_LENGTH_DEFAULT);
public static final int COUNTERS_METADATA_BUFFER_LENGTH = COUNTERS_VALUES_BUFFER_LENGTH * 2;
/**
* Property name for length of the memory mapped buffer for the distinct error log.
*/
public static final String ERROR_BUFFER_LENGTH_PROP_NAME = "aeron.error.buffer.length";
/**
* Default buffer length for the error buffer for the media driver.
*/
public static final int ERROR_BUFFER_LENGTH_DEFAULT = 1024 * 1024;
/**
* Buffer length for the error buffer for the media driver.
*/
public static final int ERROR_BUFFER_LENGTH = getInteger(
ERROR_BUFFER_LENGTH_PROP_NAME, ERROR_BUFFER_LENGTH_DEFAULT);
/**
* Property name for length of the memory mapped buffer for the loss report buffer.
*/
public static final String LOSS_REPORT_BUFFER_LENGTH_PROP_NAME = "aeron.loss.report.buffer.length";
/**
* Default buffer length for the loss report buffer.
*/
public static final int LOSS_REPORT_BUFFER_LENGTH_DEFAULT = 1024 * 1024;
/**
* Buffer length for the loss report buffer for the media driver.
*/
public static final int LOSS_REPORT_BUFFER_LENGTH = getInteger(
LOSS_REPORT_BUFFER_LENGTH_PROP_NAME, LOSS_REPORT_BUFFER_LENGTH_DEFAULT);
/**
 * Property name for length of the initial window which must be sufficient for Bandwidth Delay Product (BDP).
*/
public static final String INITIAL_WINDOW_LENGTH_PROP_NAME = "aeron.rcv.initial.window.length";
/**
* Default initial window length for flow control sender to receiver purposes
*
* Length of Initial Window
*
* RTT (LAN) = 100 usec
* Throughput = 10 Gbps
*
* Buffer = Throughput * RTT
* Buffer = (10 * 1000 * 1000 * 1000 / 8) * 0.0001 = 125000
* Round to 128KB
*/
public static final int INITIAL_WINDOW_LENGTH_DEFAULT = 128 * 1024;
/**
* Property name for status message timeout in nanoseconds.
*/
public static final String STATUS_MESSAGE_TIMEOUT_PROP_NAME = "aeron.rcv.status.message.timeout";
/**
* Max timeout between SMs.
*/
public static final long STATUS_MESSAGE_TIMEOUT_DEFAULT_NS = TimeUnit.MILLISECONDS.toNanos(200);
/**
* Property name for ratio of sending data to polling status messages in the Sender.
*/
public static final String SEND_TO_STATUS_POLL_RATIO_PROP_NAME = "aeron.send.to.status.poll.ratio";
/**
* The ratio for sending data to polling status messages in the Sender.
*/
public static final int SEND_TO_STATUS_POLL_RATIO_DEFAULT = 4;
/**
 * Property name for SO_RCVBUF setting on UDP sockets which must be sufficient for Bandwidth Delay Product (BDP).
*/
public static final String SOCKET_RCVBUF_LENGTH_PROP_NAME = "aeron.socket.so_rcvbuf";
/**
* Default SO_RCVBUF length.
*/
public static final int SOCKET_RCVBUF_LENGTH_DEFAULT = 128 * 1024;
/**
* SO_RCVBUF length, 0 means use OS default.
*/
public static final int SOCKET_RCVBUF_LENGTH = getInteger(
SOCKET_RCVBUF_LENGTH_PROP_NAME, SOCKET_RCVBUF_LENGTH_DEFAULT);
/**
 * Property name for SO_SNDBUF setting on UDP sockets which must be sufficient for Bandwidth Delay Product (BDP).
*/
public static final String SOCKET_SNDBUF_LENGTH_PROP_NAME = "aeron.socket.so_sndbuf";
/**
* Default SO_SNDBUF length.
*/
public static final int SOCKET_SNDBUF_LENGTH_DEFAULT = 0;
/**
* SO_SNDBUF length, 0 means use OS default.
*/
public static final int SOCKET_SNDBUF_LENGTH = getInteger(
SOCKET_SNDBUF_LENGTH_PROP_NAME, SOCKET_SNDBUF_LENGTH_DEFAULT);
/**
* Property name for IP_MULTICAST_TTL setting on UDP sockets.
*/
public static final String SOCKET_MULTICAST_TTL_PROP_NAME = "aeron.socket.multicast.ttl";
/**
* Multicast TTL value, 0 means use OS default.
*/
public static final int SOCKET_MULTICAST_TTL_DEFAULT = 0;
/**
* Multicast TTL value.
*/
public static final int SOCKET_MULTICAST_TTL = getInteger(
SOCKET_MULTICAST_TTL_PROP_NAME, SOCKET_MULTICAST_TTL_DEFAULT);
/**
* Property name for linger timeout on {@link Publication}s.
*/
public static final String PUBLICATION_LINGER_PROP_NAME = "aeron.publication.linger.timeout";
/**
* Default time for {@link Publication}s to linger before cleanup.
*/
public static final long PUBLICATION_LINGER_DEFAULT_NS = TimeUnit.SECONDS.toNanos(5);
/**
* Time for {@link Publication}s to linger before cleanup.
*/
public static final long PUBLICATION_LINGER_NS = getLong(
PUBLICATION_LINGER_PROP_NAME, PUBLICATION_LINGER_DEFAULT_NS);
/**
* Property name for {@link Aeron} client liveness timeout.
*/
public static final String CLIENT_LIVENESS_TIMEOUT_PROP_NAME = "aeron.client.liveness.timeout";
/**
* Default timeout for client liveness in nanoseconds.
*/
public static final long CLIENT_LIVENESS_TIMEOUT_DEFAULT_NS = TimeUnit.MILLISECONDS.toNanos(5000);
/**
* Timeout for client liveness in nanoseconds.
*/
public static final long CLIENT_LIVENESS_TIMEOUT_NS = getLong(
CLIENT_LIVENESS_TIMEOUT_PROP_NAME, CLIENT_LIVENESS_TIMEOUT_DEFAULT_NS);
/**
* Property name for {@link Image} liveness timeout.
*/
public static final String IMAGE_LIVENESS_TIMEOUT_PROP_NAME = "aeron.image.liveness.timeout";
/**
* Default timeout for {@link Image} liveness in nanoseconds.
*/
public static final long IMAGE_LIVENESS_TIMEOUT_DEFAULT_NS = TimeUnit.SECONDS.toNanos(10);
/**
* Timeout for {@link Image} liveness in nanoseconds.
*/
public static final long IMAGE_LIVENESS_TIMEOUT_NS = getLong(
IMAGE_LIVENESS_TIMEOUT_PROP_NAME, IMAGE_LIVENESS_TIMEOUT_DEFAULT_NS);
/**
* Property name for window limit on {@link Publication} side.
*/
public static final String PUBLICATION_TERM_WINDOW_LENGTH_PROP_NAME = "aeron.publication.term.window.length";
/**
* Publication term window length for flow control in bytes.
*/
public static final int PUBLICATION_TERM_WINDOW_LENGTH = getInteger(PUBLICATION_TERM_WINDOW_LENGTH_PROP_NAME, 0);
/**
* Property name for window limit for IPC publications.
*/
public static final String IPC_PUBLICATION_TERM_WINDOW_LENGTH_PROP_NAME =
"aeron.ipc.publication.term.window.length";
/**
* IPC Publication term window length for flow control in bytes.
*/
public static final int IPC_PUBLICATION_TERM_WINDOW_LENGTH = getInteger(
IPC_PUBLICATION_TERM_WINDOW_LENGTH_PROP_NAME, 0);
/**
* Property name for {@link Publication} unblock timeout.
*/
public static final String PUBLICATION_UNBLOCK_TIMEOUT_PROP_NAME = "aeron.publication.unblock.timeout";
/**
* Timeout for {@link Publication} unblock in nanoseconds.
*/
public static final long PUBLICATION_UNBLOCK_TIMEOUT_DEFAULT_NS = TimeUnit.SECONDS.toNanos(10);
/**
* Publication timeout for when to unblock a partially written message.
*/
public static final long PUBLICATION_UNBLOCK_TIMEOUT_NS = getLong(
PUBLICATION_UNBLOCK_TIMEOUT_PROP_NAME, PUBLICATION_UNBLOCK_TIMEOUT_DEFAULT_NS);
private static final String DEFAULT_IDLE_STRATEGY = "org.agrona.concurrent.BackoffIdleStrategy";
static final long AGENT_IDLE_MAX_SPINS = 100;
static final long AGENT_IDLE_MAX_YIELDS = 100;
static final long AGENT_IDLE_MIN_PARK_NS = TimeUnit.NANOSECONDS.toNanos(1);
static final long AGENT_IDLE_MAX_PARK_NS = TimeUnit.MICROSECONDS.toNanos(100);
private static final String CONTROLLABLE_IDLE_STRATEGY = "org.agrona.concurrent.ControllableIdleStrategy";
/**
* Property name for {@link IdleStrategy} to be employed by {@link Sender} for {@link ThreadingMode#DEDICATED}.
*/
public static final String SENDER_IDLE_STRATEGY_PROP_NAME = "aeron.sender.idle.strategy";
/**
* {@link IdleStrategy} to be employed by {@link Sender} for {@link ThreadingMode#DEDICATED}.
*/
public static final String SENDER_IDLE_STRATEGY = getProperty(
SENDER_IDLE_STRATEGY_PROP_NAME, DEFAULT_IDLE_STRATEGY);
/**
* Property name for {@link IdleStrategy} to be employed by {@link DriverConductor} for
* {@link ThreadingMode#DEDICATED} and {@link ThreadingMode#SHARED_NETWORK}.
*/
public static final String CONDUCTOR_IDLE_STRATEGY_PROP_NAME = "aeron.conductor.idle.strategy";
/**
* {@link IdleStrategy} to be employed by {@link DriverConductor} for {@link ThreadingMode#DEDICATED}
* and {@link ThreadingMode#SHARED_NETWORK}.
*/
public static final String CONDUCTOR_IDLE_STRATEGY = getProperty(
CONDUCTOR_IDLE_STRATEGY_PROP_NAME, DEFAULT_IDLE_STRATEGY);
/**
* Property name for {@link IdleStrategy} to be employed by {@link Receiver} for {@link ThreadingMode#DEDICATED}.
*/
public static final String RECEIVER_IDLE_STRATEGY_PROP_NAME = "aeron.receiver.idle.strategy";
/**
* {@link IdleStrategy} to be employed by {@link Receiver} for {@link ThreadingMode#DEDICATED}.
*/
public static final String RECEIVER_IDLE_STRATEGY = getProperty(
RECEIVER_IDLE_STRATEGY_PROP_NAME, DEFAULT_IDLE_STRATEGY);
/**
* Property name for {@link IdleStrategy} to be employed by {@link Sender} and {@link Receiver} for
* {@link ThreadingMode#SHARED_NETWORK}.
*/
public static final String SHARED_NETWORK_IDLE_STRATEGY_PROP_NAME = "aeron.sharednetwork.idle.strategy";
/**
* {@link IdleStrategy} to be employed by {@link Sender} and {@link Receiver} for
* {@link ThreadingMode#SHARED_NETWORK}.
*/
public static final String SHARED_NETWORK_IDLE_STRATEGY = getProperty(
SHARED_NETWORK_IDLE_STRATEGY_PROP_NAME, DEFAULT_IDLE_STRATEGY);
/**
* Property name for {@link IdleStrategy} to be employed by {@link Sender}, {@link Receiver},
* and {@link DriverConductor} for {@link ThreadingMode#SHARED}.
*/
public static final String SHARED_IDLE_STRATEGY_PROP_NAME = "aeron.shared.idle.strategy";
/**
* {@link IdleStrategy} to be employed by {@link Sender}, {@link Receiver}, and {@link DriverConductor}
* for {@link ThreadingMode#SHARED}.
*/
public static final String SHARED_IDLE_STRATEGY = getProperty(
SHARED_IDLE_STRATEGY_PROP_NAME, DEFAULT_IDLE_STRATEGY);
/**
* Property name for {@link FlowControl} to be employed for unicast channels.
*/
public static final String UNICAST_FLOW_CONTROL_STRATEGY_PROP_NAME = "aeron.unicast.flow.control.strategy";
/**
* {@link FlowControl} to be employed for unicast channels.
*/
public static final String UNICAST_FLOW_CONTROL_STRATEGY = getProperty(
UNICAST_FLOW_CONTROL_STRATEGY_PROP_NAME, "io.aeron.driver.UnicastFlowControl");
/**
* Property name for {@link FlowControl} to be employed for multicast channels.
*/
public static final String MULTICAST_FLOW_CONTROL_STRATEGY_PROP_NAME = "aeron.multicast.flow.control.strategy";
/**
* {@link FlowControl} to be employed for multicast channels.
*/
public static final String MULTICAST_FLOW_CONTROL_STRATEGY = getProperty(
MULTICAST_FLOW_CONTROL_STRATEGY_PROP_NAME, "io.aeron.driver.MaxMulticastFlowControl");
/**
* Property name for {@link FlowControlSupplier} to be employed for unicast channels.
*/
public static final String UNICAST_FLOW_CONTROL_STRATEGY_SUPPLIER_PROP_NAME = "aeron.unicast.FlowControl.supplier";
/**
* {@link FlowControlSupplier} to be employed for unicast channels.
*/
public static final String UNICAST_FLOW_CONTROL_STRATEGY_SUPPLIER = getProperty(
UNICAST_FLOW_CONTROL_STRATEGY_SUPPLIER_PROP_NAME, "io.aeron.driver.DefaultUnicastFlowControlSupplier");
/**
 * Property name for {@link FlowControlSupplier} to be employed for multicast channels.
*/
public static final String MULTICAST_FLOW_CONTROL_STRATEGY_SUPPLIER_PROP_NAME =
"aeron.multicast.FlowControl.supplier";
/**
* {@link FlowControlSupplier} to be employed for multicast channels.
*/
public static final String MULTICAST_FLOW_CONTROL_STRATEGY_SUPPLIER = getProperty(
MULTICAST_FLOW_CONTROL_STRATEGY_SUPPLIER_PROP_NAME, "io.aeron.driver.DefaultMulticastFlowControlSupplier");
/**
* Length of the maximum transmission unit of the media driver's protocol
*/
public static final String MTU_LENGTH_PROP_NAME = "aeron.mtu.length";
/**
* Default length is greater than typical Ethernet MTU so will fragment to save on system calls.
*/
public static final int MTU_LENGTH_DEFAULT = 4096;
/**
* Length of the MTU to use for sending messages.
*/
public static final int MTU_LENGTH = getInteger(MTU_LENGTH_PROP_NAME, MTU_LENGTH_DEFAULT);
/**
* Maximum UDP datagram payload size for IPv4. Jumbo datagrams from IPv6 are not supported.
*/
public static final int MAX_UDP_PAYLOAD_LENGTH = 65507;
/**
* {@link ThreadingMode} to be used by the Aeron {@link MediaDriver}
*/
public static final String THREADING_MODE_PROP_NAME = "aeron.threading.mode";
static final ThreadingMode THREADING_MODE_DEFAULT = ThreadingMode.valueOf(
getProperty(THREADING_MODE_PROP_NAME, DEDICATED.name()));
/**
* How often to check liveness and cleanup
*/
public static final long HEARTBEAT_TIMEOUT_NS = TimeUnit.SECONDS.toNanos(1);
/**
* Property name for {@link SendChannelEndpointSupplier}.
*/
public static final String SEND_CHANNEL_ENDPOINT_SUPPLIER_PROP_NAME = "aeron.SendChannelEndpoint.supplier";
/**
 * {@link SendChannelEndpointSupplier} to provide endpoint extension behaviour.
*/
public static final String SEND_CHANNEL_ENDPOINT_SUPPLIER = getProperty(
SEND_CHANNEL_ENDPOINT_SUPPLIER_PROP_NAME, "io.aeron.driver.DefaultSendChannelEndpointSupplier");
/**
* Property name for {@link ReceiveChannelEndpointSupplier}.
*/
public static final String RECEIVE_CHANNEL_ENDPOINT_SUPPLIER_PROP_NAME = "aeron.ReceiveChannelEndpoint.supplier";
/**
* {@link ReceiveChannelEndpointSupplier} to provide endpoint extension behaviour.
*/
public static final String RECEIVE_CHANNEL_ENDPOINT_SUPPLIER = getProperty(
RECEIVE_CHANNEL_ENDPOINT_SUPPLIER_PROP_NAME, "io.aeron.driver.DefaultReceiveChannelEndpointSupplier");
/**
* Property name for Application Specific Feedback added to Status Messages by the driver.
*/
public static final String SM_APPLICATION_SPECIFIC_FEEDBACK_PROP_NAME =
"aeron.flow.control.sm.applicationSpecificFeedback";
/**
* Value to use for all Status Message Application Specific Feedback values from the driver.
*/
public static final byte[] SM_APPLICATION_SPECIFIC_FEEDBACK = fromHex(getProperty(
SM_APPLICATION_SPECIFIC_FEEDBACK_PROP_NAME, ""));
/**
* Property name for {@link CongestionControlSupplier} to be employed for receivers.
*/
public static final String CONGESTION_CONTROL_STRATEGY_SUPPLIER_PROP_NAME = "aeron.CongestionControl.supplier";
/**
* {@link CongestionControlSupplier} to be employed for receivers.
*/
public static final String CONGESTION_CONTROL_STRATEGY_SUPPLIER = getProperty(
CONGESTION_CONTROL_STRATEGY_SUPPLIER_PROP_NAME, "io.aeron.driver.DefaultCongestionControlSupplier");
/**
* Capacity for the command queues used between driver agents.
*/
public static final int CMD_QUEUE_CAPACITY = 1024;
/**
* Timeout on cleaning up pending SETUP state on subscriber.
*/
public static final long PENDING_SETUPS_TIMEOUT_NS = TimeUnit.MILLISECONDS.toNanos(1000);
/**
* Timeout between SETUP frames for publications during initial setup phase.
*/
public static final long PUBLICATION_SETUP_TIMEOUT_NS = TimeUnit.MILLISECONDS.toNanos(100);
/**
* Timeout between heartbeats for publications.
*/
public static final long PUBLICATION_HEARTBEAT_TIMEOUT_NS = TimeUnit.MILLISECONDS.toNanos(100);
/**
* Default group size estimate for NAK delay randomization.
*/
public static final int NAK_GROUPSIZE_DEFAULT = 10;
/**
* Default group RTT estimate for NAK delay randomization in ms.
*/
public static final int NAK_GRTT_DEFAULT = 10;
/**
* Default max backoff for NAK delay randomization in ms.
*/
public static final long NAK_MAX_BACKOFF_DEFAULT = TimeUnit.MILLISECONDS.toNanos(60);
/**
* Multicast NAK delay is immediate initial with delayed subsequent delay.
*/
public static final OptimalMulticastDelayGenerator NAK_MULTICAST_DELAY_GENERATOR =
new OptimalMulticastDelayGenerator(NAK_MAX_BACKOFF_DEFAULT, NAK_GROUPSIZE_DEFAULT, NAK_GRTT_DEFAULT);
/**
* Default Unicast NAK delay in nanoseconds.
*/
public static final long NAK_UNICAST_DELAY_DEFAULT_NS = TimeUnit.MILLISECONDS.toNanos(60);
/**
* Unicast NAK delay is immediate initial with delayed subsequent delay.
*/
public static final StaticDelayGenerator NAK_UNICAST_DELAY_GENERATOR = new StaticDelayGenerator(
NAK_UNICAST_DELAY_DEFAULT_NS, true);
/**
* Default delay for retransmission of data for unicast.
*/
public static final long RETRANSMIT_UNICAST_DELAY_DEFAULT_NS = TimeUnit.NANOSECONDS.toNanos(0);
/**
* Source uses same for unicast and multicast. For ticks.
*/
public static final FeedbackDelayGenerator RETRANSMIT_UNICAST_DELAY_GENERATOR =
() -> RETRANSMIT_UNICAST_DELAY_DEFAULT_NS;
/**
* Default delay for linger for unicast.
*/
public static final long RETRANSMIT_UNICAST_LINGER_DEFAULT_NS = TimeUnit.MILLISECONDS.toNanos(60);
/**
* Delay for linger for unicast.
*/
public static final FeedbackDelayGenerator RETRANSMIT_UNICAST_LINGER_GENERATOR =
() -> RETRANSMIT_UNICAST_LINGER_DEFAULT_NS;
/**
* Default max number of active retransmissions per connected stream.
*/
public static final int MAX_RETRANSMITS_DEFAULT = 16;
/**
* Validate the the term buffer length is a power of two.
*
* @param length of the term buffer
*/
public static void validateTermBufferLength(final int length)
{
if (!BitUtil.isPowerOfTwo(length))
{
throw new IllegalStateException("Term buffer length must be a positive power of 2: " + length);
}
}
/**
* How far ahead the publisher can get from the sender position.
*
* @param termBufferLength to be used when {@link #PUBLICATION_TERM_WINDOW_LENGTH} is not set.
* @return the length to be used for the publication window.
*/
public static int publicationTermWindowLength(final int termBufferLength)
{
int publicationTermWindowLength = termBufferLength / 2;
if (0 != PUBLICATION_TERM_WINDOW_LENGTH)
{
publicationTermWindowLength = Math.min(PUBLICATION_TERM_WINDOW_LENGTH, publicationTermWindowLength);
}
return publicationTermWindowLength;
}
/**
* How far ahead the publisher can get from the sender position for IPC only.
*
* @param termBufferLength to be used when {@link #IPC_PUBLICATION_TERM_WINDOW_LENGTH} is not set.
* @return the length to be used for the publication window.
*/
public static int ipcPublicationTermWindowLength(final int termBufferLength)
{
int publicationTermWindowLength = termBufferLength;
if (0 != IPC_PUBLICATION_TERM_WINDOW_LENGTH)
{
publicationTermWindowLength = Math.min(IPC_PUBLICATION_TERM_WINDOW_LENGTH, publicationTermWindowLength);
}
return publicationTermWindowLength;
}
/**
* How large the term buffer should be for IPC only.
*
* @param termBufferLength to be used when {@link #IPC_TERM_BUFFER_LENGTH} is not set.
* @return the length to be used for the term buffer in bytes
*/
public static int ipcTermBufferLength(final int termBufferLength)
{
return 0 != IPC_TERM_BUFFER_LENGTH ? IPC_TERM_BUFFER_LENGTH : termBufferLength;
}
/**
* Validate that the initial window length is suitably greater than MTU.
*
* @param initialWindowLength to be validated.
* @param mtuLength against which to validate.
*/
public static void validateInitialWindowLength(final int initialWindowLength, final int mtuLength)
{
if (mtuLength > initialWindowLength)
{
throw new IllegalStateException("Initial window length must be >= to MTU length: " + mtuLength);
}
}
    /**
     * Get the {@link IdleStrategy} that should be applied to {@link org.agrona.concurrent.Agent}s.
     *
     * @param strategyName fully qualified class name of the strategy to be created.
     * @param controllableStatus status indicator for what the strategy should do; only read when the
     *                           controllable strategy is selected.
     * @return the newly created IdleStrategy.
     */
    public static IdleStrategy agentIdleStrategy(final String strategyName, final StatusIndicator controllableStatus)
    {
        IdleStrategy idleStrategy = null;
        switch (strategyName)
        {
            // Well-known strategies are constructed directly to avoid reflection on the common paths.
            case DEFAULT_IDLE_STRATEGY:
                idleStrategy = new BackoffIdleStrategy(
                    AGENT_IDLE_MAX_SPINS, AGENT_IDLE_MAX_YIELDS, AGENT_IDLE_MIN_PARK_NS, AGENT_IDLE_MAX_PARK_NS);
                break;

            case CONTROLLABLE_IDLE_STRATEGY:
                idleStrategy = new ControllableIdleStrategy(controllableStatus);
                // Default the controllable strategy to PARK until explicitly changed.
                controllableStatus.setOrdered(ControllableIdleStrategy.PARK);
                break;

            default:
                // Any other name is treated as a class to be instantiated reflectively via its
                // no-arg constructor; failures are rethrown unchecked.
                try
                {
                    idleStrategy = (IdleStrategy)Class.forName(strategyName).newInstance();
                }
                catch (final Exception ex)
                {
                    LangUtil.rethrowUnchecked(ex);
                }
                break;
        }

        return idleStrategy;
    }
    // Each factory below resolves the strategy name configured for one agent/threading-mode
    // combination and delegates to agentIdleStrategy(String, StatusIndicator).

    static IdleStrategy senderIdleStrategy(final StatusIndicator controllableStatus)
    {
        return agentIdleStrategy(SENDER_IDLE_STRATEGY, controllableStatus);
    }

    static IdleStrategy conductorIdleStrategy(final StatusIndicator controllableStatus)
    {
        return agentIdleStrategy(CONDUCTOR_IDLE_STRATEGY, controllableStatus);
    }

    static IdleStrategy receiverIdleStrategy(final StatusIndicator controllableStatus)
    {
        return agentIdleStrategy(RECEIVER_IDLE_STRATEGY, controllableStatus);
    }

    static IdleStrategy sharedNetworkIdleStrategy(final StatusIndicator controllableStatus)
    {
        return agentIdleStrategy(SHARED_NETWORK_IDLE_STRATEGY, controllableStatus);
    }

    static IdleStrategy sharedIdleStrategy(final StatusIndicator controllableStatus)
    {
        return agentIdleStrategy(SHARED_IDLE_STRATEGY, controllableStatus);
    }
    // These accessors read their system property on every call (rather than caching at class
    // load) so values set after class initialisation are still observed.

    static int termBufferLength()
    {
        return getInteger(TERM_BUFFER_LENGTH_PROP_NAME, TERM_BUFFER_LENGTH_DEFAULT);
    }

    static int maxTermBufferLength()
    {
        return getInteger(TERM_BUFFER_MAX_LENGTH_PROP_NAME, TERM_BUFFER_LENGTH_MAX_DEFAULT);
    }

    static int initialWindowLength()
    {
        return getInteger(INITIAL_WINDOW_LENGTH_PROP_NAME, INITIAL_WINDOW_LENGTH_DEFAULT);
    }

    static long statusMessageTimeout()
    {
        return getLong(STATUS_MESSAGE_TIMEOUT_PROP_NAME, STATUS_MESSAGE_TIMEOUT_DEFAULT_NS);
    }

    static int sendToStatusMessagePollRatio()
    {
        return getInteger(SEND_TO_STATUS_POLL_RATIO_PROP_NAME, SEND_TO_STATUS_POLL_RATIO_DEFAULT);
    }
    /**
     * Get the supplier of {@link SendChannelEndpoint}s which can be used for
     * debugging, monitoring, or modifying the behaviour when sending to the media channel.
     *
     * @return the {@link SendChannelEndpointSupplier}.
     */
    static SendChannelEndpointSupplier sendChannelEndpointSupplier()
    {
        SendChannelEndpointSupplier supplier = null;
        try
        {
            // Instantiate the configured supplier class reflectively via its no-arg constructor.
            supplier = (SendChannelEndpointSupplier)Class.forName(SEND_CHANNEL_ENDPOINT_SUPPLIER).newInstance();
        }
        catch (final Exception ex)
        {
            // Rethrown unchecked so the supplier reference is never returned null on failure.
            LangUtil.rethrowUnchecked(ex);
        }

        return supplier;
    }
    /**
     * Get the supplier of {@link ReceiveChannelEndpoint}s which can be used for
     * debugging, monitoring, or modifying the behaviour when receiving from the media channel.
     *
     * @return the {@link ReceiveChannelEndpointSupplier}.
     */
    static ReceiveChannelEndpointSupplier receiveChannelEndpointSupplier()
    {
        ReceiveChannelEndpointSupplier supplier = null;
        try
        {
            // Instantiate the configured supplier class reflectively via its no-arg constructor.
            supplier = (ReceiveChannelEndpointSupplier)Class.forName(RECEIVE_CHANNEL_ENDPOINT_SUPPLIER).newInstance();
        }
        catch (final Exception ex)
        {
            // Rethrown unchecked so the supplier reference is never returned null on failure.
            LangUtil.rethrowUnchecked(ex);
        }

        return supplier;
    }
    /**
     * Get the supplier of {@link FlowControl}s which can be used for changing behavior of flow control for unicast
     * publications.
     *
     * @return the {@link FlowControlSupplier}.
     */
    static FlowControlSupplier unicastFlowControlSupplier()
    {
        FlowControlSupplier supplier = null;
        try
        {
            // Instantiate the configured supplier class reflectively via its no-arg constructor.
            supplier = (FlowControlSupplier)Class.forName(UNICAST_FLOW_CONTROL_STRATEGY_SUPPLIER).newInstance();
        }
        catch (final Exception ex)
        {
            // Rethrown unchecked so the supplier reference is never returned null on failure.
            LangUtil.rethrowUnchecked(ex);
        }

        return supplier;
    }
    /**
     * Get the supplier of {@link FlowControl}s which can be used for changing behavior of flow control for multicast
     * publications.
     *
     * @return the {@link FlowControlSupplier}.
     */
    static FlowControlSupplier multicastFlowControlSupplier()
    {
        FlowControlSupplier supplier = null;
        try
        {
            // Instantiate the configured supplier class reflectively via its no-arg constructor.
            supplier = (FlowControlSupplier)Class.forName(MULTICAST_FLOW_CONTROL_STRATEGY_SUPPLIER).newInstance();
        }
        catch (final Exception ex)
        {
            // Rethrown unchecked so the supplier reference is never returned null on failure.
            LangUtil.rethrowUnchecked(ex);
        }

        return supplier;
    }
    /**
     * Get the supplier of {@link CongestionControl}s which can be used for receivers.
     *
     * @return the {@link CongestionControlSupplier}
     */
    static CongestionControlSupplier congestionControlSupplier()
    {
        CongestionControlSupplier supplier = null;
        try
        {
            // Instantiate the configured supplier class reflectively via its no-arg constructor.
            supplier = (CongestionControlSupplier)Class.forName(CONGESTION_CONTROL_STRATEGY_SUPPLIER).newInstance();
        }
        catch (final Exception ex)
        {
            // Rethrown unchecked so the supplier reference is never returned null on failure.
            LangUtil.rethrowUnchecked(ex);
        }

        return supplier;
    }
/**
* Validate the the MTU is an appropriate length. MTU lengths must be a multiple of
* {@link FrameDescriptor#FRAME_ALIGNMENT}.
*
* @param mtuLength to be validated.
* @throws ConfigurationException if the MTU length is not valid.
*/
public static void validateMtuLength(final int mtuLength)
{
if (mtuLength < DataHeaderFlyweight.HEADER_LENGTH || mtuLength > MAX_UDP_PAYLOAD_LENGTH)
{
throw new ConfigurationException(
"mtuLength must be a >= HEADER_LENGTH and <= " + MAX_UDP_PAYLOAD_LENGTH + ": mtuLength=" + mtuLength);
}
if ((mtuLength % FrameDescriptor.FRAME_ALIGNMENT) != 0)
{
throw new ConfigurationException(String.format(
"mtuLength must be a multiple of %d: mtuLength=%d",
FrameDescriptor.FRAME_ALIGNMENT, mtuLength));
}
}
}
| |
/*
* Copyright 2014 Simone Filice and Giuseppe Castellucci and Danilo Croce and Roberto Basili
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.uniroma2.sag.kelp.examples.demo.tweetsent2013;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import it.uniroma2.sag.kelp.data.dataset.Dataset;
import it.uniroma2.sag.kelp.data.dataset.SimpleDataset;
import it.uniroma2.sag.kelp.data.example.Example;
import it.uniroma2.sag.kelp.data.label.Label;
import it.uniroma2.sag.kelp.kernel.Kernel;
import it.uniroma2.sag.kelp.kernel.cache.FixIndexSquaredNormCache;
import it.uniroma2.sag.kelp.kernel.cache.FixSizeKernelCache;
import it.uniroma2.sag.kelp.kernel.standard.LinearKernelCombination;
import it.uniroma2.sag.kelp.kernel.standard.NormalizationKernel;
import it.uniroma2.sag.kelp.kernel.standard.PolynomialKernel;
import it.uniroma2.sag.kelp.kernel.standard.RbfKernel;
import it.uniroma2.sag.kelp.kernel.vector.LinearKernel;
import it.uniroma2.sag.kelp.learningalgorithm.classification.libsvm.BinaryCSvmClassification;
import it.uniroma2.sag.kelp.learningalgorithm.classification.multiclassification.OneVsAllLearning;
import it.uniroma2.sag.kelp.predictionfunction.classifier.multiclass.OneVsAllClassificationOutput;
import it.uniroma2.sag.kelp.predictionfunction.classifier.multiclass.OneVsAllClassifier;
import it.uniroma2.sag.kelp.utils.JacksonSerializerWrapper;
import it.uniroma2.sag.kelp.utils.ObjectSerializer;
import it.uniroma2.sag.kelp.utils.evaluation.MulticlassClassificationEvaluator;
import it.uniroma2.sag.kelp.utils.exception.NoSuchPerformanceMeasureException;
/**
 * Demo of a tweet sentiment classification pipeline on the SemEval 2013
 * "Sentiment Analysis in Twitter" data. It loads the train/test datasets,
 * builds one of several kernels over bag-of-words (BOW) and word-space (WS)
 * representations, tunes the SVM regularization parameter C on a fixed split
 * of the training data and finally trains and evaluates a One-vs-All
 * multi-class SVM on the test set.
 */
public class TweetSentimentAnalysisSemeval2013 {

	/** Column separator used in the printed evaluation report. */
	private static String FIELD_SEP = "\t";

	/** Destination file for the gold/predicted label dump of the test set. */
	private static String errors_file = "src/main/resources/tweetSentiment2013/errors.txt";

	/**
	 * Entry point.
	 *
	 * @param args unused
	 * @throws Exception if the datasets cannot be read or learning fails
	 */
	public static void main(String[] args) throws Exception {
		// Fraction of the training data used for learning while tuning C; the
		// remainder acts as a validation set.
		float split = 0.8f;
		String train_file = "src/main/resources/tweetSentiment2013/train.klp.gz";
		String test_file = "src/main/resources/tweetSentiment2013/test.klp.gz";
		// Kernel selector, see the switch statement below (1 = normalized BOW).
		int kernelmode = 1;
		// Degree/width for the polynomial and RBF kernels (modes 2, 4 and 6).
		float polyD = 0;
		float gamma = 0;
		// Candidate values for the SVM regularization parameter C.
		float[] Cs = new float[] { 0.1f, 0.5f, 1f };

		// Read the training dataset
		SimpleDataset trainingSet = new SimpleDataset();
		trainingSet.populate(train_file);
		// Read the test dataset
		SimpleDataset testSet = new SimpleDataset();
		testSet.populate(test_file);

		// The kernel cache must be able to host every train and test example.
		int cacheSize = trainingSet.getNumberOfExamples()
				+ testSet.getNumberOfExamples();

		// Initialize the requested kernel (defaults to the BOW kernel).
		Kernel kernel = null;
		switch (kernelmode) {
		case 1:
			kernel = getBowKernel(cacheSize);
			break;
		case 2:
			kernel = getPolyBow(cacheSize, polyD);
			break;
		case 3:
			kernel = getWordspaceKernel(cacheSize);
			break;
		case 4:
			kernel = getRbfWordspaceKernel(cacheSize, gamma);
			break;
		case 5:
			kernel = getBowWordSpaceKernel(cacheSize);
			break;
		case 6:
			kernel = getPolyBowRbfWordspaceKernel(cacheSize, polyD, gamma);
			break;
		default:
			kernel = getBowKernel(cacheSize);
			break;
		}

		// Find the best C on a held-out portion of the training data.
		float c = tune(trainingSet, kernel, split, Cs);
		System.out.println("start testing with C=" + c);
		// Retrain on the full training set and evaluate on the test set.
		float f1 = test(trainingSet, kernel, c, testSet, true);
		System.out.println("Mean F1 on test set=" + f1);
	}

	/**
	 * Learns a One-vs-All multi-class SVM with the given kernel and C on
	 * {@code trainingSet}, serializes both the learning algorithm
	 * specification and the learned classifier, evaluates on {@code testSet}
	 * and prints a per-class precision/recall/F1 report.
	 *
	 * @param trainingSet dataset used for learning
	 * @param kernel kernel function used by the SVM
	 * @param c regularization parameter (applied to both Cp and Cn)
	 * @param testSet dataset used for evaluation
	 * @param printErrors if {@code true}, gold and predicted labels of every
	 *            test example are written to {@link #errors_file}
	 * @return the macro F1 measured on {@code testSet}
	 * @throws NoSuchPerformanceMeasureException if a requested measure is
	 *             unknown to the evaluator
	 * @throws IOException if a serialization or error-dump file cannot be
	 *             written
	 */
	private static float test(SimpleDataset trainingSet, Kernel kernel,
			float c, SimpleDataset testSet, boolean printErrors)
			throws NoSuchPerformanceMeasureException, IOException {
		// Defensive copy instead of the previous unchecked cast: we only need
		// a concrete ArrayList, not the dataset's internal list type.
		ArrayList<Label> classes = new ArrayList<Label>(
				trainingSet.getClassificationLabels());
		// Binary C-SVM used as the base learner of the One-vs-All schema.
		BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
		svmSolver.setKernel(kernel);
		svmSolver.setCp(c);
		svmSolver.setCn(c);
		svmSolver.setFairness(true);
		// Instantiate a OneVsAll multi-classification schema
		OneVsAllLearning ovaLearner = new OneVsAllLearning();
		ovaLearner.setBaseAlgorithm(svmSolver);
		ovaLearner.setLabels(classes);
		// Learn
		ovaLearner.learn(trainingSet);
		// Save on file both the learning algorithm and the prediction function
		OneVsAllClassifier f = ovaLearner.getPredictionFunction();
		ObjectSerializer serializer = new JacksonSerializerWrapper();
		serializer
				.writeValueOnFile(ovaLearner,
						"src/main/resources/tweetSentiment2013/learningAlgorithmSpecification_multi.klp");
		serializer
				.writeValueOnFile(f,
						"src/main/resources/tweetSentiment2013/classificationAlgorithm_bow_ws.klp");
		// Adopt a built-in evaluator
		MulticlassClassificationEvaluator evaluator = new MulticlassClassificationEvaluator(
				classes);
		PrintStream ps = null;
		try {
			if (printErrors)
				ps = new PrintStream(errors_file, "UTF-8");
			for (Example e : testSet.getExamples()) {
				OneVsAllClassificationOutput predict = f.predict(e);
				Label gold = e.getLabels()[0];
				Label pred = predict.getPredictedClasses().get(0);
				if (ps != null)
					ps.println(gold + "\t" + pred + "\t"
							+ (gold.equals(pred) ? "1" : "0"));
				evaluator.addCount(e, predict);
			}
		} finally {
			// FIX: the stream was previously leaked when prediction or
			// evaluation threw; always release it.
			if (ps != null) {
				ps.flush();
				ps.close();
			}
		}
		Label neu = findLabel("neutral", classes);
		Label pos = findLabel("positive", classes);
		Label neg = findLabel("negative", classes);
		ArrayList<Label> posNeg = new ArrayList<Label>();
		posNeg.add(pos);
		posNeg.add(neg);
		ArrayList<Label> posNegNeu = new ArrayList<Label>();
		posNegNeu.add(pos);
		posNegNeu.add(neg);
		posNegNeu.add(neu);
		// Build the report: one Precision/Recall/F1 column group per class,
		// followed by the SemEval measures F1-Pn (positive/negative mean F1)
		// and F1-Pnn (mean F1 over all three classes).
		StringBuilder b = new StringBuilder();
		for (Label l : posNegNeu) {
			b.append(FIELD_SEP + l + FIELD_SEP);
		}
		b.append("\n");
		for (int i = 0; i < posNegNeu.size(); i++) {
			b.append("Precision" + FIELD_SEP + "Recall" + FIELD_SEP + "F1"
					+ FIELD_SEP);
		}
		b.append("F1-Pn" + FIELD_SEP + "F1-Pnn" + "\n");
		for (Label l : posNegNeu) {
			b.append(evaluator.getPrecisionFor(l) + FIELD_SEP
					+ evaluator.getRecallFor(l) + FIELD_SEP
					+ evaluator.getF1For(l) + FIELD_SEP);
		}
		Object[] args = new Object[1];
		args[0] = posNeg;
		b.append(evaluator.getPerformanceMeasure("MeanF1For", args) + FIELD_SEP);
		b.append(evaluator.getPerformanceMeasure("MeanF1"));
		System.out.println(b.toString());
		return evaluator.getMacroF1();
	}

	/**
	 * Returns the label whose string form equals {@code string} (case
	 * insensitive), or {@code null} if no such label exists in
	 * {@code classes}.
	 */
	private static Label findLabel(String string, List<Label> classes) {
		for (Label l : classes) {
			if (l.toString().equalsIgnoreCase(string))
				return l;
		}
		return null;
	}

	/**
	 * Selects the best C among {@code cs}: the training data is divided with
	 * a class-distribution-invariant split, a model is trained on the first
	 * part for each candidate C and the C achieving the highest macro F1 on
	 * the held-out part is returned.
	 *
	 * @param allTrainingSet full training data to be split
	 * @param kernel kernel function used by the SVM
	 * @param split fraction of data assigned to the learning part
	 * @param cs candidate regularization values
	 * @return the candidate C with the highest validation macro F1
	 */
	private static float tune(SimpleDataset allTrainingSet, Kernel kernel,
			float split, float[] cs) throws NoSuchPerformanceMeasureException,
			IOException {
		float bestC = 0.0f;
		float bestF1 = -Float.MAX_VALUE;
		// Split data preserving the class distribution in both parts.
		Dataset[] split2 = allTrainingSet
				.splitClassDistributionInvariant(split);
		SimpleDataset trainingSet = (SimpleDataset) split2[0];
		SimpleDataset testSet = (SimpleDataset) split2[1];
		// Tune parameter C.
		for (float c : cs) {
			float f1 = test(trainingSet, kernel, c, testSet, false);
			System.out.println("C:" + c + "\t" + f1);
			if (f1 > bestF1) {
				bestF1 = f1;
				bestC = c;
			}
		}
		return bestC;
	}

	/** Normalized linear kernel over the "BOW" representation. */
	private static Kernel getBowKernel(int cacheSize) {
		Kernel kernel = new LinearKernel("BOW");
		// Cache the squared norms needed by the normalization step.
		FixIndexSquaredNormCache normcache = new FixIndexSquaredNormCache(
				cacheSize);
		kernel.setSquaredNormCache(normcache);
		kernel = new NormalizationKernel(kernel);
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		kernel.setKernelCache(cache);
		return kernel;
	}

	/** Normalized polynomial kernel (degree {@code polyD}) over "BOW". */
	private static Kernel getPolyBow(int cacheSize, float polyD) {
		Kernel kernel1 = new LinearKernel("BOW");
		kernel1 = new PolynomialKernel(polyD, kernel1);
		FixIndexSquaredNormCache normcache1 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel1.setSquaredNormCache(normcache1);
		kernel1 = new NormalizationKernel(kernel1);
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		kernel1.setKernelCache(cache);
		return kernel1;
	}

	/** Normalized linear kernel over the "WS" (word space) representation. */
	private static Kernel getWordspaceKernel(int cacheSize) {
		Kernel kernel2 = new LinearKernel("WS");
		FixIndexSquaredNormCache normcache2 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel2.setSquaredNormCache(normcache2);
		kernel2 = new NormalizationKernel(kernel2);
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		kernel2.setKernelCache(cache);
		return kernel2;
	}

	/** Normalized RBF kernel (width {@code gamma}) over "WS". */
	private static Kernel getRbfWordspaceKernel(int cacheSize, float gamma) {
		Kernel kernel2 = new LinearKernel("WS");
		kernel2 = new RbfKernel(gamma, kernel2);
		FixIndexSquaredNormCache normcache2 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel2.setSquaredNormCache(normcache2);
		kernel2 = new NormalizationKernel(kernel2);
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		kernel2.setKernelCache(cache);
		return kernel2;
	}

	/** Equally weighted sum of the normalized "BOW" and "WS" linear kernels. */
	private static Kernel getBowWordSpaceKernel(int cacheSize) {
		Kernel kernel1 = new LinearKernel("BOW");
		FixIndexSquaredNormCache normcache1 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel1.setSquaredNormCache(normcache1);
		kernel1 = new NormalizationKernel(kernel1);
		Kernel kernel2 = new LinearKernel("WS");
		FixIndexSquaredNormCache normcache2 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel2.setSquaredNormCache(normcache2);
		kernel2 = new NormalizationKernel(kernel2);
		LinearKernelCombination combination = new LinearKernelCombination();
		combination.addKernel(1.0f, kernel1);
		combination.addKernel(1.0f, kernel2);
		// Cache the combined kernel values only (the most expensive ones).
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		combination.setKernelCache(cache);
		return combination;
	}

	/**
	 * Equally weighted sum of a normalized polynomial kernel over "BOW" and a
	 * normalized RBF kernel over "WS".
	 */
	private static Kernel getPolyBowRbfWordspaceKernel(int cacheSize,
			float polyD, float gamma) {
		Kernel kernel1 = new LinearKernel("BOW");
		kernel1 = new PolynomialKernel(polyD, kernel1);
		FixIndexSquaredNormCache normcache1 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel1.setSquaredNormCache(normcache1);
		kernel1 = new NormalizationKernel(kernel1);
		Kernel kernel2 = new LinearKernel("WS");
		kernel2 = new RbfKernel(gamma, kernel2);
		FixIndexSquaredNormCache normcache2 = new FixIndexSquaredNormCache(
				cacheSize);
		kernel2.setSquaredNormCache(normcache2);
		kernel2 = new NormalizationKernel(kernel2);
		LinearKernelCombination combination = new LinearKernelCombination();
		combination.addKernel(1.0f, kernel1);
		combination.addKernel(1.0f, kernel2);
		FixSizeKernelCache cache = new FixSizeKernelCache(cacheSize);
		combination.setKernelCache(cache);
		return combination;
	}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sqs.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* For each message in the batch, the response contains a <code> <a>DeleteMessageBatchResultEntry</a> </code> tag if the
* message is deleted or a <code> <a>BatchResultErrorEntry</a> </code> tag if the message can't be deleted.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sqs-2012-11-05/DeleteMessageBatch" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteMessageBatchResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Entries for the messages that were successfully deleted. */
    private com.amazonaws.internal.SdkInternalList<DeleteMessageBatchResultEntry> successful;

    /** Entries for the messages that could not be deleted. */
    private com.amazonaws.internal.SdkInternalList<BatchResultErrorEntry> failed;

    /**
     * Returns the list of <code> <a>DeleteMessageBatchResultEntry</a> </code> items, lazily creating an empty list the
     * first time it is requested.
     *
     * @return the (never {@code null}) list of successful entries.
     */
    public java.util.List<DeleteMessageBatchResultEntry> getSuccessful() {
        com.amazonaws.internal.SdkInternalList<DeleteMessageBatchResultEntry> current = successful;
        if (current == null) {
            current = new com.amazonaws.internal.SdkInternalList<DeleteMessageBatchResultEntry>();
            successful = current;
        }
        return current;
    }

    /**
     * Replaces the list of <code> <a>DeleteMessageBatchResultEntry</a> </code> items with a copy of the given
     * collection ({@code null} clears the field).
     *
     * @param successful
     *        the new successful entries, or {@code null}.
     */
    public void setSuccessful(java.util.Collection<DeleteMessageBatchResultEntry> successful) {
        this.successful = (successful == null) ? null : new com.amazonaws.internal.SdkInternalList<DeleteMessageBatchResultEntry>(successful);
    }

    /**
     * Appends the given <code> <a>DeleteMessageBatchResultEntry</a> </code> items to the existing list (if any). Use
     * {@link #setSuccessful(java.util.Collection)} or {@link #withSuccessful(java.util.Collection)} to replace the
     * existing values instead.
     *
     * @param successful
     *        entries to append.
     * @return this object, enabling method-call chaining.
     */
    public DeleteMessageBatchResult withSuccessful(DeleteMessageBatchResultEntry... successful) {
        if (this.successful == null) {
            this.successful = new com.amazonaws.internal.SdkInternalList<DeleteMessageBatchResultEntry>(successful.length);
        }
        java.util.Collections.addAll(this.successful, successful);
        return this;
    }

    /**
     * Replaces the list of <code> <a>DeleteMessageBatchResultEntry</a> </code> items.
     *
     * @param successful
     *        the new successful entries, or {@code null}.
     * @return this object, enabling method-call chaining.
     */
    public DeleteMessageBatchResult withSuccessful(java.util.Collection<DeleteMessageBatchResultEntry> successful) {
        setSuccessful(successful);
        return this;
    }

    /**
     * Returns the list of <code> <a>BatchResultErrorEntry</a> </code> items, lazily creating an empty list the first
     * time it is requested.
     *
     * @return the (never {@code null}) list of failed entries.
     */
    public java.util.List<BatchResultErrorEntry> getFailed() {
        com.amazonaws.internal.SdkInternalList<BatchResultErrorEntry> current = failed;
        if (current == null) {
            current = new com.amazonaws.internal.SdkInternalList<BatchResultErrorEntry>();
            failed = current;
        }
        return current;
    }

    /**
     * Replaces the list of <code> <a>BatchResultErrorEntry</a> </code> items with a copy of the given collection
     * ({@code null} clears the field).
     *
     * @param failed
     *        the new failed entries, or {@code null}.
     */
    public void setFailed(java.util.Collection<BatchResultErrorEntry> failed) {
        this.failed = (failed == null) ? null : new com.amazonaws.internal.SdkInternalList<BatchResultErrorEntry>(failed);
    }

    /**
     * Appends the given <code> <a>BatchResultErrorEntry</a> </code> items to the existing list (if any). Use
     * {@link #setFailed(java.util.Collection)} or {@link #withFailed(java.util.Collection)} to replace the existing
     * values instead.
     *
     * @param failed
     *        entries to append.
     * @return this object, enabling method-call chaining.
     */
    public DeleteMessageBatchResult withFailed(BatchResultErrorEntry... failed) {
        if (this.failed == null) {
            this.failed = new com.amazonaws.internal.SdkInternalList<BatchResultErrorEntry>(failed.length);
        }
        java.util.Collections.addAll(this.failed, failed);
        return this;
    }

    /**
     * Replaces the list of <code> <a>BatchResultErrorEntry</a> </code> items.
     *
     * @param failed
     *        the new failed entries, or {@code null}.
     * @return this object, enabling method-call chaining.
     */
    public DeleteMessageBatchResult withFailed(java.util.Collection<BatchResultErrorEntry> failed) {
        setFailed(failed);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getSuccessful() != null) {
            text.append("Successful: ").append(getSuccessful()).append(",");
        }
        if (getFailed() != null) {
            text.append("Failed: ").append(getFailed());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null, matching the generated null check.
        if (!(obj instanceof DeleteMessageBatchResult)) {
            return false;
        }
        DeleteMessageBatchResult that = (DeleteMessageBatchResult) obj;
        return java.util.Objects.equals(this.getSuccessful(), that.getSuccessful())
                && java.util.Objects.equals(this.getFailed(), that.getFailed());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getSuccessful() == null ? 0 : getSuccessful().hashCode());
        result = prime * result + (getFailed() == null ? 0 : getFailed().hashCode());
        return result;
    }

    @Override
    public DeleteMessageBatchResult clone() {
        try {
            return (DeleteMessageBatchResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying a surface and related swapchain creation parameters.
*
* <h5>Description</h5>
*
* <p>The members of {@link VkPhysicalDeviceSurfaceInfo2KHR} correspond to the arguments to {@link KHRSurface#vkGetPhysicalDeviceSurfaceCapabilitiesKHR GetPhysicalDeviceSurfaceCapabilitiesKHR}, with {@code sType} and {@code pNext} added for extensibility.</p>
*
* <p>Additional capabilities of a surface <b>may</b> be available to swapchains created with different full-screen exclusive settings - particularly if exclusive full-screen access is application controlled. These additional capabilities <b>can</b> be queried by adding a {@link VkSurfaceFullScreenExclusiveInfoEXT} structure to the {@code pNext} chain of this structure when used to query surface properties. Additionally, for Win32 surfaces with application controlled exclusive full-screen access, chaining a {@link VkSurfaceFullScreenExclusiveWin32InfoEXT} structure <b>may</b> also report additional surface capabilities. These additional capabilities only apply to swapchains created with the same parameters included in the {@code pNext} chain of {@link VkSwapchainCreateInfoKHR}.</p>
*
* <h5>Valid Usage</h5>
*
* <ul>
* <li>If the {@code pNext} chain includes a {@link VkSurfaceFullScreenExclusiveInfoEXT} structure with its {@code fullScreenExclusive} member set to {@link EXTFullScreenExclusive#VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT}, and {@code surface} was created using {@link KHRWin32Surface#vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR}, a {@link VkSurfaceFullScreenExclusiveWin32InfoEXT} structure <b>must</b> be included in the {@code pNext} chain</li>
* </ul>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code sType} <b>must</b> be {@link KHRGetSurfaceCapabilities2#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR}</li>
* <li>Each {@code pNext} member of any structure (including this one) in the {@code pNext} chain <b>must</b> be either {@code NULL} or a pointer to a valid instance of {@link VkSurfaceFullScreenExclusiveInfoEXT} or {@link VkSurfaceFullScreenExclusiveWin32InfoEXT}</li>
* <li>The {@code sType} value of each struct in the {@code pNext} chain <b>must</b> be unique</li>
* <li>{@code surface} <b>must</b> be a valid {@code VkSurfaceKHR} handle</li>
* </ul>
*
* <h5>See Also</h5>
*
* <p>{@link EXTFullScreenExclusive#vkGetDeviceGroupSurfacePresentModes2EXT GetDeviceGroupSurfacePresentModes2EXT}, {@link KHRGetSurfaceCapabilities2#vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR}, {@link KHRGetSurfaceCapabilities2#vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR}, {@link EXTFullScreenExclusive#vkGetPhysicalDeviceSurfacePresentModes2EXT GetPhysicalDeviceSurfacePresentModes2EXT}</p>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkPhysicalDeviceSurfaceInfo2KHR {
* VkStructureType {@link #sType};
* void const * {@link #pNext};
* VkSurfaceKHR {@link #surface};
* }</code></pre>
*/
public class VkPhysicalDeviceSurfaceInfo2KHR extends Struct implements NativeResource {
    /** The struct size in bytes. */
    public static final int SIZEOF;
    /** The struct alignment in bytes. */
    public static final int ALIGNOF;
    /** The struct member offsets. */
    public static final int
        STYPE,
        PNEXT,
        SURFACE;
    static {
        // Native layout of VkPhysicalDeviceSurfaceInfo2KHR:
        // VkStructureType (4 bytes), void* pNext (pointer-sized),
        // VkSurfaceKHR handle (8 bytes — Vulkan non-dispatchable handles
        // are 64-bit). Offsets are computed by the Layout helper, which
        // also accounts for padding/alignment.
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(8)
        );
        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();
        STYPE = layout.offsetof(0);
        PNEXT = layout.offsetof(1);
        SURFACE = layout.offsetof(2);
    }
    /**
     * Creates a {@code VkPhysicalDeviceSurfaceInfo2KHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkPhysicalDeviceSurfaceInfo2KHR(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }
    @Override
    public int sizeof() { return SIZEOF; }
    /** the type of this structure. */
    @NativeType("VkStructureType")
    public int sType() { return nsType(address()); }
    /** {@code NULL} or a pointer to a structure extending this structure. */
    @NativeType("void const *")
    public long pNext() { return npNext(address()); }
    /** the surface that will be associated with the swapchain. */
    @NativeType("VkSurfaceKHR")
    public long surface() { return nsurface(address()); }
    /** Sets the specified value to the {@link #sType} field. */
    public VkPhysicalDeviceSurfaceInfo2KHR sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link KHRGetSurfaceCapabilities2#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR} value to the {@link #sType} field. */
    public VkPhysicalDeviceSurfaceInfo2KHR sType$Default() { return sType(KHRGetSurfaceCapabilities2.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR); }
    /** Sets the specified value to the {@link #pNext} field. */
    public VkPhysicalDeviceSurfaceInfo2KHR pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
    /** Prepends the specified {@link VkSurfaceFullScreenExclusiveInfoEXT} value to the {@code pNext} chain. */
    public VkPhysicalDeviceSurfaceInfo2KHR pNext(VkSurfaceFullScreenExclusiveInfoEXT value) { return this.pNext(value.pNext(this.pNext()).address()); }
    /** Prepends the specified {@link VkSurfaceFullScreenExclusiveWin32InfoEXT} value to the {@code pNext} chain. */
    public VkPhysicalDeviceSurfaceInfo2KHR pNext(VkSurfaceFullScreenExclusiveWin32InfoEXT value) { return this.pNext(value.pNext(this.pNext()).address()); }
    /** Sets the specified value to the {@link #surface} field. */
    public VkPhysicalDeviceSurfaceInfo2KHR surface(@NativeType("VkSurfaceKHR") long value) { nsurface(address(), value); return this; }
    /** Initializes this struct with the specified values. */
    public VkPhysicalDeviceSurfaceInfo2KHR set(
        int sType,
        long pNext,
        long surface
    ) {
        sType(sType);
        pNext(pNext);
        surface(surface);
        return this;
    }
    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkPhysicalDeviceSurfaceInfo2KHR set(VkPhysicalDeviceSurfaceInfo2KHR src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }
    // ----------------------------------- heap / stack allocation helpers
    /** Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkPhysicalDeviceSurfaceInfo2KHR malloc() {
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, nmemAllocChecked(SIZEOF));
    }
    /** Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkPhysicalDeviceSurfaceInfo2KHR calloc() {
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, nmemCallocChecked(1, SIZEOF));
    }
    /** Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance allocated with {@link BufferUtils}. */
    public static VkPhysicalDeviceSurfaceInfo2KHR create() {
        // Backed by a GC-managed ByteBuffer: no explicit free is required.
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, memAddress(container), container);
    }
    /** Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance for the specified memory address. */
    public static VkPhysicalDeviceSurfaceInfo2KHR create(long address) {
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, address);
    }
    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDeviceSurfaceInfo2KHR createSafe(long address) {
        return address == NULL ? null : wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, address);
    }
    /**
     * Returns a new {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }
    /**
     * Returns a new {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }
    /**
     * Returns a new {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }
    /**
     * Create a {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance at the specified memory.
     *
     * @param address the memory address
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }
    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }
    // ----------------------------------- deprecated *Stack variants, kept for binary compatibility
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
    /**
     * Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR malloc(MemoryStack stack) {
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }
    /**
     * Returns a new {@code VkPhysicalDeviceSurfaceInfo2KHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR calloc(MemoryStack stack) {
        return wrap(VkPhysicalDeviceSurfaceInfo2KHR.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }
    /**
     * Returns a new {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }
    /**
     * Returns a new {@link VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceSurfaceInfo2KHR.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }
    // ----------------------------------- unsafe accessors (raw struct address, no bounds checks)
    /** Unsafe version of {@link #sType}. */
    public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkPhysicalDeviceSurfaceInfo2KHR.STYPE); }
    /** Unsafe version of {@link #pNext}. */
    public static long npNext(long struct) { return memGetAddress(struct + VkPhysicalDeviceSurfaceInfo2KHR.PNEXT); }
    /** Unsafe version of {@link #surface}. */
    public static long nsurface(long struct) { return UNSAFE.getLong(null, struct + VkPhysicalDeviceSurfaceInfo2KHR.SURFACE); }
    /** Unsafe version of {@link #sType(int) sType}. */
    public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkPhysicalDeviceSurfaceInfo2KHR.STYPE, value); }
    /** Unsafe version of {@link #pNext(long) pNext}. */
    public static void npNext(long struct, long value) { memPutAddress(struct + VkPhysicalDeviceSurfaceInfo2KHR.PNEXT, value); }
    /** Unsafe version of {@link #surface(long) surface}. */
    public static void nsurface(long struct, long value) { UNSAFE.putLong(null, struct + VkPhysicalDeviceSurfaceInfo2KHR.SURFACE, value); }
    // -----------------------------------
    /** An array of {@link VkPhysicalDeviceSurfaceInfo2KHR} structs. */
    public static class Buffer extends StructBuffer<VkPhysicalDeviceSurfaceInfo2KHR, Buffer> implements NativeResource {
        // Shared flyweight used by StructBuffer to create element views;
        // created at address -1 because it is never dereferenced directly.
        private static final VkPhysicalDeviceSurfaceInfo2KHR ELEMENT_FACTORY = VkPhysicalDeviceSurfaceInfo2KHR.create(-1L);
        /**
         * Creates a new {@code VkPhysicalDeviceSurfaceInfo2KHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkPhysicalDeviceSurfaceInfo2KHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }
        // Wraps a raw address with no backing container (caller manages the memory).
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }
        // Full constructor used internally when slicing/duplicating buffers.
        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }
        @Override
        protected Buffer self() {
            return this;
        }
        @Override
        protected VkPhysicalDeviceSurfaceInfo2KHR getElementFactory() {
            return ELEMENT_FACTORY;
        }
        /** @return the value of the {@link VkPhysicalDeviceSurfaceInfo2KHR#sType} field. */
        @NativeType("VkStructureType")
        public int sType() { return VkPhysicalDeviceSurfaceInfo2KHR.nsType(address()); }
        /** @return the value of the {@link VkPhysicalDeviceSurfaceInfo2KHR#pNext} field. */
        @NativeType("void const *")
        public long pNext() { return VkPhysicalDeviceSurfaceInfo2KHR.npNext(address()); }
        /** @return the value of the {@link VkPhysicalDeviceSurfaceInfo2KHR#surface} field. */
        @NativeType("VkSurfaceKHR")
        public long surface() { return VkPhysicalDeviceSurfaceInfo2KHR.nsurface(address()); }
        /** Sets the specified value to the {@link VkPhysicalDeviceSurfaceInfo2KHR#sType} field. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer sType(@NativeType("VkStructureType") int value) { VkPhysicalDeviceSurfaceInfo2KHR.nsType(address(), value); return this; }
        /** Sets the {@link KHRGetSurfaceCapabilities2#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR} value to the {@link VkPhysicalDeviceSurfaceInfo2KHR#sType} field. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer sType$Default() { return sType(KHRGetSurfaceCapabilities2.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR); }
        /** Sets the specified value to the {@link VkPhysicalDeviceSurfaceInfo2KHR#pNext} field. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer pNext(@NativeType("void const *") long value) { VkPhysicalDeviceSurfaceInfo2KHR.npNext(address(), value); return this; }
        /** Prepends the specified {@link VkSurfaceFullScreenExclusiveInfoEXT} value to the {@code pNext} chain. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer pNext(VkSurfaceFullScreenExclusiveInfoEXT value) { return this.pNext(value.pNext(this.pNext()).address()); }
        /** Prepends the specified {@link VkSurfaceFullScreenExclusiveWin32InfoEXT} value to the {@code pNext} chain. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer pNext(VkSurfaceFullScreenExclusiveWin32InfoEXT value) { return this.pNext(value.pNext(this.pNext()).address()); }
        /** Sets the specified value to the {@link VkPhysicalDeviceSurfaceInfo2KHR#surface} field. */
        public VkPhysicalDeviceSurfaceInfo2KHR.Buffer surface(@NativeType("VkSurfaceKHR") long value) { VkPhysicalDeviceSurfaceInfo2KHR.nsurface(address(), value); return this; }
    }
}
| |
/**
* Most of the code in the Qalingo project is copyrighted Hoteia and licensed
* under the Apache License Version 2.0 (release version 0.8.0)
* http://www.apache.org/licenses/LICENSE-2.0
*
* Copyright (c) Hoteia, 2012-2014
* http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
*
*/
package org.hoteia.qalingo.core.domain;
import java.util.Arrays;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Version;
@Entity
@Table(name="TECO_PRODUCT_BRAND_ATTRIBUTE")
public class ProductBrandAttribute extends AbstractAttribute {
/**
* Generated UID
*/
private static final long serialVersionUID = 8520047419340994580L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name="ID", nullable=false)
private Long id;
@Version
@Column(name="VERSION", nullable=false, columnDefinition="int(11) default 1")
private int version;
@ManyToOne(fetch = FetchType.EAGER)
@JoinColumn(name="ATTRIBUTE_DEFINITION_ID", insertable = true, updatable = true)
private AttributeDefinition attributeDefinition;
@Column(name = "SHORT_STRING_VALUE")
private String shortStringValue;
@Column(name = "LONG_STRING_VALUE")
@Lob
private String longStringValue;
@Column(name="INTEGER_VALUE")
private Integer integerValue;
@Column(name="DOUBLE_VALUE")
private Double doubleValue;
@Column(name="FLOAT_VALUE")
private Float floatValue;
@Column(name="BLOB_VALUE")
@Lob
private byte[] blobValue;
@Column(name="BOOLEAN_VALUE")
private Boolean booleanValue;
@Column(name="LOCALIZATION_CODE")
private String localizationCode;
@Column(name="MARKET_AREA_ID")
private Long marketAreaId;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="START_DATE")
private Date startDate;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="END_DATE")
private Date endDate;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="DATE_CREATE")
private Date dateCreate;
@Temporal(TemporalType.TIMESTAMP)
@Column(name="DATE_UPDATE")
private Date dateUpdate;
public ProductBrandAttribute() {
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public AttributeDefinition getAttributeDefinition() {
return attributeDefinition;
}
public void setAttributeDefinition(AttributeDefinition attributeDefinition) {
this.attributeDefinition = attributeDefinition;
}
public String getShortStringValue() {
return shortStringValue;
}
public void setShortStringValue(String shortStringValue) {
this.shortStringValue = shortStringValue;
}
public String getLongStringValue() {
return longStringValue;
}
public void setLongStringValue(String longStringValue) {
this.longStringValue = longStringValue;
}
public Integer getIntegerValue() {
return integerValue;
}
public void setIntegerValue(Integer integerValue) {
this.integerValue = integerValue;
}
public Double getDoubleValue() {
return doubleValue;
}
public void setDoubleValue(Double doubleValue) {
this.doubleValue = doubleValue;
}
public Float getFloatValue() {
return floatValue;
}
public void setFloatValue(Float floatValue) {
this.floatValue = floatValue;
}
public byte[] getBlobValue() {
return blobValue;
}
public void setBlobValue(byte[] blobValue) {
this.blobValue = blobValue;
}
public Boolean getBooleanValue() {
return booleanValue;
}
public void setBooleanValue(Boolean booleanValue) {
this.booleanValue = booleanValue;
}
public String getLocalizationCode() {
return localizationCode;
}
public void setLocalizationCode(String localizationCode) {
this.localizationCode = localizationCode;
}
public Long getMarketAreaId() {
return marketAreaId;
}
public void setMarketAreaId(Long marketAreaId) {
this.marketAreaId = marketAreaId;
}
public Date getStartDate() {
return startDate;
}
public void setStartDate(Date startDate) {
this.startDate = startDate;
}
public Date getEndDate() {
return endDate;
}
public void setEndDate(Date endDate) {
this.endDate = endDate;
}
public Date getDateCreate() {
return dateCreate;
}
public void setDateCreate(Date dateCreate) {
this.dateCreate = dateCreate;
}
public Date getDateUpdate() {
return dateUpdate;
}
public void setDateUpdate(Date dateUpdate) {
this.dateUpdate = dateUpdate;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((dateCreate == null) ? 0 : dateCreate.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ProductBrandAttribute other = (ProductBrandAttribute) obj;
if (dateCreate == null) {
if (other.dateCreate != null)
return false;
} else if (!dateCreate.equals(other.dateCreate))
return false;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
return true;
}
@Override
public String toString() {
return "ProductBrandAttribute [id=" + id + ", version=" + version + ", shortStringValue=" + shortStringValue + ", longStringValue=" + longStringValue + ", integerValue=" + integerValue + ", doubleValue=" + doubleValue + ", floatValue="
+ floatValue + ", blobValue=" + Arrays.toString(blobValue) + ", booleanValue=" + booleanValue + ", localizationCode=" + localizationCode + ", marketAreaId=" + marketAreaId + ", startDate=" + startDate + ", endDate=" + endDate + ", dateCreate=" + dateCreate + ", dateUpdate=" + dateUpdate + "]";
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.clouddirectory.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2017-01-11/DetachFromIndex" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DetachFromIndexRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
* </p>
*/
private String directoryArn;
/**
* <p>
* A reference to the index object.
* </p>
*/
private ObjectReference indexReference;
/**
* <p>
* A reference to the object being detached from the index.
* </p>
*/
private ObjectReference targetReference;
/**
* <p>
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
* </p>
*
* @param directoryArn
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
*/
public void setDirectoryArn(String directoryArn) {
this.directoryArn = directoryArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
* </p>
*
* @return The Amazon Resource Name (ARN) of the directory the index and object exist in.
*/
public String getDirectoryArn() {
return this.directoryArn;
}
/**
* <p>
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
* </p>
*
* @param directoryArn
* The Amazon Resource Name (ARN) of the directory the index and object exist in.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DetachFromIndexRequest withDirectoryArn(String directoryArn) {
setDirectoryArn(directoryArn);
return this;
}
/**
* <p>
* A reference to the index object.
* </p>
*
* @param indexReference
* A reference to the index object.
*/
public void setIndexReference(ObjectReference indexReference) {
this.indexReference = indexReference;
}
/**
* <p>
* A reference to the index object.
* </p>
*
* @return A reference to the index object.
*/
public ObjectReference getIndexReference() {
return this.indexReference;
}
/**
* <p>
* A reference to the index object.
* </p>
*
* @param indexReference
* A reference to the index object.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DetachFromIndexRequest withIndexReference(ObjectReference indexReference) {
setIndexReference(indexReference);
return this;
}
/**
* <p>
* A reference to the object being detached from the index.
* </p>
*
* @param targetReference
* A reference to the object being detached from the index.
*/
public void setTargetReference(ObjectReference targetReference) {
this.targetReference = targetReference;
}
/**
* <p>
* A reference to the object being detached from the index.
* </p>
*
* @return A reference to the object being detached from the index.
*/
public ObjectReference getTargetReference() {
return this.targetReference;
}
/**
* <p>
* A reference to the object being detached from the index.
* </p>
*
* @param targetReference
* A reference to the object being detached from the index.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DetachFromIndexRequest withTargetReference(ObjectReference targetReference) {
setTargetReference(targetReference);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDirectoryArn() != null)
sb.append("DirectoryArn: ").append(getDirectoryArn()).append(",");
if (getIndexReference() != null)
sb.append("IndexReference: ").append(getIndexReference()).append(",");
if (getTargetReference() != null)
sb.append("TargetReference: ").append(getTargetReference());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DetachFromIndexRequest == false)
return false;
DetachFromIndexRequest other = (DetachFromIndexRequest) obj;
if (other.getDirectoryArn() == null ^ this.getDirectoryArn() == null)
return false;
if (other.getDirectoryArn() != null && other.getDirectoryArn().equals(this.getDirectoryArn()) == false)
return false;
if (other.getIndexReference() == null ^ this.getIndexReference() == null)
return false;
if (other.getIndexReference() != null && other.getIndexReference().equals(this.getIndexReference()) == false)
return false;
if (other.getTargetReference() == null ^ this.getTargetReference() == null)
return false;
if (other.getTargetReference() != null && other.getTargetReference().equals(this.getTargetReference()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDirectoryArn() == null) ? 0 : getDirectoryArn().hashCode());
hashCode = prime * hashCode + ((getIndexReference() == null) ? 0 : getIndexReference().hashCode());
hashCode = prime * hashCode + ((getTargetReference() == null) ? 0 : getTargetReference().hashCode());
return hashCode;
}
@Override
public DetachFromIndexRequest clone() {
return (DetachFromIndexRequest) super.clone();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.metrics.prometheus.tests;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.prometheus.PrometheusReporter;
import org.apache.flink.metrics.prometheus.PrometheusReporterFactory;
import org.apache.flink.tests.util.AutoClosableProcess;
import org.apache.flink.tests.util.CommandLineWrapper;
import org.apache.flink.tests.util.cache.DownloadCache;
import org.apache.flink.tests.util.categories.TravisGroup1;
import org.apache.flink.tests.util.flink.ClusterController;
import org.apache.flink.tests.util.flink.FlinkResource;
import org.apache.flink.tests.util.flink.FlinkResourceSetup;
import org.apache.flink.tests.util.flink.JarLocation;
import org.apache.flink.tests.util.flink.LocalStandaloneFlinkResourceFactory;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.ProcessorArchitecture;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.FACTORY;
import static org.apache.flink.metrics.prometheus.tests.PrometheusReporterEndToEndITCase.TestParams.InstantiationType.REFLECTION;
import static org.apache.flink.tests.util.AutoClosableProcess.runBlocking;
import static org.apache.flink.tests.util.AutoClosableProcess.runNonBlocking;
/**
* End-to-end test for the PrometheusReporter.
*/
@Category(TravisGroup1.class)
@RunWith(Parameterized.class)
public class PrometheusReporterEndToEndITCase extends TestLogger {
private static final Logger LOG = LoggerFactory.getLogger(PrometheusReporterEndToEndITCase.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final String PROMETHEUS_VERSION = "2.4.3";
private static final String PROMETHEUS_FILE_NAME;
private static final String PROMETHEUS_JAR_PREFIX = "flink-metrics-prometheus";
static {
final String base = "prometheus-" + PROMETHEUS_VERSION + '.';
final String os;
final String platform;
switch (OperatingSystem.getCurrentOperatingSystem()) {
case MAC_OS:
os = "darwin";
break;
case WINDOWS:
os = "windows";
break;
default:
os = "linux";
break;
}
switch (ProcessorArchitecture.getProcessorArchitecture()) {
case X86:
platform = "386";
break;
case AMD64:
platform = "amd64";
break;
case ARMv7:
platform = "armv7";
break;
case AARCH64:
platform = "arm64";
break;
default:
platform = "Unknown";
break;
}
PROMETHEUS_FILE_NAME = base + os + "-" + platform;
}
private static final Pattern LOG_REPORTER_PORT_PATTERN = Pattern.compile(".*Started PrometheusReporter HTTP server on port ([0-9]+).*");
@BeforeClass
public static void checkOS() {
Assume.assumeFalse("This test does not run on Windows.", OperatingSystem.isWindows());
}
@Parameterized.Parameters(name = "{index}: {0}")
public static Collection<TestParams> testParameters() {
return Arrays.asList(
TestParams.from("Jar in 'lib'",
builder -> builder.copyJar(PROMETHEUS_JAR_PREFIX, JarLocation.OPT, JarLocation.LIB),
REFLECTION),
TestParams.from("Jar in 'lib'",
builder -> builder.copyJar(PROMETHEUS_JAR_PREFIX, JarLocation.OPT, JarLocation.LIB),
FACTORY),
TestParams.from("Jar in 'plugins'",
builder -> builder.copyJar(PROMETHEUS_JAR_PREFIX, JarLocation.OPT, JarLocation.PLUGINS),
FACTORY),
TestParams.from("Jar in 'lib' and 'plugins'",
builder -> {
builder.copyJar(PROMETHEUS_JAR_PREFIX, JarLocation.OPT, JarLocation.LIB);
builder.copyJar(PROMETHEUS_JAR_PREFIX, JarLocation.OPT, JarLocation.PLUGINS);
},
FACTORY)
);
}
@Rule
public final FlinkResource dist;
public PrometheusReporterEndToEndITCase(TestParams params) {
final FlinkResourceSetup.FlinkResourceSetupBuilder builder = FlinkResourceSetup.builder();
params.getBuilderSetup().accept(builder);
builder.addConfiguration(getFlinkConfig(params.getInstantiationType()));
dist = new LocalStandaloneFlinkResourceFactory().create(builder.build());
}
@Rule
public final TemporaryFolder tmp = new TemporaryFolder();
@Rule
public final DownloadCache downloadCache = DownloadCache.get();
private static Configuration getFlinkConfig(TestParams.InstantiationType instantiationType) {
final Configuration config = new Configuration();
switch (instantiationType) {
case FACTORY:
config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "prom." + ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, PrometheusReporterFactory.class.getName());
break;
case REFLECTION:
config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "prom." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, PrometheusReporter.class.getCanonicalName());
}
config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "prom.port", "9000-9100");
return config;
}
@Test
public void testReporter() throws Exception {
final Path tmpPrometheusDir = tmp.newFolder().toPath().resolve("prometheus");
final Path prometheusBinDir = tmpPrometheusDir.resolve(PROMETHEUS_FILE_NAME);
final Path prometheusConfig = prometheusBinDir.resolve("prometheus.yml");
final Path prometheusBinary = prometheusBinDir.resolve("prometheus");
Files.createDirectory(tmpPrometheusDir);
final Path prometheusArchive = downloadCache.getOrDownload(
"https://github.com/prometheus/prometheus/releases/download/v" + PROMETHEUS_VERSION + '/' + PROMETHEUS_FILE_NAME + ".tar.gz",
tmpPrometheusDir
);
LOG.info("Unpacking Prometheus.");
runBlocking(
CommandLineWrapper
.tar(prometheusArchive)
.extract()
.zipped()
.targetDir(tmpPrometheusDir)
.build());
LOG.info("Setting Prometheus scrape interval.");
runBlocking(
CommandLineWrapper
.sed("s/\\(scrape_interval:\\).*/\\1 1s/", prometheusConfig)
.inPlace()
.build());
try (ClusterController ignored = dist.startCluster(1)) {
final List<Integer> ports = dist
.searchAllLogs(LOG_REPORTER_PORT_PATTERN, matcher -> matcher.group(1))
.map(Integer::valueOf)
.collect(Collectors.toList());
final String scrapeTargets = ports.stream()
.map(port -> "'localhost:" + port + "'")
.collect(Collectors.joining(", "));
LOG.info("Setting Prometheus scrape targets to {}.", scrapeTargets);
runBlocking(
CommandLineWrapper
.sed("s/\\(targets:\\).*/\\1 [" + scrapeTargets + "]/", prometheusConfig)
.inPlace()
.build());
LOG.info("Starting Prometheus server.");
try (AutoClosableProcess prometheus = runNonBlocking(
prometheusBinary.toAbsolutePath().toString(),
"--config.file=" + prometheusConfig.toAbsolutePath(),
"--storage.tsdb.path=" + prometheusBinDir.resolve("data").toAbsolutePath())) {
final OkHttpClient client = new OkHttpClient();
checkMetricAvailability(client, "flink_jobmanager_numRegisteredTaskManagers");
checkMetricAvailability(client, "flink_taskmanager_Status_Network_TotalMemorySegments");
}
}
}
private static void checkMetricAvailability(final OkHttpClient client, final String metric) throws InterruptedException {
final Request jobManagerRequest = new Request.Builder()
.get()
.url("http://localhost:9090/api/v1/query?query=" + metric)
.build();
Exception reportedException = null;
for (int x = 0; x < 30; x++) {
try (Response response = client.newCall(jobManagerRequest).execute()) {
if (response.isSuccessful()) {
final String json = response.body().string();
// Sample response:
//{
// "status": "success",
// "data": {
// "resultType": "vector",
// "result": [{
// "metric": {
// "__name__": "flink_jobmanager_numRegisteredTaskManagers",
// "host": "localhost",
// "instance": "localhost:9000",
// "job": "prometheus"
// },
// "value": [1540548500.107, "1"]
// }]
// }
//}
OBJECT_MAPPER.readTree(json)
.get("data")
.get("result")
.get(0)
.get("value")
.get(1).asInt();
// if we reach this point some value for the given metric was reported to prometheus
return;
} else {
LOG.info("Retrieving metric failed. Retrying... " + response.code() + ":" + response.message());
Thread.sleep(1000);
}
} catch (Exception e) {
reportedException = ExceptionUtils.firstOrSuppressed(e, reportedException);
Thread.sleep(1000);
}
}
throw new AssertionError("Could not retrieve metric " + metric + " from Prometheus.", reportedException);
}
static class TestParams {
private final String jarLocationDescription;
private final Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup;
private final InstantiationType instantiationType;
private TestParams(String jarLocationDescription, Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup, InstantiationType instantiationType) {
this.jarLocationDescription = jarLocationDescription;
this.builderSetup = builderSetup;
this.instantiationType = instantiationType;
}
public static TestParams from(String jarLocationDesription, Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup, InstantiationType instantiationType) {
return new TestParams(jarLocationDesription, builderSetup, instantiationType);
}
public Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> getBuilderSetup() {
return builderSetup;
}
public InstantiationType getInstantiationType() {
return instantiationType;
}
@Override
public String toString() {
return jarLocationDescription + ", instantiated via " + instantiationType.name().toLowerCase();
}
public enum InstantiationType {
REFLECTION,
FACTORY
}
}
}
| |
package com.airbiquity.hap.tts;
import java.io.UnsupportedEncodingException;
/**
* Utilities for encoding and decoding the Base64 representation of
* binary data. See RFCs <a
* href="http://www.ietf.org/rfc/rfc2045.txt">2045</a> and <a
* href="http://www.ietf.org/rfc/rfc3548.txt">3548</a>.
*/
public class Base64Utils {
/**
* Default values for encoder/decoder flags.
*/
public static final int DEFAULT = 0;
/**
* Encoder flag bit to omit the padding '=' characters at the end
* of the output (if any).
*/
public static final int NO_PADDING = 1;
/**
* Encoder flag bit to omit all line terminators (i.e., the output
* will be on one long line).
*/
public static final int NO_WRAP = 2;
/**
* Encoder flag bit to indicate lines should be terminated with a
* CRLF pair instead of just an LF. Has no effect if {@code
* NO_WRAP} is specified as well.
*/
public static final int CRLF = 4;
/**
* Encoder/decoder flag bit to indicate using the "URL and
* filename safe" variant of Base64 (see RFC 3548 section 4) where
* {@code -} and {@code _} are used in place of {@code +} and
* {@code /}.
*/
public static final int URL_SAFE = 8;
/**
* Flag to pass to {@link Base64OutputStream} to indicate that it
* should not close the output stream it is wrapping when it
* itself is closed.
*/
public static final int NO_CLOSE = 16;
// --------------------------------------------------------
// shared code
// --------------------------------------------------------
/* package */ static abstract class Coder {
public byte[] output;
public int op;
/**
* Encode/decode another block of input data. this.output is
* provided by the caller, and must be big enough to hold all
* the coded data. On exit, this.opwill be set to the length
* of the coded data.
*
* @param finish true if this is the final call to process for
* this object. Will finalize the coder state and
* include any final bytes in the output.
*
* @return true if the input so far is good; false if some
* error has been detected in the input stream..
*/
public abstract boolean process(byte[] input, int offset, int len, boolean finish);
/**
* @return the maximum number of bytes a call to process()
* could produce for the given number of input bytes. This may
* be an overestimate.
*/
public abstract int maxOutputSize(int len);
}
// --------------------------------------------------------
// decoding
// --------------------------------------------------------
/**
* Decode the Base64-encoded data in input and return the data in
* a new byte array.
*
* <p>The padding '=' characters at the end are considered optional, but
* if any are present, there must be the correct number of them.
*
* @param str the input String to decode, which is converted to
* bytes using the default charset
* @param flags controls certain features of the decoded output.
* Pass {@code DEFAULT} to decode standard Base64.
*
* @throws IllegalArgumentException if the input contains
* incorrect padding
*/
public static byte[] decode(String str, int flags) {
return decode(str.getBytes(), flags);
}
/**
* Decode the Base64-encoded data in input and return the data in
* a new byte array.
*
* <p>The padding '=' characters at the end are considered optional, but
* if any are present, there must be the correct number of them.
*
* @param input the input array to decode
* @param flags controls certain features of the decoded output.
* Pass {@code DEFAULT} to decode standard Base64.
*
* @throws IllegalArgumentException if the input contains
* incorrect padding
*/
public static byte[] decode(byte[] input, int flags) {
return decode(input, 0, input.length, flags);
}
/**
* Decode the Base64-encoded data in input and return the data in
* a new byte array.
*
* <p>The padding '=' characters at the end are considered optional, but
* if any are present, there must be the correct number of them.
*
* @param input the data to decode
* @param offset the position within the input array at which to start
* @param len the number of bytes of input to decode
* @param flags controls certain features of the decoded output.
* Pass {@code DEFAULT} to decode standard Base64.
*
* @throws IllegalArgumentException if the input contains
* incorrect padding
*/
public static byte[] decode(byte[] input, int offset, int len, int flags) {
// Allocate space for the most data the input could represent.
// (It could contain less if it contains whitespace, etc.)
Decoder decoder = new Decoder(flags, new byte[len*3/4]);
if (!decoder.process(input, offset, len, true)) {
throw new IllegalArgumentException("bad base-64");
}
// Maybe we got lucky and allocated exactly enough output space.
if (decoder.op == decoder.output.length) {
return decoder.output;
}
// Need to shorten the array, so allocate a new one of the
// right size and copy.
byte[] temp = new byte[decoder.op];
System.arraycopy(decoder.output, 0, temp, 0, decoder.op);
return temp;
}
/* package */ static class Decoder extends Coder {
/**
* Lookup table for turning bytes into their position in the
* Base64 alphabet.
*/
private static final int DECODE[] = {
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -2, -1, -1,
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
-1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
};
/**
 * Decode lookup table for the "web safe" variant (RFC 3548
 * sec. 4) where - and _ replace + and /.
 */
private static final int DECODE_WEBSAFE[] = {
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1,
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -2, -1, -1,
    -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, 63,
    -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
};
/** Non-data values in the DECODE arrays. */
private static final int SKIP = -1;   // byte to silently skip (whitespace etc.)
private static final int EQUALS = -2; // the '=' padding character
/**
 * States 0-3 are reading through the next input tuple.
 * State 4 is having read one '=' and expecting exactly
 * one more.
 * State 5 is expecting no more data or padding characters
 * in the input.
 * State 6 is the error state; an error has been detected
 * in the input and no future input can "fix" it.
 */
private int state; // state number (0 to 6)
private int value; // bits of the tuple currently being accumulated
final private int[] alphabet; // decode table in use: DECODE or DECODE_WEBSAFE
/**
 * Creates a decoder that writes decoded bytes into {@code output},
 * choosing the standard or "web safe" alphabet from {@code flags}.
 */
public Decoder(int flags, byte[] output) {
    this.output = output;
    this.alphabet = ((flags & URL_SAFE) != 0) ? DECODE_WEBSAFE : DECODE;
    this.state = 0;
    this.value = 0;
}
/**
 * @return an overestimate for the number of bytes {@code len}
 *         input bytes could decode to.
 */
public int maxOutputSize(int len) {
    // Four base-64 characters decode to at most three bytes; add slack.
    final int decodedBytes = len * 3 / 4;
    return decodedBytes + 10;
}
/**
 * Decode another block of input data.
 *
 * @param input array holding base-64 encoded bytes to decode
 * @param offset index of the first byte of {@code input} to read
 * @param len number of bytes of {@code input} to consume
 * @param finish true when this is the last chunk of the stream; enables
 *        end-of-input validation and flushes any buffered partial tuple
 * @return true if the state machine is still healthy. false if
 *         bad base-64 data has been detected in the input stream.
 */
public boolean process(byte[] input, int offset, int len, boolean finish) {
    // State 6 is sticky: once bad input has been seen, every later call fails.
    if (this.state == 6) return false;
    int p = offset;
    len += offset;
    // Using local variables makes the decoder about 12%
    // faster than if we manipulate the member variables in
    // the loop. (Even alphabet makes a measurable
    // difference, which is somewhat surprising to me since
    // the member variable is final.)
    int state = this.state;
    int value = this.value;
    int op = 0;
    final byte[] output = this.output;
    final int[] alphabet = this.alphabet;
    while (p < len) {
        // Try the fast path: we're starting a new tuple and the
        // next four bytes of the input stream are all data
        // bytes. This corresponds to going through states
        // 0-1-2-3-0. We expect to use this method for most of
        // the data.
        //
        // If any of the next four bytes of input are non-data
        // (whitespace, etc.), value will end up negative. (All
        // the non-data values in decode are small negative
        // numbers, so shifting any of them up and or'ing them
        // together will result in a value with its top bit set.)
        //
        // You can remove this whole block and the output should
        // be the same, just slower.
        if (state == 0) {
            while (p+4 <= len &&
                   (value = ((alphabet[input[p] & 0xff] << 18) |
                             (alphabet[input[p+1] & 0xff] << 12) |
                             (alphabet[input[p+2] & 0xff] << 6) |
                             (alphabet[input[p+3] & 0xff]))) >= 0) {
                output[op+2] = (byte) value;
                output[op+1] = (byte) (value >> 8);
                output[op] = (byte) (value >> 16);
                op += 3;
                p += 4;
            }
            if (p >= len) break;
        }
        // The fast path isn't available -- either we've read a
        // partial tuple, or the next four input bytes aren't all
        // data, or whatever. Fall back to the slower state
        // machine implementation.
        int d = alphabet[input[p++] & 0xff];
        switch (state) {
        case 0:
            if (d >= 0) {
                value = d;
                ++state;
            } else if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        case 1:
            if (d >= 0) {
                value = (value << 6) | d;
                ++state;
            } else if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        case 2:
            if (d >= 0) {
                value = (value << 6) | d;
                ++state;
            } else if (d == EQUALS) {
                // Emit the last (partial) output tuple;
                // expect exactly one more padding character.
                output[op++] = (byte) (value >> 4);
                state = 4;
            } else if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        case 3:
            if (d >= 0) {
                // Emit the output triple and return to state 0.
                value = (value << 6) | d;
                output[op+2] = (byte) value;
                output[op+1] = (byte) (value >> 8);
                output[op] = (byte) (value >> 16);
                op += 3;
                state = 0;
            } else if (d == EQUALS) {
                // Emit the last (partial) output tuple;
                // expect no further data or padding characters.
                output[op+1] = (byte) (value >> 2);
                output[op] = (byte) (value >> 10);
                op += 2;
                state = 5;
            } else if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        case 4:
            if (d == EQUALS) {
                ++state;
            } else if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        case 5:
            if (d != SKIP) {
                this.state = 6;
                return false;
            }
            break;
        }
    }
    if (!finish) {
        // We're out of input, but a future call could provide
        // more.
        this.state = state;
        this.value = value;
        this.op = op;
        return true;
    }
    // Done reading input. Now figure out where we are left in
    // the state machine and finish up.
    switch (state) {
    case 0:
        // Output length is a multiple of three. Fine.
        break;
    case 1:
        // Read one extra input byte, which isn't enough to
        // make another output byte. Illegal.
        this.state = 6;
        return false;
    case 2:
        // Read two extra input bytes, enough to emit 1 more
        // output byte. Fine.
        output[op++] = (byte) (value >> 4);
        break;
    case 3:
        // Read three extra input bytes, enough to emit 2 more
        // output bytes. Fine.
        output[op++] = (byte) (value >> 10);
        output[op++] = (byte) (value >> 2);
        break;
    case 4:
        // Read one padding '=' when we expected 2. Illegal.
        this.state = 6;
        return false;
    case 5:
        // Read all the padding '='s we expected and no more.
        // Fine.
        break;
    }
    this.state = state;
    this.op = op;
    return true;
}
}
// --------------------------------------------------------
// encoding
// --------------------------------------------------------
/**
 * Base64-encode the given data and return a newly allocated
 * String with the result.
 *
 * @param input the data to encode
 * @param flags controls certain features of the encoded output.
 *              Passing {@code DEFAULT} results in output that
 *              adheres to RFC 2045.
 * @return the encoded data as a US-ASCII string
 */
public static String encodeToString(byte[] input, int flags) {
    // The Charset overload cannot throw: it replaces the old
    // String(byte[], "US-ASCII") constructor whose checked
    // UnsupportedEncodingException was impossible anyway.
    return new String(encode(input, flags), java.nio.charset.StandardCharsets.US_ASCII);
}
/**
 * Base64-encode the given data and return a newly allocated
 * String with the result.
 *
 * @param input the data to encode
 * @param offset the position within the input array at which to
 *               start
 * @param len the number of bytes of input to encode
 * @param flags controls certain features of the encoded output.
 *              Passing {@code DEFAULT} results in output that
 *              adheres to RFC 2045.
 * @return the encoded data as a US-ASCII string
 */
public static String encodeToString(byte[] input, int offset, int len, int flags) {
    // The Charset overload cannot throw: it replaces the old
    // String(byte[], "US-ASCII") constructor whose checked
    // UnsupportedEncodingException was impossible anyway.
    return new String(encode(input, offset, len, flags), java.nio.charset.StandardCharsets.US_ASCII);
}
/**
 * Base64-encode the given data and return a newly allocated
 * byte[] with the result.
 *
 * @param input the data to encode
 * @param flags controls certain features of the encoded output.
 * Passing {@code DEFAULT} results in output that
 * adheres to RFC 2045.
 * @return a newly allocated, exactly-sized array of encoded bytes
 */
public static byte[] encode(byte[] input, int flags) {
    return encode(input, 0, input.length, flags);
}
/**
 * Base64-encode the given data and return a newly allocated
 * byte[] with the result.
 *
 * @param input the data to encode
 * @param offset the position within the input array at which to
 *               start
 * @param len the number of bytes of input to encode
 * @param flags controls certain features of the encoded output.
 *              Passing {@code DEFAULT} results in output that
 *              adheres to RFC 2045.
 * @return a newly allocated, exactly-sized array of encoded bytes
 */
public static byte[] encode(byte[] input, int offset, int len, int flags) {
    Encoder encoder = new Encoder(flags, null);

    // Every complete input triple produces exactly four output bytes.
    int outputLen = len / 3 * 4;

    // Account for a trailing partial triple, padded or not.
    int remainder = len % 3;
    if (encoder.do_padding) {
        // Padding always rounds a partial triple up to a full 4-byte group.
        if (remainder > 0) {
            outputLen += 4;
        }
    } else if (remainder == 1) {
        outputLen += 2;
    } else if (remainder == 2) {
        outputLen += 3;
    }

    // Account for the line separators, if wrapping is enabled: one LF
    // (or CRLF) after each group of LINE_GROUPS tuples, including a
    // trailing separator after the final partial group.
    if (encoder.do_newline && len > 0) {
        int separators = ((len - 1) / (3 * Encoder.LINE_GROUPS)) + 1;
        outputLen += separators * (encoder.do_cr ? 2 : 1);
    }

    encoder.output = new byte[outputLen];
    encoder.process(input, offset, len, true);

    assert encoder.op == outputLen;

    return encoder.output;
}
/* package */ static class Encoder extends Coder {
    /**
     * Emit a new line every this many output tuples. Corresponds to
     * a 76-character line length (the maximum allowable according to
     * <a href="http://www.ietf.org/rfc/rfc2045.txt">RFC 2045</a>).
     */
    public static final int LINE_GROUPS = 19;

    /**
     * Lookup table for turning Base64 alphabet positions (6 bits)
     * into output bytes.
     */
    private static final byte ENCODE[] = {
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
        'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f',
        'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
        'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/',
    };

    /**
     * Lookup table for turning Base64 alphabet positions (6 bits)
     * into output bytes, for the "web safe" variant (RFC 3548
     * sec. 4) where - and _ replace + and /.
     */
    private static final byte ENCODE_WEBSAFE[] = {
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
        'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f',
        'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
        'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-', '_',
    };

    final private byte[] tail;       // 0-2 input bytes carried over from the previous call
    /* package */ int tailLen;       // number of valid bytes currently in tail
    private int count;               // output tuples remaining before the next line break

    final public boolean do_padding; // append '=' padding to a partial final tuple
    final public boolean do_newline; // wrap output lines
    final public boolean do_cr;      // use CRLF instead of LF when wrapping
    final private byte[] alphabet;   // ENCODE or ENCODE_WEBSAFE

    public Encoder(int flags, byte[] output) {
        this.output = output;

        do_padding = (flags & NO_PADDING) == 0;
        do_newline = (flags & NO_WRAP) == 0;
        do_cr = (flags & CRLF) != 0;
        alphabet = ((flags & URL_SAFE) == 0) ? ENCODE : ENCODE_WEBSAFE;

        tail = new byte[2];
        tailLen = 0;

        count = do_newline ? LINE_GROUPS : -1;
    }

    /**
     * @return an overestimate for the number of bytes {@code
     * len} bytes could encode to.
     */
    public int maxOutputSize(int len) {
        return len * 8/5 + 10;
    }

    /**
     * Encode another block of input data.
     *
     * @param finish true when this is the last chunk; flushes the tail
     *        and emits padding/newlines as configured.
     * @return always true (encoding cannot fail)
     */
    public boolean process(byte[] input, int offset, int len, boolean finish) {
        // Using local variables makes the encoder about 9% faster.
        final byte[] alphabet = this.alphabet;
        final byte[] output = this.output;
        int op = 0;
        int count = this.count;

        int p = offset;
        len += offset;
        int v = -1;

        // First we need to concatenate the tail of the previous call
        // with any input bytes available now and see if we can empty
        // the tail.
        switch (tailLen) {
            case 0:
                // There was no tail.
                break;

            case 1:
                if (p+2 <= len) {
                    // A 1-byte tail with at least 2 bytes of
                    // input available now.
                    v = ((tail[0] & 0xff) << 16) |
                        ((input[p++] & 0xff) << 8) |
                        (input[p++] & 0xff);
                    tailLen = 0;
                } // fixed: was "};" - a stray empty statement
                break;

            case 2:
                if (p+1 <= len) {
                    // A 2-byte tail with at least 1 byte of input.
                    v = ((tail[0] & 0xff) << 16) |
                        ((tail[1] & 0xff) << 8) |
                        (input[p++] & 0xff);
                    tailLen = 0;
                }
                break;
        }

        if (v != -1) {
            output[op++] = alphabet[(v >> 18) & 0x3f];
            output[op++] = alphabet[(v >> 12) & 0x3f];
            output[op++] = alphabet[(v >> 6) & 0x3f];
            output[op++] = alphabet[v & 0x3f];
            if (--count == 0) {
                if (do_cr) output[op++] = '\r';
                output[op++] = '\n';
                count = LINE_GROUPS;
            }
        }

        // At this point either there is no tail, or there are fewer
        // than 3 bytes of input available.

        // The main loop, turning 3 input bytes into 4 output bytes on
        // each iteration.
        while (p+3 <= len) {
            v = ((input[p] & 0xff) << 16) |
                ((input[p+1] & 0xff) << 8) |
                (input[p+2] & 0xff);
            output[op] = alphabet[(v >> 18) & 0x3f];
            output[op+1] = alphabet[(v >> 12) & 0x3f];
            output[op+2] = alphabet[(v >> 6) & 0x3f];
            output[op+3] = alphabet[v & 0x3f];
            p += 3;
            op += 4;
            if (--count == 0) {
                if (do_cr) output[op++] = '\r';
                output[op++] = '\n';
                count = LINE_GROUPS;
            }
        }

        if (finish) {
            // Finish up the tail of the input. Note that we need to
            // consume any bytes in tail before any bytes
            // remaining in input; there should be at most two bytes
            // total.

            if (p-tailLen == len-1) {
                int t = 0;
                v = ((tailLen > 0 ? tail[t++] : input[p++]) & 0xff) << 4;
                tailLen -= t;
                output[op++] = alphabet[(v >> 6) & 0x3f];
                output[op++] = alphabet[v & 0x3f];
                if (do_padding) {
                    output[op++] = '=';
                    output[op++] = '=';
                }
                if (do_newline) {
                    if (do_cr) output[op++] = '\r';
                    output[op++] = '\n';
                }
            } else if (p-tailLen == len-2) {
                int t = 0;
                v = (((tailLen > 1 ? tail[t++] : input[p++]) & 0xff) << 10) |
                    (((tailLen > 0 ? tail[t++] : input[p++]) & 0xff) << 2);
                tailLen -= t;
                output[op++] = alphabet[(v >> 12) & 0x3f];
                output[op++] = alphabet[(v >> 6) & 0x3f];
                output[op++] = alphabet[v & 0x3f];
                if (do_padding) {
                    output[op++] = '=';
                }
                if (do_newline) {
                    if (do_cr) output[op++] = '\r';
                    output[op++] = '\n';
                }
            } else if (do_newline && op > 0 && count != LINE_GROUPS) {
                if (do_cr) output[op++] = '\r';
                output[op++] = '\n';
            }

            assert tailLen == 0;
            assert p == len;
        } else {
            // Save the leftovers in tail to be consumed on the next
            // call to encodeInternal.

            if (p == len-1) {
                tail[tailLen++] = input[p];
            } else if (p == len-2) {
                tail[tailLen++] = input[p];
                tail[tailLen++] = input[p+1];
            }
        }

        this.op = op;
        this.count = count;

        return true;
    }
}
private Base64Utils() { } // don't instantiate
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Gets the documentation versions of an API.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetDocumentationVersionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** [Required] The string identifier of the associated <a>RestApi</a>. */
    private String restApiId;

    /** The current pagination position in the paged result set. */
    private String position;

    /** The maximum number of returned results per page. The default value is 25 and the maximum value is 500. */
    private Integer limit;

    /**
     * Sets the identifier of the associated <a>RestApi</a>.
     *
     * @param restApiId [Required] The string identifier of the associated <a>RestApi</a>.
     */
    public void setRestApiId(String restApiId) {
        this.restApiId = restApiId;
    }

    /**
     * @return [Required] The string identifier of the associated <a>RestApi</a>.
     */
    public String getRestApiId() {
        return restApiId;
    }

    /**
     * Fluent variant of {@link #setRestApiId(String)}.
     *
     * @param restApiId [Required] The string identifier of the associated <a>RestApi</a>.
     * @return this request, so that method calls can be chained together.
     */
    public GetDocumentationVersionsRequest withRestApiId(String restApiId) {
        this.restApiId = restApiId;
        return this;
    }

    /**
     * Sets the current pagination position in the paged result set.
     *
     * @param position The current pagination position in the paged result set.
     */
    public void setPosition(String position) {
        this.position = position;
    }

    /**
     * @return The current pagination position in the paged result set.
     */
    public String getPosition() {
        return position;
    }

    /**
     * Fluent variant of {@link #setPosition(String)}.
     *
     * @param position The current pagination position in the paged result set.
     * @return this request, so that method calls can be chained together.
     */
    public GetDocumentationVersionsRequest withPosition(String position) {
        this.position = position;
        return this;
    }

    /**
     * Sets the maximum number of returned results per page.
     *
     * @param limit The maximum number of returned results per page. The default value is 25 and the maximum value is
     *        500.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * @return The maximum number of returned results per page. The default value is 25 and the maximum value is 500.
     */
    public Integer getLimit() {
        return limit;
    }

    /**
     * Fluent variant of {@link #setLimit(Integer)}.
     *
     * @param limit The maximum number of returned results per page. The default value is 25 and the maximum value is
     *        500.
     * @return this request, so that method calls can be chained together.
     */
    public GetDocumentationVersionsRequest withLimit(Integer limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (restApiId != null) {
            sb.append("RestApiId: ").append(restApiId).append(",");
        }
        if (position != null) {
            sb.append("Position: ").append(position).append(",");
        }
        if (limit != null) {
            sb.append("Limit: ").append(limit);
        }
        return sb.append("}").toString();
    }

    /** Null-tolerant equality check used by {@link #equals(Object)}. */
    private static boolean sameValue(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetDocumentationVersionsRequest)) {
            return false;
        }
        GetDocumentationVersionsRequest that = (GetDocumentationVersionsRequest) obj;
        return sameValue(that.getRestApiId(), getRestApiId())
                && sameValue(that.getPosition(), getPosition())
                && sameValue(that.getLimit(), getLimit());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated original, so hash
        // values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((restApiId == null) ? 0 : restApiId.hashCode());
        hashCode = prime * hashCode + ((position == null) ? 0 : position.hashCode());
        hashCode = prime * hashCode + ((limit == null) ? 0 : limit.hashCode());
        return hashCode;
    }

    @Override
    public GetDocumentationVersionsRequest clone() {
        return (GetDocumentationVersionsRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.NodeUtils;
import org.apache.hadoop.hive.ql.exec.NodeUtils.Function;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.StatsTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskRunner;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Handles the queue of tasks that should be executed by the driver.
*/
public class TaskQueue {

    // Logged under Driver's category on purpose: this queue is part of the
    // driver's execution machinery and shares its log stream.
    private static final Logger LOG = LoggerFactory.getLogger(Driver.class.getName());
    private static final SessionState.LogHelper CONSOLE = new SessionState.LogHelper(LOG);
    private static final int SLEEP_TIME = 2000;

    /** Tasks that are ready to be launched, in FIFO order. */
    private final Queue<Task<?>> runnable = new ConcurrentLinkedQueue<Task<?>>();
    /** Runners for tasks that have been launched and not yet finished. */
    private final List<TaskRunner> running = new ArrayList<TaskRunner>();
    /** Aggregation key -> StatsTask mapping extracted from the query plan. */
    private final Map<String, StatsTask> statsTasks = new HashMap<>(1);

    /** Query context; may be null when the no-arg constructor is used. */
    private final Context ctx;

    // how many jobs have been started
    private int curJobNo;

    private boolean shutdown;

    public TaskQueue() {
        this(null);
    }

    public TaskQueue(Context ctx) {
        this.ctx = ctx;
    }

    public synchronized boolean isShutdown() {
        return shutdown;
    }

    /** @return true while there is still queued or in-flight work and no shutdown was requested. */
    public synchronized boolean isRunning() {
        return !shutdown && (!running.isEmpty() || !runnable.isEmpty());
    }

    public synchronized void remove(Task<?> task) {
        runnable.remove(task);
    }

    /**
     * Registers a runner that is about to be launched.
     *
     * @throws HiveException if the queue has been shut down
     */
    public synchronized void launching(TaskRunner runner) throws HiveException {
        checkShutdown();
        running.add(runner);
    }

    /**
     * Removes and returns the next runnable task, or null when nothing is
     * queued or the thread limit is reached.
     *
     * @throws HiveException if the queue has been shut down
     */
    public synchronized Task<?> getRunnable(int maxthreads) throws HiveException {
        checkShutdown();
        if (runnable.peek() != null && running.size() < maxthreads) {
            return runnable.remove();
        }
        return null;
    }

    public synchronized void releaseRunnable() {
        //release the waiting poller.
        notify();
    }

    /**
     * Polls running tasks to see if a task has ended.
     *
     * @return The result object for any completed/failed task
     */
    public synchronized TaskRunner pollFinished() throws InterruptedException {
        while (!shutdown) {
            Iterator<TaskRunner> it = running.iterator();
            while (it.hasNext()) {
                TaskRunner runner = it.next();
                if (runner != null && !runner.isRunning()) {
                    it.remove();
                    return runner;
                }
            }
            // Wake up periodically (or when releaseRunnable() notifies us).
            wait(SLEEP_TIME);
        }
        return null;
    }

    private void checkShutdown() throws HiveException {
        if (shutdown) {
            throw new HiveException("FAILED: Operation cancelled");
        }
    }

    /**
     * Cleans up remaining tasks in case of failure.
     */
    public synchronized void shutdown() {
        // ctx is null when the no-arg constructor was used; guard the log call.
        LOG.debug("Shutting down query {}", ctx == null ? null : ctx.getCmd());
        shutdown = true;
        for (TaskRunner runner : running) {
            if (runner.isRunning()) {
                Task<?> task = runner.getTask();
                LOG.warn("Shutting down task : {}", task);
                try {
                    task.shutdown();
                } catch (Exception e) {
                    CONSOLE.printError("Exception on shutting down task " + task.getId() + ": " + e);
                }
                Thread thread = runner.getRunner();
                if (thread != null) {
                    thread.interrupt();
                }
            }
        }
        running.clear();
    }

    /**
     * Checks if a task can be launched.
     *
     * @param tsk
     *          the task to be checked
     * @return true if the task is launchable, false otherwise
     */
    public static boolean isLaunchable(Task<?> tsk) {
        // A launchable task is one that hasn't been queued, hasn't been
        // initialized, and is runnable.
        return tsk.isNotInitialized() && tsk.isRunnable();
    }

    /**
     * Queues a task unless it is already queued.
     *
     * @return true if the task was added, false if it was already present
     * @throws HiveException if the queue has been shut down
     */
    public synchronized boolean addToRunnable(Task<?> tsk) throws HiveException {
        if (runnable.contains(tsk)) {
            return false;
        }
        checkShutdown();
        runnable.add(tsk);
        tsk.setQueued();
        return true;
    }

    public int getCurJobNo() {
        return curJobNo;
    }

    public void incCurJobNo(int amount) {
        this.curJobNo = this.curJobNo + amount;
    }

    /**
     * Extracts aggregation keys from all StatsTasks in the plan so that
     * {@link #finished(TaskRunner)} can link them to their source tasks.
     */
    public void prepare(QueryPlan plan) {
        // extract stats keys from StatsTask
        List<Task<?>> rootTasks = plan.getRootTasks();
        NodeUtils.iterateTask(rootTasks, StatsTask.class, new Function<StatsTask>() {
            @Override
            public void apply(StatsTask statsTask) {
                if (statsTask.getWork().isAggregating()) {
                    statsTasks.put(statsTask.getWork().getAggKey(), statsTask);
                }
            }
        });
    }

    /**
     * After a MapReduce task finishes, wires any stats-gathering FileSinks
     * in its plan to the corresponding StatsTask.
     */
    public void finished(TaskRunner runner) {
        if (statsTasks.isEmpty() || !(runner.getTask() instanceof MapRedTask)) {
            return;
        }
        MapRedTask mapredTask = (MapRedTask) runner.getTask();
        MapWork mapWork = mapredTask.getWork().getMapWork();
        ReduceWork reduceWork = mapredTask.getWork().getReduceWork();
        List<Operator> operators = new ArrayList<Operator>(mapWork.getAliasToWork().values());
        if (reduceWork != null) {
            operators.add(reduceWork.getReducer());
        }
        final List<String> statKeys = new ArrayList<String>(1);
        NodeUtils.iterate(operators, FileSinkOperator.class, new Function<FileSinkOperator>() {
            @Override
            public void apply(FileSinkOperator fsOp) {
                if (fsOp.getConf().isGatherStats()) {
                    statKeys.add(fsOp.getConf().getStatsAggPrefix());
                }
            }
        });
        for (String statKey : statKeys) {
            if (statsTasks.containsKey(statKey)) {
                statsTasks.get(statKey).getWork().setSourceTask(mapredTask);
            } else {
                LOG.debug("There is no corresponding statTask for: {}", statKey);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.blob.datastore;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.commons.FileIOUtils;
import org.apache.jackrabbit.oak.plugins.blob.SharedDataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.BlobIdTracker.ActiveDeletionTracker;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.google.common.collect.Lists.newArrayList;
import static java.lang.String.valueOf;
import static java.util.UUID.randomUUID;
import static org.apache.jackrabbit.oak.commons.FileIOUtils.readStringsAsSet;
import static org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils.getBlobStore;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeNoException;
import static org.junit.Assume.assumeThat;
/**
* Test for BlobIdTracker.ActiveDeletionTracker to test tracking removed blob ids.
*/
public class ActiveDeletionTrackerStoreTest {
    private static final Logger log = LoggerFactory.getLogger(ActiveDeletionTrackerStoreTest.class);

    File root;
    SharedDataStore dataStore;
    ActiveDeletionTracker tracker;

    @Rule
    public TemporaryFolder folder = new TemporaryFolder(new File("target"));

    private String repoId;

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        // Skip the whole class when the configured blob store is not shared.
        try {
            assumeThat(getBlobStore(), instanceOf(SharedDataStore.class));
        } catch (Exception e) {
            assumeNoException(e);
        }
    }

    @Before
    public void setup() throws Exception {
        this.root = folder.newFolder();
        if (dataStore == null) {
            dataStore = getBlobStore(root);
        }
        this.repoId = randomUUID().toString();
        this.tracker = initTracker();
    }

    private ActiveDeletionTracker initTracker() throws IOException {
        return new ActiveDeletionTracker(root, repoId);
    }

    @After
    public void tearDown() throws IOException {
        folder.delete();
    }

    @Test
    public void track() throws Exception {
        Set<String> initAdd = add(tracker, range(0, 20), folder);
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after add snapshot", initAdd, retrieved);
    }

    @Test
    public void filterWithNoActiveDeletion() throws Exception {
        File toFilter = create(range(7, 10), folder);
        Iterator<String> filtered = tracker.filter(toFilter);
        assertEquals("incorrect elements after filtering", Sets.newHashSet(range(7, 10)), Sets.newHashSet(filtered));
    }

    @Test
    public void filter() throws Exception {
        add(tracker, range(0, 20), folder);
        File toFilter = create(range(7, 10), folder);
        Iterator<String> filtered = tracker.filter(toFilter);
        assertTrue("More elements after filtering", Lists.newArrayList(filtered).isEmpty());
    }

    @Test
    public void noFilter() throws Exception {
        add(tracker, range(5, 20), folder);
        List<String> toFilter = combine(range(7, 10), range(0, 4));
        File toFilterFile = create(toFilter, folder);
        Iterator<String> filtered = tracker.filter(toFilterFile);
        assertEquals("Incorrect elements after filtering", range(0, 4), Lists.newArrayList(filtered));
    }

    @Test
    public void filterWithExtraElements() throws Exception {
        add(tracker, range(5, 25), folder);
        List<String> toFilter = combine(range(7, 10), range(0, 4));
        File toFilterFile = create(toFilter, folder);
        Iterator<String> filtered = tracker.filter(toFilterFile);
        assertEquals("Incorrect elements after filtering",
            range(0, 4), Lists.newArrayList(filtered));
    }

    @Test
    public void reconcileAll() throws Exception {
        add(tracker, range(0, 20), folder);
        // was a raw List; parameterize to avoid unchecked warnings
        List<String> toReconcile = Lists.newArrayList();
        File toFilter = create(toReconcile, folder);
        tracker.reconcile(toFilter);
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after reconciliation", Sets.newHashSet(toReconcile), retrieved);
    }

    @Test
    public void reconcileNone() throws Exception {
        add(tracker, range(0, 20), folder);
        List<String> toReconcile = range(0, 20);
        File toFilter = create(toReconcile, folder);
        tracker.reconcile(toFilter);
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after reconciliation", Sets.newHashSet(toReconcile), retrieved);
    }

    @Test
    public void reconcile() throws Exception {
        add(tracker, range(0, 20), folder);
        List<String> toReconcile = combine(range(7, 10), range(1, 4));
        File toFilter = create(toReconcile, folder);
        tracker.reconcile(toFilter);
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after reconciliation", Sets.newHashSet(toReconcile), retrieved);
    }

    @Test
    public void reconcileExtraElements() throws Exception {
        add(tracker, range(0, 25), folder);
        List<String> toReconcile = combine(range(7, 10), range(1, 4));
        File toFilter = create(toReconcile, folder);
        tracker.reconcile(toFilter);
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after reconciliation", Sets.newHashSet(toReconcile), retrieved);
    }

    @Test
    public void addCloseRestart() throws IOException {
        Set<String> initAdd = add(tracker, range(0, 10), folder);
        this.tracker = initTracker();
        Set<String> retrieved = retrieve(tracker, folder);
        assertEquals("Incorrect elements after safe restart", initAdd, retrieved);
    }

    /** Writes the given ids into a temp file, tracks it, and returns the ids as a set. */
    private static Set<String> add(ActiveDeletionTracker store, List<String> ints, TemporaryFolder folder) throws IOException {
        File f = folder.newFile();
        FileIOUtils.writeStrings(ints.iterator(), f, false);
        store.track(f);
        return Sets.newHashSet(ints);
    }

    /** Writes the given ids into a fresh temp file and returns the file. */
    private static File create(List<String> ints, TemporaryFolder folder) throws IOException {
        File f = folder.newFile();
        FileIOUtils.writeStrings(ints.iterator(), f, false);
        return f;
    }

    /** Reads back the tracker's current snapshot as a set of ids. */
    private static Set<String> retrieve(ActiveDeletionTracker store, TemporaryFolder folder) throws IOException {
        File f = folder.newFile();
        // try-with-resources ensures the stream is closed even if reading fails
        try (FileInputStream in = new FileInputStream(store.retrieve(f.getAbsolutePath()))) {
            return readStringsAsSet(in, false);
        }
    }

    /** @return zero-padded decimal strings for min..max inclusive. */
    private static List<String> range(int min, int max) {
        List<String> list = newArrayList();
        for (int i = min; i <= max; i++) {
            list.add(Strings.padStart(valueOf(i), 2, '0'));
        }
        return list;
    }

    /** Merges both lists into the first, sorted numerically, and returns it. */
    private static List<String> combine(List<String> first, List<String> second) {
        first.addAll(second);
        Collections.sort(first, new Comparator<String>() {
            @Override public int compare(String s1, String s2) {
                return Integer.valueOf(s1).compareTo(Integer.valueOf(s2));
            }
        });
        return first;
    }
}
| |
package edu.mum.cs.ds.atm.ui;
import java.awt.EventQueue;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Panel;
import java.awt.SystemColor;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import edu.mum.cs.ds.atm.RequestHandlerFacade;
import edu.mum.cs.ds.atm.model.Request;
import edu.mum.cs.ds.atm.model.Response;
/**
 * Main dashboard window of the ATM client. Shows the action buttons
 * (balance, bill payment, withdraw, change PIN, deposit), a "print" area for
 * server responses, a deposit-amount field and a numeric keypad. Can be
 * launched standalone via {@link #main(String[])}.
 */
public class Dashboard {

    /** Public so launcher windows can show/hide the dashboard. */
    public JFrame frame;
    // NOTE(review): never assigned or added to the frame; kept so any existing
    // bindings/reflection against this field keep working — confirm and remove.
    private JTable table;
    private JTextField txtDepositamount;
    private JTextArea textAreaPrint = new JTextArea();

    /**
     * Launch the application.
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    Dashboard window = new Dashboard();
                    window.frame.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Create the application.
     */
    public Dashboard() {
        initialize();
    }

    /**
     * Initialize the contents of the frame.
     */
    private void initialize() {
        frame = new JFrame();
        frame.setBounds(100, 100, 826, 767);
        frame.setTitle("Dashboard");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().setLayout(null);

        // ---- action panel (top) ------------------------------------------
        JPanel panel = new JPanel();
        panel.setBackground(SystemColor.inactiveCaptionBorder);
        panel.setBounds(12, 13, 784, 373);
        frame.getContentPane().add(panel);
        GridBagLayout gbl_panel = new GridBagLayout();
        gbl_panel.columnWidths = new int[]{231, 123, 116, 73, 0};
        gbl_panel.rowHeights = new int[]{22, 0, 0, 0, 0, 0, 0, 0, 0};
        gbl_panel.columnWeights = new double[]{0.0, 0.0, 1.0, 0.0, Double.MIN_VALUE};
        gbl_panel.rowWeights = new double[]{0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE};
        panel.setLayout(gbl_panel);

        JButton btnBalance = addGridButton(panel, "Balance", 0, 2);
        btnBalance.setHorizontalAlignment(SwingConstants.LEFT);
        btnBalance.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // FIXME: machine/account identifiers are hard-coded demo values.
                Request request = new Request("checkBalance","MachineId","1234534232243","fairfield-branch","111111111");
                RequestHandlerFacade requestHandlerFacade = new RequestHandlerFacade();
                Response response = requestHandlerFacade.handleRequest(request);
                StringBuilder sb = new StringBuilder();
                sb.append(response.getMessage());
                sb.append("\n");
                sb.append(response.getRequest().getAmount());
                textAreaPrint.setText(sb.toString());
            }
        });

        JButton btnBillPayment = addGridButton(panel, "Bill Payment", 2, 2);
        btnBillPayment.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                UnitilyPayment window = new UnitilyPayment();
                window.frame.setVisible(true);
            }
        });

        JButton btnWithdraw = addGridButton(panel, "Withdraw", 0, 4);
        btnWithdraw.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                Withdraw window = new Withdraw();
                window.frame.setVisible(true);
            }
        });

        JButton btnChangePIN = addGridButton(panel, "Change PIN", 2, 4);
        btnChangePIN.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                ChangePIN window = new ChangePIN();
                window.frame.setVisible(true);
            }
        });

        JButton btnDeposit = addGridButton(panel, "Deposit", 0, 6);
        btnDeposit.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                Deposit window = new Deposit();
                window.frame.setVisible(true);
            }
        });

        // ---- "print" output panel ----------------------------------------
        JPanel panel_1 = new JPanel();
        panel_1.setBounds(12, 389, 391, 169);
        frame.getContentPane().add(panel_1);
        panel_1.setLayout(null);

        JLabel lblNewLabel = new JLabel("Print");
        lblNewLabel.setBounds(6, 6, 29, 16);
        panel_1.add(lblNewLabel);

        textAreaPrint.setText("--");
        textAreaPrint.setBounds(6, 20, 379, 143);
        panel_1.add(textAreaPrint);

        // ---- deposit input panel -----------------------------------------
        JPanel panel_3 = new JPanel();
        panel_3.setBounds(12, 559, 391, 161);
        frame.getContentPane().add(panel_3);
        panel_3.setLayout(null);

        JLabel lblDeposit = new JLabel("Deposit");
        lblDeposit.setBounds(6, 6, 49, 16);
        panel_3.add(lblDeposit);

        JLabel lblDepositAmount = new JLabel("Deposit Amount");
        lblDepositAmount.setBounds(17, 59, 123, 16);
        panel_3.add(lblDepositAmount);

        txtDepositamount = new JTextField();
        txtDepositamount.setText("0.0");
        txtDepositamount.setBounds(128, 53, 134, 28);
        panel_3.add(txtDepositamount);
        txtDepositamount.setColumns(10);

        // ---- numeric keypad ----------------------------------------------
        Panel panel_2 = new Panel();
        panel_2.setBackground(SystemColor.activeCaption);
        panel_2.setBounds(405, 389, 391, 321);
        frame.getContentPane().add(panel_2);
        GridBagLayout gbl_panel_2 = new GridBagLayout();
        gbl_panel_2.columnWidths = new int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
        gbl_panel_2.rowHeights = new int[]{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
        gbl_panel_2.columnWeights = new double[]{0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE};
        gbl_panel_2.rowWeights = new double[]{0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE};
        panel_2.setLayout(gbl_panel_2);

        // keypad keys on a 3x4 grid (no listeners wired in the original either)
        addGridButton(panel_2, "1", 2, 1);
        addGridButton(panel_2, "2", 4, 1);
        addGridButton(panel_2, "3", 6, 1);
        addGridButton(panel_2, "4", 2, 3);
        addGridButton(panel_2, "5", 4, 3);
        addGridButton(panel_2, "6", 6, 3);
        addGridButton(panel_2, "7", 2, 5);
        addGridButton(panel_2, "8", 4, 5);
        addGridButton(panel_2, "9", 6, 5);
        addGridButton(panel_2, ".", 2, 7);
        addGridButton(panel_2, "0", 4, 7);
        addGridButton(panel_2, "#", 6, 7);
        addGridButton(panel_2, "Enter", 2, 9);

        JButton btnCancel = addGridButton(panel_2, "Cancel", 6, 9);
        btnCancel.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                MainWindow window = new MainWindow();
                window.frame.setVisible(true);
            }
        });
    }

    /**
     * Creates a button with the given label, adds it to {@code parent} at grid
     * cell ({@code gridx}, {@code gridy}) using the standard insets used
     * throughout this window, and returns it so callers can attach listeners
     * or tweak appearance. Extracted to remove ~14 copy-pasted stanzas.
     */
    private static JButton addGridButton(java.awt.Container parent, String label, int gridx, int gridy) {
        JButton button = new JButton(label);
        GridBagConstraints gbc = new GridBagConstraints();
        gbc.insets = new Insets(0, 0, 5, 5);
        gbc.gridx = gridx;
        gbc.gridy = gridy;
        parent.add(button, gbc);
        return button;
    }
}
| |
/*
* Copyright (c) 2014 by Ernesto Carrella
* Licensed under MIT license. Basically do what you want with it but cite me and don't sue me. Which is just politeness, really.
* See the file "LICENSE" for more information
*/
package agents.firm.purchases;
import agents.EconomicAgent;
import agents.HasInventory;
import goods.InventoryListener;
import com.google.common.base.Preconditions;
import goods.GoodType;
import model.MacroII;
import model.utilities.ActionOrder;
import model.utilities.Deactivatable;
import model.utilities.scheduler.Priority;
import sim.engine.SimState;
import sim.engine.Steppable;
/**
* <h4>Description</h4>
* <p/> A very simple inventory listener I give to purchase departments to check inflow-outflow of a specific good
* <p/> it is started by the start of purchase department and turned off with the purchase department.
* <p/> It resets itself every day at dawn
* <h4>Notes</h4>
* Created with IntelliJ
* <p/>
* <p/>
* <h4>References</h4>
*
* @author carrknight
* @version 2013-02-13
* @see
*/
public class InflowOutflowCounter implements Deactivatable, InventoryListener, Steppable
{
    // Package-private flag; flipped to false by turnOff() so the dawn
    // reschedule chain in restartAtDawn() stops.
    boolean isActive = true;

    /**
     * used to schedule yourself
     */
    final private MacroII model;

    /**
     * the agent whose inventory you are going to listen to
     */
    final private EconomicAgent agent;

    /**
     * the type of good to listen to
     */
    private final GoodType type;

    /**
     * inflow counted over the previous day (snapshot of todayInflow taken at dawn)
     */
    private int yesterdayInflow=0;

    /**
     * outflow counted over the previous day (snapshot of todayOutflow taken at dawn)
     */
    private int yesterdayOutflow=0;

    /**
     * total inflow since dawn
     */
    private int todayInflow;

    /**
     * total outflow since dawn
     */
    private int todayOutflow;

    /**
     * counts how many times a plant wanted to consume a product but failed to do so because it wasn't available.
     */
    private int todayFailuresToConsume;

    /**
     * called every dawn (and reschedules itself if active), resets all the counters
     */
    public void restartAtDawn()
    {
        // must only run during the DAWN phase of the schedule
        Preconditions.checkArgument(model.getCurrentPhase().equals(ActionOrder.DAWN));
        if(isActive)
        {
            // snapshot today's counters into yesterday's before clearing them
            yesterdayInflow = todayInflow;
            todayInflow = 0;
            yesterdayOutflow = todayOutflow;
            todayOutflow = 0;
            todayFailuresToConsume = 0;
            // re-arm for tomorrow's dawn; the chain ends once isActive is false
            model.scheduleTomorrow(ActionOrder.DAWN,this, Priority.BEFORE_STANDARD);
        }
    }

    /**
     * start resetting and listening
     */
    public void start()
    {
        assert isActive;
        //schedule your first reset
        model.scheduleSoon(ActionOrder.DAWN, this,Priority.BEFORE_STANDARD);
        //add yourself as a listener
        agent.addInventoryListener(this);
    }

    /**
     * stop rescheduling yourself
     */
    @Override
    public void turnOff() {
        // NOTE(review): this stops the dawn rescheduling but never removes this
        // object from the agent's inventory listeners (start() added it) —
        // confirm whether the listener is meant to stay registered after turnOff.
        isActive = false;
    }

    /**
     * The step is each morning reset your data.
     */
    @Override
    public void step(SimState state) {
        restartAtDawn();
    }

    /**
     * This is called by the inventory to notify the listener that the quantity in the inventory has increased
     *
     * @param source the agent with the inventory that is calling the listener
     * @param type which type of good has increased/decreased in numbers
     * @param quantity how many goods do we have in the inventory now
     * @param delta change in inventory (always positive)
     */
    @Override
    public void inventoryIncreaseEvent( HasInventory source, GoodType type, int quantity, int delta) {
        // only count the good type this counter was built for
        if(this.type.equals(type))
        {
            Preconditions.checkArgument(delta > 0);
            todayInflow+= delta;
        }
    }

    /**
     * This is called by the inventory to notify the listener that the quantity in the inventory has decreased
     *
     * @param source the agent with the inventory that is calling the listener
     * @param type which type of good has increased/decreased in numbers
     * @param quantity how many goods do we have in the inventory now
     * @param delta change in inventory (always positive)
     */
    @Override
    public void inventoryDecreaseEvent( HasInventory source, GoodType type, int quantity,int delta) {
        // only count the good type this counter was built for
        if(this.type.equals(type)) {
            Preconditions.checkArgument(delta > 0);
            todayOutflow+= delta;
        }
    }

    /**
     * This method is called by departments (plants usually) that need this input but found none. It is called
     *
     * @param source the agent with the inventory
     * @param type the good type demanded
     * @param numberNeeded how many goods were needed
     */
    @Override
    public void failedToConsumeEvent( HasInventory source, GoodType type, int numberNeeded) {
        if(this.type.equals(type))
            todayFailuresToConsume += numberNeeded;
    }

    /**
     * Creates the counter. It is going to start listening at start()
     * @param model the model (to reschedule yourself)
     * @param agent the agent to listen to
     * @param type the type of good you want to count inflow/outflow
     */
    public InflowOutflowCounter(MacroII model, EconomicAgent agent, GoodType type) {
        this.model = model;
        this.agent = agent;
        this.type = type;
    }

    /**
     * Answers how many days, at the current rate, will it take for all the inventories to be gone
     * @return If outflow > inflow it returns inventorySize/netOutflow, otherwise returns infinity
     */
    public float currentDaysOfInventory()
    {
        // uses today's (partial-day) counters, not yesterday's snapshots
        float netOutflow = todayOutflow - todayInflow;
        if( netOutflow > 0)
            return agent.hasHowMany(type) / netOutflow;
        else
            // inventory not shrinking: it will never run out at the current rate
            return Float.POSITIVE_INFINITY;
    }

    /**
     * Gets total outflow since dawn.
     *
     * @return Value of total outflow since dawn.
     */
    public int getTodayOutflow() {
        return todayOutflow;
    }

    /**
     * Gets total inflow since dawn.
     *
     * @return Value of total inflow since dawn.
     */
    public int getTodayInflow() {
        return todayInflow;
    }

    /** @return how many consumption attempts failed (good unavailable) since dawn */
    public int getTodayFailuresToConsume() {
        return todayFailuresToConsume;
    }

    /** @return total inflow counted over the previous day */
    public int getYesterdayInflow() {
        return yesterdayInflow;
    }

    /** @return total outflow counted over the previous day */
    public int getYesterdayOutflow() {
        return yesterdayOutflow;
    }
}
| |
/*
* Copyright 2017 Yahoo Holdings, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.zms;
import java.util.List;
import com.yahoo.athenz.zms.store.ObjectStoreConnection;
import com.yahoo.athenz.zms.utils.ZMSUtils;
import com.yahoo.rdl.Timestamp;
class QuotaChecker {
private final Quota defaultQuota;
private boolean quotaCheckEnabled;
public QuotaChecker() {
// first check if the quota check is enabled or not
quotaCheckEnabled = Boolean.parseBoolean(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_CHECK, "true"));
// retrieve default quota values
int roleQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_ROLE, "1000"));
int roleMemberQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_ROLE_MEMBER, "100"));
int policyQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_POLICY, "1000"));
int assertionQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_ASSERTION, "100"));
int serviceQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_SERVICE, "250"));
int serviceHostQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_SERVICE_HOST, "10"));
int publicKeyQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_PUBLIC_KEY, "100"));
int entityQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_ENTITY, "100"));
int subDomainQuota = Integer.parseInt(System.getProperty(ZMSConsts.ZMS_PROP_QUOTA_SUBDOMAIN, "100"));
defaultQuota = new Quota().setName("server-default")
.setAssertion(assertionQuota).setEntity(entityQuota)
.setPolicy(policyQuota).setPublicKey(publicKeyQuota)
.setRole(roleQuota).setRoleMember(roleMemberQuota)
.setService(serviceQuota).setServiceHost(serviceHostQuota)
.setSubdomain(subDomainQuota).setModified(Timestamp.fromCurrentTime());
}
public Quota getDomainQuota(ObjectStoreConnection con, String domainName) {
Quota quota = con.getQuota(domainName);
return (quota == null) ? defaultQuota : quota;
}
void setQuotaCheckEnabled(boolean quotaCheckEnabled) {
this.quotaCheckEnabled = quotaCheckEnabled;
}
int getListSize(List<?> list) {
return (list == null) ? 0 : list.size();
}
void checkSubdomainQuota(ObjectStoreConnection con, String domainName, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// for sub-domains we need to run the quota check against
// the top level domain so let's get that first. If we are
// creating a top level domain then there is no need for
// quota check
int idx = domainName.indexOf('.');
if (idx == -1) {
return;
}
final String topLevelDomain = domainName.substring(0, idx);
// now get the quota for the top level domain
final Quota quota = getDomainQuota(con, topLevelDomain);
// get the list of sub-domains for our given top level domain
final String domainPrefix = topLevelDomain + ".";
int objectCount = con.listDomains(domainPrefix, 0).size() + 1;
if (quota.getSubdomain() < objectCount) {
throw ZMSUtils.quotaLimitError("subdomain quota exceeded - limit: "
+ quota.getSubdomain() + " actual: " + objectCount, caller);
}
}
void checkRoleQuota(ObjectStoreConnection con, String domainName, Role role, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// if our role is null then there is no quota check
if (role == null) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// first we're going to verify the elements that do not
// require any further data from the object store
int objectCount = getListSize(role.getRoleMembers());
if (quota.getRoleMember() < objectCount) {
throw ZMSUtils.quotaLimitError("role member quota exceeded - limit: "
+ quota.getRoleMember() + " actual: " + objectCount, caller);
}
// now we're going to check if we'll be allowed
// to create this role in the domain
objectCount = con.countRoles(domainName) + 1;
if (quota.getRole() < objectCount) {
throw ZMSUtils.quotaLimitError("role quota exceeded - limit: "
+ quota.getRole() + " actual: " + objectCount, caller);
}
}
void checkRoleMembershipQuota(ObjectStoreConnection con, String domainName,
String roleName, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// now check to make sure we can add 1 more member
// to this role without exceeding the quota
int objectCount = con.countRoleMembers(domainName, roleName) + 1;
if (quota.getRoleMember() < objectCount) {
throw ZMSUtils.quotaLimitError("role member quota exceeded - limit: "
+ quota.getRoleMember() + " actual: " + objectCount, caller);
}
}
void checkPolicyQuota(ObjectStoreConnection con, String domainName, Policy policy, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// if our policy is null then there is no quota check
if (policy == null) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// first we're going to verify the elements that do not
// require any further data from the object store
int objectCount = getListSize(policy.getAssertions());
if (quota.getAssertion() < objectCount) {
throw ZMSUtils.quotaLimitError("policy assertion quota exceeded - limit: "
+ quota.getAssertion() + " actual: " + objectCount, caller);
}
// now we're going to check if we'll be allowed
// to create this policy in the domain
objectCount = con.countPolicies(domainName) + 1;
if (quota.getPolicy() < objectCount) {
throw ZMSUtils.quotaLimitError("policy quota exceeded - limit: "
+ quota.getPolicy() + " actual: " + objectCount, caller);
}
}
void checkPolicyAssertionQuota(ObjectStoreConnection con, String domainName,
String policyName, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// now check to make sure we can add 1 more assertion
// to this policy without exceeding the quota
int objectCount = con.countAssertions(domainName, policyName) + 1;
if (quota.getAssertion() < objectCount) {
throw ZMSUtils.quotaLimitError("policy assertion quota exceeded - limit: "
+ quota.getAssertion() + " actual: " + objectCount, caller);
}
}
void checkServiceIdentityQuota(ObjectStoreConnection con, String domainName,
ServiceIdentity service, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// if our service is null then there is no quota check
if (service == null) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// first we're going to verify the elements that do not
// require any further data from the object store
int objectCount = getListSize(service.getHosts());
if (quota.getServiceHost() < objectCount) {
throw ZMSUtils.quotaLimitError("service host quota exceeded - limit: "
+ quota.getServiceHost() + " actual: " + objectCount, caller);
}
objectCount = getListSize(service.getPublicKeys());
if (quota.getPublicKey() < objectCount) {
throw ZMSUtils.quotaLimitError("service public key quota exceeded - limit: "
+ quota.getPublicKey() + " actual: " + objectCount, caller);
}
// now we're going to check if we'll be allowed
// to create this service in the domain
objectCount = con.countServiceIdentities(domainName) + 1;
if (quota.getService() < objectCount) {
throw ZMSUtils.quotaLimitError("service quota exceeded - limit: "
+ quota.getService() + " actual: " + objectCount, caller);
}
}
void checkServiceIdentityPublicKeyQuota(ObjectStoreConnection con, String domainName,
String serviceName, String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// now check to make sure we can add 1 more public key
// to this policy without exceeding the quota
int objectCount = con.countPublicKeys(domainName, serviceName) + 1;
if (quota.getPublicKey() < objectCount) {
throw ZMSUtils.quotaLimitError("service public key quota exceeded - limit: "
+ quota.getPublicKey() + " actual: " + objectCount, caller);
}
}
void checkEntityQuota(ObjectStoreConnection con, String domainName, Entity entity,
String caller) {
// if quota check is disabled we have nothing to do
if (!quotaCheckEnabled) {
return;
}
// if our entity is null then there is no quota check
if (entity == null) {
return;
}
// first retrieve the domain quota
final Quota quota = getDomainQuota(con, domainName);
// we're going to check if we'll be allowed
// to create this entity in the domain
int objectCount = con.countEntities(domainName) + 1;
if (quota.getEntity() < objectCount) {
throw ZMSUtils.quotaLimitError("entity quota exceeded - limit: "
+ quota.getEntity() + " actual: " + objectCount, caller);
}
}
}
| |
package data;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.commons.collections15.Transformer;
import org.la4j.iterator.MatrixIterator;
import org.la4j.Matrix;
import org.la4j.matrix.ColumnMajorSparseMatrix;
import org.la4j.matrix.sparse.CCSMatrix;
import org.la4j.matrix.sparse.CRSMatrix;
/**
 * la4j-backed sparse matrix (CRS layout) implementing the project's
 * {@code AbstractMatrix} interface. Adds similarity-statistics helpers used
 * by the graph-comparison code.
 */
public class Matrixla4j extends CRSMatrix implements AbstractMatrix {

    /**
     * @param vertexCount number of rows (one per vertex)
     * @param edgeCount   number of columns (one per edge)
     */
    public Matrixla4j(int vertexCount, int edgeCount) {
        super(vertexCount, edgeCount);
    }

    @Override
    public Matrix blankOfShape(int rows, int columns) {
        return new Matrixla4j(rows, columns);
    }

    /** log(x + 1), with log(0) defined as 0 (used by the commented-out entropy terms). */
    static double log(double x) {
        return x == 0 ? 0 : Math.log(x + 1);
    }

    /**
     * Over all ordered pairs (i, j) of non-zero rows, computes the inner
     * product of row i with row j.
     *
     * @return {sum of products, sum of squared products}
     */
    public <M extends AbstractMatrix> double[] pByTSumPowSum() {
        double[] res = {0, 0};
        List<Integer> nzRows = new ArrayList<Integer>();
        Iterator<Integer> it = this.iteratorOfNonZeroRows();
        while (it.hasNext()) {
            nzRows.add(it.next());
        }
        double tmp;
        for (int i : nzRows) {
            for (int j : nzRows) {
                tmp = nonZeroIteratorOfRow(i)
                        .innerProduct(nonZeroIteratorOfRow(j));
                res[0] += tmp;
                res[1] += tmp * tmp;
            }
        }
        return res;
    }

    /**
     * Row-similarity statistics for this matrix (U) and {@code b} (V).
     * Returned array layout:
     * 0: max pair product of U, 1: sum (U), 2: sum of squares (U),
     * 3: max (V), 4: sum (V), 5: sum of squares (V),
     * 6: squared difference sum, i.e. res[2] + res[5] - 2*res[9],
     * 7: diagonal (trace) sum of U, 8: diagonal sum of V,
     * 9: cross-product sum over rows non-zero in both matrices.
     *
     * @param diag when false, diagonal pairs (i == j) are skipped
     */
    @Override
    public <M extends AbstractMatrix> double[] pByTSumPowSumDiffPowSum(M b, boolean diag) {
        double[] res = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
        Matrixla4j B = (Matrixla4j) b;

        // pass 1: pairwise inner products among this matrix's non-zero rows
        Set<Integer> nzRows = new HashSet<Integer>();
        Iterator<Integer> it = this.iteratorOfNonZeroRows();
        while (it.hasNext()) {
            nzRows.add(it.next());
        }
        double tmp;
        for (int i : nzRows) {
            for (int j : nzRows) if (i != j || diag) {
                tmp = nonZeroIteratorOfRow(i)
                        .innerProduct(nonZeroIteratorOfRow(j));
                if (tmp > res[0]) res[0] = tmp;
                res[1] += tmp;
                res[2] += tmp * tmp;
                if (i == j) res[7] += tmp;
            }
        }

        // pass 2: the same statistics for B
        Set<Integer> nzRowsB = new HashSet<Integer>();
        Iterator<Integer> itB = B.iteratorOfNonZeroRows();
        while (itB.hasNext()) {
            nzRowsB.add(itB.next());
        }
        for (int i : nzRowsB) {
            for (int j : nzRowsB) if (i != j || diag) {
                tmp = B.nonZeroIteratorOfRow(i)
                        .innerProduct(B.nonZeroIteratorOfRow(j));
                if (tmp > res[3]) res[3] = tmp;
                res[4] += tmp;
                res[5] += tmp * tmp;
                if (i == j) res[8] += tmp;
            }
        }

        // pass 3: cross terms, restricted to rows non-zero in BOTH matrices
        nzRows.retainAll(nzRowsB);
        double tmp2 = 0;
        for (int i : nzRows) {
            for (int j : nzRows) if (i != j || diag) {
                tmp = nonZeroIteratorOfRow(i)
                        .innerProduct(nonZeroIteratorOfRow(j));
                tmp2 = B.nonZeroIteratorOfRow(i)
                        .innerProduct(B.nonZeroIteratorOfRow(j));
                res[9] += tmp * tmp2;
            }
        }

        // ||U.UT||^2 + ||V.VT||^2 - 2 <U.UT, V.VT>
        res[6] = res[2] + res[5] - 2 * res[9];
        return res;
    }

    /** Returns this * this^T via la4j's fused routine (no explicit transpose built). */
    @SuppressWarnings("unchecked")
    @Override
    public <M extends AbstractMatrix> M productByItsTranspose() {
        return (M) this.multiplyByItsTranspose();
    }

    @SuppressWarnings("unchecked")
    @Override
    public <M extends AbstractMatrix> M elementwiseProduct(M b) {
        return (M) this.hadamardProduct((Matrix) b);
    }

    /** Returns this^T * b. */
    @SuppressWarnings("unchecked")
    @Override
    public <M extends AbstractMatrix> M transposeProduct(M b) {
        // Transpose once and reuse it. The original transposed twice and
        // computed (then discarded) an extra full matrix product.
        ColumnMajorSparseMatrix transposed = (ColumnMajorSparseMatrix) this.transpose();
        return (M) transposed.multiply((Matrix) b);
    }

    @Override
    public int getNonZeroCount() {
        return this.cardinality();
    }

    /** Divides every non-zero element by {@code m}, in place. */
    @Override
    public void divideInPlace(double m) {
        MatrixIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            it.set(x / m);
        }
    }

    /**
     * Applies {@code f} to every non-zero element, in place.
     * Note: zero entries are NOT visited, so f(0) is never materialized.
     */
    @Override
    public void applyFunctionInPlace(final Transformer<Double, Double> f) {
        MatrixIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            it.set(f.transform(x));
        }
    }

    /** Zeroes the main diagonal in place. */
    @Override
    public void zeroDiagonalsInPlace() {
        // An iterator-based version would win only when the non-zero count
        // is well below n log n; the direct loop is simpler and safe.
        for (int i = 0; i < this.rows(); i++)
            this.set(i, i, 0);
    }

    /** Sum of each non-zero element raised to the p-th power. */
    @Override
    public double powSum(int p) {
        double res = 0;
        MatrixIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            res += Math.pow(x, p);
        }
        return res;
    }

    /**
     * Histogram of cell values (index = value, entry = count). Slot 0 is set
     * to the number of zero cells, derived from the total pair count.
     *
     * @param countDiagonals when false, diagonal cells are excluded from the pair count
     */
    @SuppressWarnings("unchecked")
    @Override
    public <V extends AbstractVector> V getValueFrequencies(boolean countDiagonals) {
        double[] freq = new double[(int) max() + 1];
        MatrixIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            freq[(int) x]++;
        }
        Vectorla4j res = new Vectorla4j(freq);
        // widen BEFORE multiplying: rows()*columns() can overflow int for large matrices
        double pairCount = (double) rows() * columns() - (countDiagonals ? 0 : rows());
        res.set(0, pairCount - res.sum());
        return (V) res;
    }

    /**
     * Marginal sums: dimension 1 gives per-row sums, dimension 0 per-column
     * sums (via a column-major copy for efficient column access).
     * Returns null for any other dimension value.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <V extends AbstractVector> V getElemetMarginalSum(int dimension) {
        V res = null;
        if (dimension == 1) {
            res = (V) new Vectorla4j(this.rows());
            for (int i = 0; i < res.length(); i++) {
                res.set(i, getRow(i).sum());
            }
        } else if (dimension == 0) {
            Matrix tmp = this.toColumnMajorSparseMatrix();
            res = (V) new Vectorla4j(tmp.columns());
            for (int i = 0; i < res.length(); i++) {
                res.set(i, tmp.getColumn(i).sum());
            }
        }
        return res;
    }

    /** Returns this - b. */
    @SuppressWarnings("unchecked")
    @Override
    public <M extends AbstractMatrix> M subtract(M b) {
        // Resolves to la4j's subtract(Matrix) overload (argument's static type
        // is Matrix, which does not implement AbstractMatrix) - no recursion.
        Matrix result = this.subtract((Matrix) b);
        return (M) result;
    }
}
//
// @Override
// public AVector getValueFrequencies() {
// double[] freq = new double[(int)max()+1];
// MatrixIterator it = nonZeroIterator();
// while (it.hasNext()) {
// double x = it.next();
// freq[(int)x]++;
// }
// return new Vectorla4j(freq);
//// class FreqProce implements MatrixProcedure{
//// public int[] freq = new int[(int)max()+1];
//// @Override
//// public void apply(int i, int j, double value) {
//// freq[(int)value]++;
//// }
//// };
//// FreqProce procedure = new FreqProce() ;
//// this.eachNonZero(procedure );
//// return procedure.freq;
// }
//@Override
//public AVector getElemetMarginalSum(int dimension) {
// double[] freq = new double[(int)max()+1];
// MatrixIterator it = nonZeroIterator();
// while (it.hasNext()) {
// double x = it.next();
// freq[(int)x]++;
// }
// return new Vectorla4j(freq);
//}
//@Override
//public Matrixla4j elementwiseProduct(Matrixla4j b) {
// return (Matrixla4j) this.hadamardProduct(b);
//}
//@Override
//public Matrixla4j productByItsTranspose() {
// return (Matrixla4j)
//}
//@Override
//public Matrixla4j getTranspose() {
// return (Matrixla4j) this.transpose();
//}
//@Override
//public Matrixla4j transposeProduct(Matrixla4j b) {
// return (Matrixla4j) this.getTranspose().multiply(b);
//}
//@Override
//public int rows() {
// return this.rows();
//}
//@Override
//public int columns() {
// return this.columns();
//}
//
//@Override
//public void set(int i, int j, double value) {
// this.set(i, j, value);
//}
//
//@Override
//public double trace() {
// return this.trace();
//}
//
//@Override
//public double sum() {
// return this.sum();
//}
//
//@Override
//public double max() {
// return this.max();
//}
| |
/*
* Copyright 1999,2004-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.tribes.membership;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Arrays;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.tribes.io.XByteBuffer;
import org.apache.catalina.tribes.transport.SenderState;
/**
* A <b>membership</b> implementation using simple multicast.
* This is the representation of a multicast member.
* Carries the host, and port of the this or other cluster nodes.
*
* @author Filip Hanik
* @version $Revision: 304032 $, $Date: 2005-07-27 10:11:55 -0500 (Wed, 27 Jul 2005) $
*/
public class MemberImpl implements Member, java.io.Externalizable {

    /**
     * Public properties specific to this implementation
     */
    public static final transient String TCP_LISTEN_PORT = "tcpListenPort";
    public static final transient String TCP_LISTEN_HOST = "tcpListenHost";
    public static final transient String MEMBER_NAME = "memberName";

    /**
     * Wire-format markers ("TRIBES-B" / "TRIBES-E" in ASCII) that frame a
     * serialized member package; used to validate packages on arrival.
     */
    public static final transient byte[] TRIBES_MBR_BEGIN = new byte[] {84, 82, 73, 66, 69, 83, 45, 66};
    public static final transient byte[] TRIBES_MBR_END = new byte[] {84, 82, 73, 66, 69, 83, 45, 69};

    /**
     * The listen host for this member
     */
    protected byte[] host;
    protected transient String hostname;

    /**
     * The tcp listen port for this member
     */
    protected int port;

    /**
     * The tcp/SSL listen port for this member
     */
    protected int securePort = -1;

    /**
     * Counter for how many broadcast messages have been sent from this member
     */
    protected int msgCount = 0;

    /**
     * The number of milliseconds since this member was
     * created, is kept track of using the start time
     */
    protected long memberAliveTime = 0;

    /**
     * For the local member only
     */
    protected transient long serviceStartTime;

    /**
     * To avoid serialization over and over again, once the local dataPkg
     * has been set, we use that to transmit data
     */
    protected transient byte[] dataPkg = null;

    /**
     * Unique session Id for this member
     */
    protected byte[] uniqueId = new byte[16];

    /**
     * Custom payload that an app framework can broadcast
     * Also used to transport stop command.
     */
    protected byte[] payload = new byte[0];

    /**
     * Command, so that the custom payload doesn't have to be used
     * This is for internal tribes use, such as SHUTDOWN_COMMAND
     */
    protected byte[] command = new byte[0];

    /**
     * Domain if we want to filter based on domain.
     */
    protected byte[] domain = new byte[0];

    /**
     * Empty constructor for serialization
     */
    public MemberImpl() {
    }

    /**
     * Construct a new member object
     * @param host - the tcp listen host
     * @param port - the tcp listen port
     * @param aliveTime - number of milliseconds this member has been online
     * @throws IOException if the host name cannot be resolved to an address
     */
    public MemberImpl(String host,
                      int port,
                      long aliveTime) throws IOException {
        setHostname(host);
        this.port = port;
        this.memberAliveTime = aliveTime;
    }

    public MemberImpl(String host,
                      int port,
                      long aliveTime,
                      byte[] payload) throws IOException {
        this(host, port, aliveTime);
        setPayload(payload);
    }

    public boolean isReady() {
        return SenderState.getSenderState(this).isReady();
    }

    public boolean isSuspect() {
        return SenderState.getSenderState(this).isSuspect();
    }

    public boolean isFailing() {
        return SenderState.getSenderState(this).isFailing();
    }

    /**
     * Increment the message count.
     */
    protected void inc() {
        msgCount++;
    }

    /**
     * Create a data package to send over the wire representing this member.
     * This is faster than serialization.
     * @return - the bytes for this member serialized
     */
    public byte[] getData() {
        return getData(true);
    }

    /**
     * Highly optimized version of serializing a member into a byte array
     * Returns a cached byte[] reference, do not modify this data
     * @param getalive boolean
     * @return byte[]
     */
    public byte[] getData(boolean getalive) {
        return getData(getalive, false);
    }

    /**
     * Total length in bytes of the wire package produced by {@link #getData()}.
     */
    public int getDataLength() {
        return TRIBES_MBR_BEGIN.length + //start pkg
               4 + //data length
               8 + //alive time
               4 + //port
               4 + //secure port
               1 + //host length
               host.length + //host
               4 + //command length
               command.length + //command
               4 + //domain length
               domain.length + //domain
               16 + //unique id
               4 + //payload length
               payload.length + //payload
               TRIBES_MBR_END.length; //end pkg
    }

    /**
     *
     * @param getalive boolean - calculate memberAlive time
     * @param reset boolean - reset the cached data package, and create a new one
     * @return byte[]
     */
    public byte[] getData(boolean getalive, boolean reset) {
        if (reset) dataPkg = null;
        //look in cache first
        if (dataPkg != null) {
            if (getalive) {
                //you'd be surprised, but System.currentTimeMillis
                //shows up on the profiler
                long alive = System.currentTimeMillis() - getServiceStartTime();
                //patch only the alive field in the cached package, right after
                //the begin marker (TRIBES_MBR_BEGIN.length) and length field (4)
                XByteBuffer.toBytes((long) alive, dataPkg, TRIBES_MBR_BEGIN.length + 4);
            }
            return dataPkg;
        }
        //package looks like
        //start package TRIBES_MBR_BEGIN.length
        //package length - 4 bytes
        //alive - 8 bytes
        //port - 4 bytes
        //secure port - 4 bytes
        //host length - 1 byte
        //host - hl bytes
        //clen - 4 bytes
        //command - clen bytes
        //dlen - 4 bytes
        //domain - dlen bytes
        //uniqueId - 16 bytes
        //payload length - 4 bytes
        //payload plen bytes
        //end package TRIBES_MBR_END.length
        byte[] addr = host;
        long alive = System.currentTimeMillis() - getServiceStartTime();
        byte hl = (byte) addr.length;
        byte[] data = new byte[getDataLength()];

        //body length excludes both markers and the 4-byte length field itself
        int bodylength = (getDataLength() - TRIBES_MBR_BEGIN.length - TRIBES_MBR_END.length - 4);

        int pos = 0;

        //TRIBES_MBR_BEGIN
        System.arraycopy(TRIBES_MBR_BEGIN, 0, data, pos, TRIBES_MBR_BEGIN.length);
        pos += TRIBES_MBR_BEGIN.length;

        //body length
        XByteBuffer.toBytes(bodylength, data, pos);
        pos += 4;

        //alive data
        XByteBuffer.toBytes((long) alive, data, pos);
        pos += 8;
        //port
        XByteBuffer.toBytes(port, data, pos);
        pos += 4;
        //secure port
        XByteBuffer.toBytes(securePort, data, pos);
        pos += 4;
        //host length
        data[pos++] = hl;
        //host
        System.arraycopy(addr, 0, data, pos, addr.length);
        pos += addr.length;
        //clen - 4 bytes
        XByteBuffer.toBytes(command.length, data, pos);
        pos += 4;
        //command - clen bytes
        System.arraycopy(command, 0, data, pos, command.length);
        pos += command.length;
        //dlen - 4 bytes
        XByteBuffer.toBytes(domain.length, data, pos);
        pos += 4;
        //domain - dlen bytes
        System.arraycopy(domain, 0, data, pos, domain.length);
        pos += domain.length;
        //unique Id
        System.arraycopy(uniqueId, 0, data, pos, uniqueId.length);
        pos += uniqueId.length;
        //payload
        XByteBuffer.toBytes(payload.length, data, pos);
        pos += 4;
        System.arraycopy(payload, 0, data, pos, payload.length);
        pos += payload.length;

        //TRIBES_MBR_END
        System.arraycopy(TRIBES_MBR_END, 0, data, pos, TRIBES_MBR_END.length);
        pos += TRIBES_MBR_END.length;

        //create local data
        dataPkg = data;
        return data;
    }

    /**
     * Deserializes a member from data sent over the wire
     * @param data - the bytes received
     * @return a member object.
     */
    public static MemberImpl getMember(byte[] data, MemberImpl member) {
        return getMember(data, 0, data.length, member);
    }

    /**
     * Deserializes a member from a region of a byte array; the inverse of
     * {@link #getData(boolean, boolean)} — see that method for the layout.
     * @throws IllegalArgumentException if the begin/end markers are missing
     * @throws ArrayIndexOutOfBoundsException if the region is too short
     */
    public static MemberImpl getMember(byte[] data, int offset, int length, MemberImpl member) {
        //package looks like
        //start package TRIBES_MBR_BEGIN.length
        //package length - 4 bytes
        //alive - 8 bytes
        //port - 4 bytes
        //secure port - 4 bytes
        //host length - 1 byte
        //host - hl bytes
        //clen - 4 bytes
        //command - clen bytes
        //dlen - 4 bytes
        //domain - dlen bytes
        //uniqueId - 16 bytes
        //payload length - 4 bytes
        //payload plen bytes
        //end package TRIBES_MBR_END.length

        int pos = offset;

        if (XByteBuffer.firstIndexOf(data, offset, TRIBES_MBR_BEGIN) != pos) {
            throw new IllegalArgumentException("Invalid package, should start with:" + org.apache.catalina.tribes.util.Arrays.toString(TRIBES_MBR_BEGIN));
        }

        if (length < (TRIBES_MBR_BEGIN.length + 4)) {
            throw new ArrayIndexOutOfBoundsException("Member package too small to validate.");
        }

        pos += TRIBES_MBR_BEGIN.length;

        int bodylength = XByteBuffer.toInt(data, pos);
        pos += 4;

        if (length < (bodylength + 4 + TRIBES_MBR_BEGIN.length + TRIBES_MBR_END.length)) {
            throw new ArrayIndexOutOfBoundsException("Not enough bytes in member package.");
        }

        int endpos = pos + bodylength;
        if (XByteBuffer.firstIndexOf(data, endpos, TRIBES_MBR_END) != endpos) {
            throw new IllegalArgumentException("Invalid package, should end with:" + org.apache.catalina.tribes.util.Arrays.toString(TRIBES_MBR_END));
        }

        byte[] alived = new byte[8];
        System.arraycopy(data, pos, alived, 0, 8);
        pos += 8;
        byte[] portd = new byte[4];
        System.arraycopy(data, pos, portd, 0, 4);
        pos += 4;

        byte[] sportd = new byte[4];
        System.arraycopy(data, pos, sportd, 0, 4);
        pos += 4;

        byte hl = data[pos++];
        byte[] addr = new byte[hl];
        System.arraycopy(data, pos, addr, 0, hl);
        pos += hl;

        int cl = XByteBuffer.toInt(data, pos);
        pos += 4;

        byte[] command = new byte[cl];
        System.arraycopy(data, pos, command, 0, command.length);
        pos += command.length;

        int dl = XByteBuffer.toInt(data, pos);
        pos += 4;

        byte[] domain = new byte[dl];
        System.arraycopy(data, pos, domain, 0, domain.length);
        pos += domain.length;

        byte[] uniqueId = new byte[16];
        System.arraycopy(data, pos, uniqueId, 0, 16);
        pos += 16;

        int pl = XByteBuffer.toInt(data, pos);
        pos += 4;

        byte[] payload = new byte[pl];
        System.arraycopy(data, pos, payload, 0, payload.length);
        pos += payload.length;

        member.setHost(addr);
        member.setPort(XByteBuffer.toInt(portd, 0));
        member.setSecurePort(XByteBuffer.toInt(sportd, 0));
        member.setMemberAliveTime(XByteBuffer.toLong(alived, 0));
        member.setUniqueId(uniqueId);
        member.payload = payload;
        member.domain = domain;
        member.command = command;

        //cache the incoming bytes so re-serialization is free
        member.dataPkg = new byte[length];
        System.arraycopy(data, offset, member.dataPkg, 0, length);

        return member;
    }

    public static MemberImpl getMember(byte[] data) {
        return getMember(data, new MemberImpl());
    }

    /**
     * Return the name of this object
     * @return a unique name to the cluster
     */
    public String getName() {
        return "tcp://" + getHostname() + ":" + getPort();
    }

    /**
     * Return the listen port of this member
     * @return - tcp listen port
     */
    public int getPort() {
        return this.port;
    }

    /**
     * Return the TCP listen host for this member
     * @return IP address or host name
     */
    public byte[] getHost() {
        return host;
    }

    public String getHostname() {
        //lazily resolve and cache the host name from the address bytes
        if (this.hostname != null) return hostname;
        else {
            try {
                this.hostname = java.net.InetAddress.getByAddress(host).getHostName();
                return this.hostname;
            } catch (IOException x) {
                throw new RuntimeException("Unable to parse hostname.", x);
            }
        }
    }

    /**
     * Contains information on how long this member has been online.
     * The result is the number of milli seconds this member has been
     * broadcasting its membership to the cluster.
     * @return nr of milliseconds since this member started.
     */
    public long getMemberAliveTime() {
        return memberAliveTime;
    }

    public long getServiceStartTime() {
        return serviceStartTime;
    }

    public byte[] getUniqueId() {
        return uniqueId;
    }

    public byte[] getPayload() {
        return payload;
    }

    public byte[] getCommand() {
        return command;
    }

    public byte[] getDomain() {
        return domain;
    }

    public int getSecurePort() {
        return securePort;
    }

    public void setMemberAliveTime(long time) {
        memberAliveTime = time;
    }

    /**
     * String representation of this object
     */
    public String toString() {
        StringBuffer buf = new StringBuffer("org.apache.catalina.tribes.membership.MemberImpl[");
        buf.append(getName()).append(",");
        buf.append(getHostname()).append(",");
        buf.append(port).append(", alive=");
        buf.append(memberAliveTime).append(",");
        buf.append("id=").append(bToS(this.uniqueId)).append(", ");
        buf.append("payload=").append(bToS(this.payload, 8)).append(", ");
        buf.append("command=").append(bToS(this.command, 8)).append(", ");
        buf.append("domain=").append(bToS(this.domain, 8)).append(", ");
        buf.append("]");
        return buf.toString();
    }

    public static String bToS(byte[] data) {
        return bToS(data, data.length);
    }

    /**
     * Renders a byte array as "{b0 b1 ...}", truncating after {@code max}
     * elements with an ellipsis that includes the total length.
     */
    public static String bToS(byte[] data, int max) {
        StringBuffer buf = new StringBuffer(4 * 16);
        buf.append("{");
        for (int i = 0; data != null && i < data.length; i++) {
            buf.append(String.valueOf(data[i])).append(" ");
            if (i == max) {
                buf.append("...(" + data.length + ")");
                break;
            }
        }
        buf.append("}");
        return buf.toString();
    }

    /**
     * @see java.lang.Object#hashCode()
     * @return The hash code
     */
    public int hashCode() {
        //consistent with equals(): members that are equal share the same host bytes
        return getHost()[0] + getHost()[1] + getHost()[2] + getHost()[3];
    }

    /**
     * Returns true if the param o is a MemberImpl with the same host,
     * port and unique id.
     * @param o
     */
    public boolean equals(Object o) {
        if (o instanceof MemberImpl) {
            return Arrays.equals(this.getHost(), ((MemberImpl) o).getHost()) &&
                   this.getPort() == ((MemberImpl) o).getPort() &&
                   Arrays.equals(this.getUniqueId(), ((MemberImpl) o).getUniqueId());
        }
        else
            return false;
    }

    public void setHost(byte[] host) {
        this.host = host;
    }

    public void setHostname(String host) throws IOException {
        hostname = host;
        this.host = java.net.InetAddress.getByName(host).getAddress();
    }

    public void setMsgCount(int msgCount) {
        this.msgCount = msgCount;
    }

    public void setPort(int port) {
        this.port = port;
        //invalidate the cached wire package; it embeds the port
        this.dataPkg = null;
    }

    public void setServiceStartTime(long serviceStartTime) {
        this.serviceStartTime = serviceStartTime;
    }

    public void setUniqueId(byte[] uniqueId) {
        this.uniqueId = uniqueId != null ? uniqueId : new byte[16];
        //rebuild the cached wire package with the new id
        getData(true, true);
    }

    public void setPayload(byte[] payload) {
        byte[] oldpayload = this.payload;
        this.payload = payload != null ? payload : new byte[0];
        if (this.getData(true, true).length > McastServiceImpl.MAX_PACKET_SIZE) {
            //roll back: the member package must fit into a single multicast packet
            this.payload = oldpayload;
            throw new IllegalArgumentException("Payload is too large for tribes to handle.");
        }
    }

    public void setCommand(byte[] command) {
        this.command = command != null ? command : new byte[0];
        getData(true, true);
    }

    public void setDomain(byte[] domain) {
        this.domain = domain != null ? domain : new byte[0];
        getData(true, true);
    }

    public void setSecurePort(int securePort) {
        this.securePort = securePort;
    }

    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        int length = in.readInt();
        byte[] message = new byte[length];
        //readFully is required here: in.read(message) may return a partial
        //buffer, which would corrupt the member package during deserialization
        in.readFully(message);
        getMember(message, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        byte[] data = this.getData();
        out.writeInt(data.length);
        out.write(data);
    }
}
| |
package com.taxonic.carml.engine.rdf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.jupiter.api.Assertions.assertThrows;
import com.taxonic.carml.engine.RmlMapperException;
import com.taxonic.carml.engine.reactivedev.join.impl.CarmlChildSideJoinStoreProvider;
import com.taxonic.carml.engine.reactivedev.join.impl.CarmlParentSideJoinConditionStoreProvider;
import com.taxonic.carml.engine.sourceresolver.ClassPathResolver;
import com.taxonic.carml.logicalsourceresolver.CsvResolver;
import com.taxonic.carml.logicalsourceresolver.XPathResolver;
import com.taxonic.carml.model.TriplesMap;
import com.taxonic.carml.util.RmlMappingLoader;
import com.taxonic.carml.vocab.Rdf;
import java.io.InputStream;
import java.nio.file.Path;
import java.text.Normalizer;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.impl.ValidatingValueFactory;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
/**
 * Unit tests for {@code RdfRmlMapper}: builder validation (missing resolvers,
 * missing mappings, unsupported reference formulations) and CSV mapping via
 * default/named input streams, class-path file sources, and per-triples-map
 * filtered outputs.
 */
@ExtendWith(MockitoExtension.class)
class RdfRmlMapperTest {

    @Test
    void givenBuilderWithNoLogicalSourceResolver_whenBuild_thenThrowException() {
        // Given
        RdfRmlMapper.Builder builder = RdfRmlMapper.builder();

        // When
        RmlMapperException rmlMapperException = assertThrows(RmlMapperException.class, builder::build);

        // Then
        assertThat(rmlMapperException.getMessage(), is("No logical source resolver suppliers specified."));
    }

    @Test
    void givenBuilderWithNoMapping_whenBuild_thenThrowException() {
        // Given
        RdfRmlMapper.Builder builder = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.XPath, XPathResolver::getInstance);

        // When
        RmlMapperException rmlMapperException = assertThrows(RmlMapperException.class, builder::build);

        // Then
        assertThat(rmlMapperException.getMessage(), is("No actionable triples maps provided."));
    }

    @Test
    void givenBuilderWithMappingWithUnsupportedReferenceFormulation_whenBuild_thenThrowException() {
        // Given: the mapping uses XPath, but only a JsonPath resolver is registered
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("mapping.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper.Builder builder = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.JsonPath, XPathResolver::getInstance)
            .triplesMaps(mapping);

        // When
        RmlMapperException rmlMapperException = assertThrows(RmlMapperException.class, builder::build);

        // Then
        assertThat(rmlMapperException.getMessage(),
            is("No logical source resolver supplier bound for reference formulation http://semweb.mmlab.be/ns/ql#XPath"));
    }

    @Test
    void givenAllOptions_whenBuild_thenBuildMapperCorrectly() {
        // Given: every builder option exercised at once
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("mapping.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper.Builder builder = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.XPath, XPathResolver::getInstance)
            .triplesMaps(mapping)
            .valueFactorySupplier(ValidatingValueFactory::new)
            .classPathResolver("classpath")
            .fileResolver(Path.of("file"))
            .iriUpperCasePercentEncoding(true)
            .iriUnicodeNormalization(Normalizer.Form.NFKC)
            .childSideJoinStoreProvider(CarmlChildSideJoinStoreProvider.of())
            .parentSideJoinConditionStoreProvider(CarmlParentSideJoinConditionStoreProvider.of())
            .addFunctions(new Object())
            .sourceResolver(o -> Optional.empty());

        // When
        RdfRmlMapper rmlMapper = builder.build();

        // Then
        assertThat(rmlMapper.getTriplesMaps(), is(mapping));
    }

    @Test
    void givenMappingExpectingInputStream_whenMapCalledWithoutInputStream_thenThrowException() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();

        // When: map() without a source input stream
        RmlMapperException rmlMapperException = assertThrows(RmlMapperException.class, rmlMapper::map);

        // Then
        assertThat(rmlMapperException.getMessage(), is("Could not resolve input stream with name DEFAULT for logical"
            + " source resource <http://example.com/mapping/LogicalSource>"));
    }

    @Test
    void givenMappingExpectingInputStream_whenMapWithInputStream_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");

        // When
        Flux<Statement> statements = rmlMapper.map(sourceInputStream);

        // Then: 22 statements expected from the cars.csv fixture
        StepVerifier.create(statements)
            .expectNextCount(22)
            .expectComplete()
            .verify();
    }

    @Test
    void givenMappingExpectingInputStream_whenMapPerTriplesMapWithInputStream_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");
        TriplesMap carMapping = getTriplesMapByName("http://example.com/mapping/CarMapping", mapping);
        TriplesMap makeMapping = getTriplesMapByName("http://example.com/mapping/MakeMapping", mapping);

        // When
        Map<TriplesMap, Flux<Statement>> statementsPerTriplesMap = rmlMapper.mapPerTriplesMap(sourceInputStream);

        // Then: subscribe to both fluxes before verifying either, since they
        // share the same underlying source
        StepVerifier deferredCarStatements = StepVerifier.create(statementsPerTriplesMap.get(carMapping))
            .expectNextCount(18)
            .expectComplete()
            .verifyLater();

        StepVerifier deferredMakeStatements = StepVerifier.create(statementsPerTriplesMap.get(makeMapping))
            .expectNextCount(4)
            .expectComplete()
            .verifyLater();

        deferredCarStatements.verify();
        deferredMakeStatements.verify();
    }

    @Test
    void givenMappingExpectingInputStream_whenMapToModelWithInputStream_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");

        // When
        Model model = rmlMapper.mapToModel(sourceInputStream);

        // Then: 21 unique statements (the Model deduplicates the 22 emitted)
        assertThat(model.size(), is(21));
    }

    @Test
    void givenMappingExpectingInputStreamAndTriplesMapFilter_whenMapToModelWithInputStreamAndFilter_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        TriplesMap makeMapping = mapping.stream()
            .filter(tm -> tm.getResourceName()
                .equals("http://example.com/mapping/MakeMapping"))
            .findFirst()
            .orElseThrow(IllegalStateException::new);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");

        // When
        Model model = rmlMapper.mapToModel(sourceInputStream, Set.of(makeMapping));

        // Then
        assertThat(model.size(), is(3));
    }

    @Test
    void givenMappingExpectingInputStream_whenMapItemToModelWithInputStream_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");

        // When
        Model model = rmlMapper.mapItemToModel(sourceInputStream);

        // Then
        assertThat(model.size(), is(21));
    }

    @Test
    void givenMappingExpectingInputStreamAndFilter_whenMapItemToModelWithInputStreamAndFilter_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        TriplesMap makeMapping = getTriplesMapByName("http://example.com/mapping/MakeMapping", mapping);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        InputStream sourceInputStream = RdfRmlMapperTest.class.getResourceAsStream("cars.csv");

        // When
        Model model = rmlMapper.mapItemToModel(sourceInputStream, Set.of(makeMapping));

        // Then
        assertThat(model.size(), is(3));
    }

    @Test
    void givenMappingExpectingNamedInputStream_whenMapToModelWithInputStream_thenMapCorrectly() {
        // Given: the mapping references a stream named "cars"
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars-stream-name.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        Map<String, InputStream> namedInputStream = Map.of("cars", RdfRmlMapperTest.class.getResourceAsStream("cars.csv"));

        // When
        Model model = rmlMapper.mapToModel(namedInputStream);

        // Then
        assertThat(model.size(), is(21));
    }

    @Test
    void givenMappingExpectingNamedInputStream_whenMapToModelWithoutThatInputStream_thenThrowException() {
        // Given: the mapping expects a stream named "cars" but only "foo" is supplied
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars-stream-name.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        Map<String, InputStream> namedInputStream = Map.of("foo", RdfRmlMapperTest.class.getResourceAsStream("cars.csv"));

        // When
        RmlMapperException rmlMapperException =
            assertThrows(RmlMapperException.class, () -> rmlMapper.mapToModel(namedInputStream));

        // Then
        assertThat(rmlMapperException.getMessage(), is("Could not resolve input stream with name cars for logical"
            + " source resource <http://example.com/mapping/LogicalSource>"));
    }

    @Test
    void givenMappingExpectingNamedInputStreamAndFilter_whenMapItemToModelWithInputStreamAndFilter_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars-stream-name.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        TriplesMap makeMapping = getTriplesMapByName("http://example.com/mapping/MakeMapping", mapping);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .build();
        Map<String, InputStream> namedInputStream = Map.of("cars", RdfRmlMapperTest.class.getResourceAsStream("cars.csv"));

        // When
        Model model = rmlMapper.mapToModel(namedInputStream, Set.of(makeMapping));

        // Then
        assertThat(model.size(), is(3));
    }

    @Test
    void givenMappingExpectingFileSource_whenMapToModel_thenMapCorrectly() {
        // Given: the mapping's source is a file resolved from the class path
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars-file-input.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .classPathResolver(ClassPathResolver.of(RdfRmlMapperTest.class))
            .build();

        // When
        Model model = rmlMapper.mapToModel();

        // Then
        assertThat(model.size(), is(21));
    }

    @Test
    void givenMappingExpectingFileSourceAndFilter_whenMapItemToModelWithFilter_thenMapCorrectly() {
        // Given
        InputStream mappingSource = RdfRmlMapperTest.class.getResourceAsStream("cars-file-input.rml.ttl");
        Set<TriplesMap> mapping = RmlMappingLoader.build()
            .load(RDFFormat.TURTLE, mappingSource);
        TriplesMap makeMapping = getTriplesMapByName("http://example.com/mapping/MakeMapping", mapping);
        RdfRmlMapper rmlMapper = RdfRmlMapper.builder()
            .setLogicalSourceResolver(Rdf.Ql.Csv, CsvResolver::getInstance)
            .triplesMaps(mapping)
            .classPathResolver(ClassPathResolver.of(RdfRmlMapperTest.class))
            .build();

        // When
        Model model = rmlMapper.mapToModel(Set.of(makeMapping));

        // Then
        assertThat(model.size(), is(3));
    }

    // Looks up a triples map by its resource name; fails the test if absent.
    private static TriplesMap getTriplesMapByName(String name, Set<TriplesMap> mapping) {
        return mapping.stream()
            .filter(tm -> tm.getResourceName()
                .equals(name))
            .findFirst()
            .orElseThrow(IllegalStateException::new);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport;
import org.elasticsearch.common.unit.TimeValue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A connection profile describes how many connection are established to specific node for each of the available request types.
* ({@link org.elasticsearch.transport.TransportRequestOptions.Type}). This allows to tailor a connection towards a specific usage.
*/
public final class ConnectionProfile {
/**
 * A pre-built light connection profile that shares a single connection across all
 * types.
 */
public static final ConnectionProfile LIGHT_PROFILE = new ConnectionProfile(
    Collections.singletonList(new ConnectionTypeHandle(0, 1, EnumSet.of(
        TransportRequestOptions.Type.BULK,
        TransportRequestOptions.Type.PING,
        TransportRequestOptions.Type.RECOVERY,
        TransportRequestOptions.Type.REG,
        TransportRequestOptions.Type.STATE))), 1, null, null);

// Per-type handles and totals captured at build time; the profile is immutable.
private final List<ConnectionTypeHandle> handles;
private final int numConnections;
// Either timeout may be null, meaning no explicit timeout is set on this profile.
private final TimeValue connectTimeout;
private final TimeValue handshakeTimeout;
// Private: instances are obtained via LIGHT_PROFILE or ConnectionProfile.Builder.
private ConnectionProfile(List<ConnectionTypeHandle> handles, int numConnections, TimeValue connectTimeout, TimeValue handshakeTimeout)
{
    this.handles = handles;
    this.numConnections = numConnections;
    this.connectTimeout = connectTimeout;
    this.handshakeTimeout = handshakeTimeout;
}
/**
 * A builder to build a new {@link ConnectionProfile}
 */
public static class Builder {
    private final List<ConnectionTypeHandle> handles = new ArrayList<>();
    private final Set<TransportRequestOptions.Type> addedTypes = EnumSet.noneOf(TransportRequestOptions.Type.class);
    // Running total of connections added so far; also the start offset of the next handle.
    private int offset = 0;
    private TimeValue connectTimeout;
    private TimeValue handshakeTimeout;

    /**
     * Sets a connect timeout for this connection profile
     * @throws IllegalArgumentException if the timeout is negative
     */
    public void setConnectTimeout(TimeValue connectTimeout) {
        if (connectTimeout.millis() < 0) {
            throw new IllegalArgumentException("connectTimeout must be non-negative but was: " + connectTimeout);
        }
        this.connectTimeout = connectTimeout;
    }

    /**
     * Sets a handshake timeout for this connection profile
     * @throws IllegalArgumentException if the timeout is negative
     */
    public void setHandshakeTimeout(TimeValue handshakeTimeout) {
        if (handshakeTimeout.millis() < 0) {
            throw new IllegalArgumentException("handshakeTimeout must be non-negative but was: " + handshakeTimeout);
        }
        this.handshakeTimeout = handshakeTimeout;
    }

    /**
     * Adds a number of connections for one or more types. Each type can only be added once.
     * @param numConnections the number of connections to use in the pool for the given connection types
     * @param types a set of types that should share the given number of connections
     * @throws IllegalArgumentException if no types are given or a type is already registered
     */
    public void addConnections(int numConnections, TransportRequestOptions.Type... types) {
        if (types == null || types.length == 0) {
            // message fixed: this branch also rejects an empty (non-null) array
            throw new IllegalArgumentException("types must not be null or empty");
        }
        for (TransportRequestOptions.Type type : types) {
            if (addedTypes.contains(type)) {
                throw new IllegalArgumentException("type [" + type + "] is already registered");
            }
        }
        addedTypes.addAll(Arrays.asList(types));
        handles.add(new ConnectionTypeHandle(offset, numConnections, EnumSet.copyOf(Arrays.asList(types))));
        offset += numConnections;
    }

    /**
     * Creates a new {@link ConnectionProfile} based on the added connections.
     * @throws IllegalStateException if any of the {@link org.elasticsearch.transport.TransportRequestOptions.Type} enum is missing
     */
    public ConnectionProfile build() {
        EnumSet<TransportRequestOptions.Type> types = EnumSet.allOf(TransportRequestOptions.Type.class);
        types.removeAll(addedTypes);
        if (types.isEmpty() == false) {
            throw new IllegalStateException("not all types are added for this connection profile - missing types: " + types);
        }
        return new ConnectionProfile(Collections.unmodifiableList(handles), offset, connectTimeout, handshakeTimeout);
    }
}
/**
 * Returns the connect timeout or <code>null</code> if no explicit timeout is set on this profile.
 *
 * @return the connect timeout, possibly <code>null</code>
 */
public TimeValue getConnectTimeout() {
    return connectTimeout;
}

/**
 * Returns the handshake timeout or <code>null</code> if no explicit timeout is set on this profile.
 *
 * @return the handshake timeout, possibly <code>null</code>
 */
public TimeValue getHandshakeTimeout() {
    return handshakeTimeout;
}

/**
 * Returns the total number of connections for this profile.
 *
 * @return the number of distinct connections summed across all type handles
 */
public int getNumConnections() {
    return numConnections;
}
/**
 * Returns the number of connections per type for this profile. This might return a count that is shared with other types such
 * that the sum of all connections per type might be higher than {@link #getNumConnections()}. For instance if
 * {@link org.elasticsearch.transport.TransportRequestOptions.Type#BULK} shares connections with
 * {@link org.elasticsearch.transport.TransportRequestOptions.Type#REG} they will return both the same number of connections from
 * this method but the connections are not distinct.
 */
public int getNumConnectionsPerType(TransportRequestOptions.Type type) {
    // Scan the handles for the first one covering the requested type.
    for (ConnectionTypeHandle connectionHandle : handles) {
        if (connectionHandle.getTypes().contains(type) == false) {
            continue;
        }
        return connectionHandle.length;
    }
    // Builder#build() guarantees every type is registered, so this is unreachable.
    throw new AssertionError("no handle found for type: " + type);
}
/**
 * Returns the type handles for this connection profile.
 *
 * @return an unmodifiable view of the configured handles
 */
List<ConnectionTypeHandle> getHandles() {
    return Collections.unmodifiableList(handles);
}
/**
 * Connection type handle encapsulates the logic which connection
 */
static final class ConnectionTypeHandle {
    public final int length;
    public final int offset;
    private final Set<TransportRequestOptions.Type> types;
    // Monotonically increasing ticket used to rotate through the slice of channels.
    private final AtomicInteger rotation = new AtomicInteger();

    private ConnectionTypeHandle(int offset, int length, Set<TransportRequestOptions.Type> types) {
        this.offset = offset;
        this.length = length;
        this.types = types;
    }

    /**
     * Returns one of the channels out configured for this handle. The channel is selected in a round-robin
     * fashion.
     */
    <T> T getChannel(T[] channels) {
        if (length == 0) {
            throw new IllegalStateException("can't select channel size is 0");
        }
        assert channels.length >= offset + length : "illegal size: " + channels.length + " expected >= " + (offset + length);
        // floorMod keeps the slot non-negative even after the counter overflows.
        final int slot = Math.floorMod(rotation.incrementAndGet(), length);
        return channels[offset + slot];
    }

    /**
     * Returns all types for this handle
     */
    Set<TransportRequestOptions.Type> getTypes() {
        return types;
    }
}
}
| |
/*
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 Vimeo
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.vimeo.turnstile.database;
import android.content.Context;
import android.database.Cursor;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import android.text.TextUtils;
import com.vimeo.turnstile.BaseTask;
import com.vimeo.turnstile.Serializer;
import com.vimeo.turnstile.utils.TaskLogger;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
/**
 * The database to hold all the {@link BaseTask}.
 * <p/>
 * Created by kylevenn on 2/10/16.
 */
class TaskDatabase<T extends BaseTask> {

    // Single-threaded executor: serializes all asynchronous write operations so
    // they run in submission order (see execute(Runnable)).
    private static final Executor IO_THREAD = Executors.newSingleThreadExecutor();

    // Converts tasks to/from their stored representation.
    private final Serializer<T> mSerializer;

    // Open helper that owns the underlying database and all raw queries.
    private final TaskDatabaseOpenHelper<T> mTaskDatabase;

    /**
     * Runs a runnable on the executor for this
     * database. All write operations on this
     * database that are not run synchronously
     * should be run using this executor, in order
     * to guarantee correct execution order.
     *
     * @param runnable the runnable to execute.
     */
    static void execute(@NonNull Runnable runnable) {
        IO_THREAD.execute(runnable);
    }

    TaskDatabase(@NonNull Context context, @NonNull String name, @NonNull Serializer<T> serializer) {
        mTaskDatabase = new TaskDatabaseOpenHelper<>(context, name, serializer);
        mSerializer = serializer;
    }

    /**
     * Gets the task associated with the
     * specified id.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @param id the id to look for
     * @return a task associated with the
     * id, or null if it does not exist.
     * @throws IllegalStateException if more than one task shares the id,
     *                               which indicates a corrupted table.
     */
    @WorkerThread
    @Nullable
    T getTask(@NonNull String id) {
        if (id.isEmpty()) {
            return null;
        }
        Cursor cursor = mTaskDatabase.itemForIdQuery(id);
        List<T> tasks = getTasksFromCursor(cursor);
        if (tasks.size() > 1) {
            // The id is expected to be unique; multiple rows means the data is corrupt.
            throw new IllegalStateException("More than one task with the same id: " + id);
        }
        return !tasks.isEmpty() ? tasks.get(0) : null;
    }

    /**
     * Retrieves all tasks stored in the database.
     * (The previous doc described a {@code where} clause parameter that this
     * method does not take.)
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @return a non-null list of tasks, may be
     * empty if the database contains no tasks.
     */
    @WorkerThread
    @NonNull
    List<T> getAllTasks() {
        Cursor cursor = mTaskDatabase.allItemsQuery();
        return getTasksFromCursor(cursor);
    }

    // Drains the cursor into a list, skipping rows that fail deserialization,
    // and always closes the cursor when done.
    @WorkerThread
    @NonNull
    private List<T> getTasksFromCursor(@NonNull Cursor cursor) {
        List<T> tasks = new ArrayList<>();
        try {
            while (cursor.moveToNext()) {
                T task = TaskDatabaseOpenHelper.getTaskFromCursor(cursor, mSerializer);
                if (task != null) {
                    // If something went wrong in deserialization, it will be null. It's logged earlier, but
                    // for now, we fail silently in the night 2/25/16 [KV]
                    tasks.add(task);
                }
            }
        } catch (Exception e) {
            // Best-effort read: a partial list is preferred over a crash here.
            TaskLogger.getLogger().e("Unable to retrieve tasks from database", e);
        } finally {
            cursor.close();
        }
        return tasks;
    }

    /**
     * Inserts a task into the database and
     * returns the id of the row that the
     * task was inserted into.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @param task the task to insert, must
     *             not be null.
     * @return the id of the row inserted,
     * if the insert fails, -1 will be returned.
     */
    @WorkerThread
    long insert(@NonNull T task) {
        return mTaskDatabase.insert(task);
    }

    /**
     * Inserts a task if it doesn't exist,
     * otherwise updates the current task that
     * exists with the particular task id with
     * the new values of this task.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @param task the task to insert or update,
     *             must not be null.
     * @return the result of the underlying upsert — presumably {@code true} on
     * success; confirm against {@code TaskDatabaseOpenHelper#upsertItem}. (The
     * previous doc claimed a row id was returned, but the return type is boolean.)
     */
    @WorkerThread
    boolean upsert(@NonNull T task) {
        return mTaskDatabase.upsertItem(task);
    }

    /**
     * Returns a count of all the tasks
     * in the database.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @return the number of tasks in the database.
     */
    @WorkerThread
    long count() {
        return mTaskDatabase.getCount();
    }

    // -----------------------------------------------------------------------------------------------------
    // Delete
    // -----------------------------------------------------------------------------------------------------
    // <editor-fold desc="Delete">

    /**
     * Removes a task with the same id as
     * the task passed in from the database.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @param task the task to remove from
     *             the database.
     */
    @WorkerThread
    void remove(@NonNull T task) {
        remove(task.getId());
    }

    /**
     * Deletes the task from the database
     * with the specified id.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     *
     * @param id the id of the task to delete.
     *           If the id is empty for whatever
     *           reason, this method will log a
     *           warning and simply return without
     *           doing anything.
     */
    @WorkerThread
    void remove(@NonNull String id) {
        if (TextUtils.isEmpty(id)) {
            TaskLogger.getLogger().w("Warning, TaskDatabase.remove called with empty id.");
            return;
        }
        delete(id);
    }

    @WorkerThread
    private void delete(@NonNull String id) {
        mTaskDatabase.deleteItemForId(id);
    }

    /**
     * Removes all tasks from the database.
     * <p/>
     * NOTE: this method is synchronous and
     * should be called from a {@link WorkerThread}.
     */
    @WorkerThread
    void removeAll() {
        mTaskDatabase.truncateDatabase();
        // Reclaim the freed pages after truncation.
        mTaskDatabase.vacuumDatabase();
    }
    // </editor-fold>
}
| |
/*
* Copyright (C) 1999 Jesse E. Peterson
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
*
*/
package com.jpeterson.x10.module;
import java.io.EOFException;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import com.jpeterson.util.HexFormat;
import com.jpeterson.x10.InterruptedTransmissionException;
import com.jpeterson.x10.TooManyAttemptsException;
/**
 * Create a standard CM11 transmission event to transmit. Standard
 * transmission events are transmitted to the CM11 by a protocol
 * that provides safeguards to ensure that the message is sent
 * to the CM11 device correctly. The safeguard implemented is
 * a checksum algorithm.
 *
 * @author Jesse Peterson <jesse@jpeterson.com>
 */
public class CM11AStandardTransmission extends Object
    implements CM11ATransmissionEvent
{
    // Raw bytes to send to the CM11A interface.
    protected byte[] packet;
    // Number of transmission attempts made so far; note it is never reset, so a
    // reused instance carries its attempt count across transmit() calls.
    private int attempts;
    // Maximum number of attempts before TooManyAttemptsException is thrown.
    private int maxAttempts;

    // Acknowledgement byte sent to the interface when the checksum matches.
    private static final byte CHECKSUM_OK = (byte)0x00;
    // Byte the interface sends once it has finished processing the command.
    private static final byte INTERFACE_READY = (byte)0x55;

    /**
     * Create a standard CM11 transmission event to transmit the specified
     * packet of bytes.
     *
     * @param packet The packet of bytes to transmit to the CM11 interface
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    public CM11AStandardTransmission(byte[] packet)
    {
        this.packet = packet;
        attempts = 0;
        setMaxAttempts(3);
    }

    /**
     * Transmit a standard CM11 command from the PC to the CM11 interface.
     * The standard transmission sends a packet of bytes, receives a
     * checksum, validates the checksum, then receives a transmission
     * success message from the interface indicating successful completion
     * of the transmission. If validation of the checksum or reception
     * of a transmission success message fails, the transmission is retried.
     *
     * @param in Input stream to read from
     * @param out Output stream to write to
     * @exception TooManyAttemptsException Too many retries have occurred
     * @exception InterruptedTransmissionException An unsolicited interrupt
     *            has been received during the transmission.
     * @exception IOException Some sort of I/O or I/O protocol error has
     *            occurred
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    public void transmit(InputStream in, OutputStream out)
        throws TooManyAttemptsException, InterruptedTransmissionException,
               EOFException, IOException
    {
        int result;
        byte byteValue;
        HexFormat hex = new HexFormat();
        // Hoisted: debug tracing is toggled by defining the "DEBUG" system
        // property; look it up once per call instead of before every print.
        final boolean debug = System.getProperty("DEBUG") != null;

        // mark off an attempt
        ++attempts;
        if (attempts > maxAttempts)
        {
            throw new TooManyAttemptsException();
        }

        if (debug)
        {
            System.out.println("Sending CM11AStandardTransmission");
            System.out.print("PC->CM11A: ");
            String prefix = "";
            for (int k = 0; k < packet.length; k++)
            {
                System.out.print(prefix + "0x" + hex.format(packet[k]));
                prefix = ",";
            }
            System.out.println();
        }

        // send packet
        out.write(packet);

        // read checksum
        if ((result = in.read()) == -1)
        {
            throw new EOFException("Expected checksum, received end of stream indicator.");
        }
        byteValue = (byte)result;
        if (debug)
        {
            System.out.println("Received checksum: " + hex.format(byteValue));
            System.out.println("Expected checksum: " + hex.format(getChecksum()));
        }
        if (byteValue != getChecksum())
        {
            // A mismatched byte may actually be an unsolicited notification from
            // the interface rather than a bad checksum; surface those distinctly.
            if ((byteValue == CM11A.CM11_RECEIVE_EVENT) ||
                (byteValue == CM11A.CM11_POWER_FAILURE) ||
                (byteValue == CM11A.CM11_MACRO_INITIATED))
            {
                throw new InterruptedTransmissionException(byteValue);
            }
            else
            {
                System.err.println("Breakdown in protocol, consuming all bytes in CM11AStandardTransmission after getChecksum().");

                // consume all bytes in input stream; read() return values are
                // deliberately ignored, we only drain the buffered bytes
                byte[] buffer = new byte[20];
                while (in.available() > 0)
                {
                    in.read(buffer);
                }

                // retransmit (bounded by maxAttempts via the counter above)
                transmit(in, out);
                return;
            }
        }
        if (debug)
        {
            System.out.println("PC<-CM11A: 0x" + hex.format(byteValue));
        }

        // checksum correct
        out.write(CHECKSUM_OK);
        if (debug)
        {
            System.out.println("PC->CM11A: 0x" + hex.format(CHECKSUM_OK));
        }

        /*
         * 9th October 2007
         *
         * It looks like RCA HC60CRX takes about 700-800 milliseconds to execute
         * the command (as I understand, this is by design of X10 protocol).
         *
         * So, after acknowledging that the checksum perceived by CM11A was correct
         * (i.e. sending CHECKSUM_OK [(byte)0x00]), let's wait for the device
         * to be ready to send INTERFACE_READY [(byte)0x55].
         *
         * - Manish Pandya <manish at meetamanish dot com>
         */
        while (in.available() == 0)
        {
            try
            {
                Thread.sleep(100);
            }
            catch (InterruptedException e)
            {
                // Fix: restore the interrupt status instead of swallowing it
                // (previously printStackTrace), and stop polling — the blocking
                // read() below still waits for the device to respond.
                Thread.currentThread().interrupt();
                break;
            }
        }

        // read "Interface Ready"
        if ((result = in.read()) == -1)
        {
            throw new EOFException("Expected 'Interface Ready', received end of stream indicator.");
        }
        byteValue = (byte)result;
        if (byteValue != INTERFACE_READY)
        {
            if ((byteValue == CM11A.CM11_RECEIVE_EVENT) ||
                (byteValue == CM11A.CM11_POWER_FAILURE) ||
                (byteValue == CM11A.CM11_MACRO_INITIATED))
            {
                throw new InterruptedTransmissionException(byteValue);
            }
            else
            {
                System.err.println("Breakdown in protocol, consuming all bytes in CM11AStandardTransmission after 'Interface Ready'.");

                // consume all bytes in input stream; read() return values are
                // deliberately ignored, we only drain the buffered bytes
                byte[] buffer = new byte[20];
                while (in.available() > 0)
                {
                    in.read(buffer);
                }

                // retransmit (bounded by maxAttempts via the counter above)
                transmit(in, out);
                return;
            }
        }
        if (debug)
        {
            System.out.println("PC<-CM11A: 0x" + hex.format(byteValue));
        }

        // transmission complete
    }

    /**
     * Retrieve the checksum of the bytes in the message or the X10
     * transmission. Only the low 8 bits of the running sum are kept.
     *
     * @return the checksum
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    protected byte getChecksum()
    {
        int sum = 0;

        for (int i = 0; i < packet.length; i++)
        {
            sum += packet[i];
        }
        return((byte)sum);
    }

    /**
     * Retrieve the number of transmission attempts.
     *
     * @return the number of transmission attempts
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    public int getNumAttempts()
    {
        return(attempts);
    }

    /**
     * Set the maximum number of transmission attempts before
     * {@link TooManyAttemptsException} is thrown.
     *
     * @param maxAttempts the maximum number of transmission attempts
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    public void setMaxAttempts(int maxAttempts)
    {
        this.maxAttempts = maxAttempts;
    }

    /**
     * Create a string representation of the transmission.
     *
     * @return String representation of the transmission.
     *
     * @author Jesse Peterson <jesse@jpeterson.com>
     */
    public String toString()
    {
        // StringBuilder replaces the legacy synchronized StringBuffer; the
        // builder is confined to this method, so no locking is needed.
        StringBuilder buffer = new StringBuilder();
        HexFormat hexFormat = new HexFormat();
        String prefix = "";

        buffer.append("CM11AStandardTransmission - packet: [");
        for (int i = 0; i < packet.length; i++)
        {
            buffer.append(prefix).append("0x");
            buffer.append(hexFormat.format(packet[i]));
            prefix = ", ";
        }
        buffer.append("]");
        return(buffer.toString());
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/certificatemanager/v1/certificate_manager.proto
package com.google.cloud.certificatemanager.v1;
/**
*
*
* <pre>
* Request for the `ListCertificates` method.
* </pre>
*
* Protobuf type {@code google.cloud.certificatemanager.v1.ListCertificatesRequest}
*/
public final class ListCertificatesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.ListCertificatesRequest)
ListCertificatesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCertificatesRequest.newBuilder() to construct.
private ListCertificatesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: initializes string fields to their proto3 empty-string defaults.
private ListCertificatesRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
  orderBy_ = "";
}

// Called reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListCertificatesRequest();
}

// Fields with tags not recognized during parsing are preserved here.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor (legacy generated-code path). Reads tag/value
// pairs until end of input; unrecognized tags are retained in unknownFields.
private ListCertificatesRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  // NOTE: this local intentionally shadows the instance field; the builder is
  // populated during parsing and assigned to the field in the finally block.
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (parent), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();

            parent_ = s;
            break;
          }
        case 16: // field 2 (page_size), varint
          {
            pageSize_ = input.readInt32();
            break;
          }
        case 26: // field 3 (page_token), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();

            pageToken_ = s;
            break;
          }
        case 34: // field 4 (filter), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();

            filter_ = s;
            break;
          }
        case 42: // field 5 (order_by), length-delimited
          {
            java.lang.String s = input.readStringRequireUtf8();

            orderBy_ = s;
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always capture whatever was parsed so far, even on failure.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor for this message type, generated from certificate_manager.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.certificatemanager.v1.CertificateManagerProto
      .internal_static_google_cloud_certificatemanager_v1_ListCertificatesRequest_descriptor;
}

// Binds the descriptor's fields to this class's generated accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.certificatemanager.v1.CertificateManagerProto
      .internal_static_google_cloud_certificatemanager_v1_ListCertificatesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.certificatemanager.v1.ListCertificatesRequest.class,
          com.google.cloud.certificatemanager.v1.ListCertificatesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object parent_;
/**
 * <pre>
 * Required. The project and location from which the certificate should be listed,
 * specified in the format `projects/&#42;/locations/&#42;`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 * <pre>
 * Required. The project and location from which the certificate should be listed,
 * specified in the format `projects/&#42;/locations/&#42;`.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
// Proto3 default is 0, meaning "use the server-side default page size".
private int pageSize_;
/**
 * <pre>
 * Maximum number of certificates to return per call.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object pageToken_;
/**
 * <pre>
 * The value returned by the last `ListCertificatesResponse`. Indicates that
 * this is a continuation of a prior `ListCertificates` call, and that the
 * system should return the next page of data.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 * <pre>
 * The value returned by the last `ListCertificatesResponse`. Indicates that
 * this is a continuation of a prior `ListCertificates` call, and that the
 * system should return the next page of data.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 4;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object filter_;
/**
 * <pre>
 * Filter expression to restrict the Certificates returned.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
@java.lang.Override
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  }
}
/**
 * <pre>
 * Filter expression to restrict the Certificates returned.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
// Holds either a java.lang.String or a ByteString; converted lazily and cached.
private volatile java.lang.Object orderBy_;
/**
 * <pre>
 * A list of Certificate field names used to specify the order of the returned
 * results. The default sorting order is ascending. To specify descending
 * order for a field, add a suffix " desc".
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The orderBy.
 */
@java.lang.Override
public java.lang.String getOrderBy() {
  java.lang.Object ref = orderBy_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    orderBy_ = s;
    return s;
  }
}
/**
 * <pre>
 * A list of Certificate field names used to specify the order of the returned
 * results. The default sorting order is ascending. To specify descending
 * order for a field, add a suffix " desc".
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The bytes for orderBy.
 */
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
  java.lang.Object ref = orderBy_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    orderBy_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to the wire. Per proto3 semantics, fields equal to
// their default values (empty string, 0) are skipped entirely.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
  }
  // Round-trip any fields this generated class did not recognize when parsing.
  unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact number of bytes writeTo
// will emit; mirrors the default-value skipping logic in writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: compares every declared field plus the unknown field set.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.certificatemanager.v1.ListCertificatesRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.certificatemanager.v1.ListCertificatesRequest other =
      (com.google.cloud.certificatemanager.v1.ListCertificatesRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getFilter().equals(other.getFilter())) return false;
  if (!getOrderBy().equals(other.getOrderBy())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Hash over the same fields equals() compares, memoized in memoizedHashCode
// (0 doubles as the "not yet computed" sentinel).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
  hash = (53 * hash) + getOrderBy().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse helpers: each overload delegates to the generated PARSER
// (declared later in this class) for a different input representation.
public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

// Stream overloads wrap parser errors via parseWithIOException so callers see
// java.io.IOException rather than protocol-buffer-specific exceptions only.
public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.certificatemanager.v1.ListCertificatesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Request for the `ListCertificates` method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.certificatemanager.v1.ListCertificatesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.ListCertificatesRequest)
      com.google.cloud.certificatemanager.v1.ListCertificatesRequestOrBuilder {
    // NOTE(review): protoc-generated builder -- do not edit by hand; regeneration
    // will overwrite any manual change.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_ListCertificatesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_ListCertificatesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.certificatemanager.v1.ListCertificatesRequest.class,
              com.google.cloud.certificatemanager.v1.ListCertificatesRequest.Builder.class);
    }
    // Construct using com.google.cloud.certificatemanager.v1.ListCertificatesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    // Resets every field to its proto3 default value.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_ListCertificatesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.ListCertificatesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.certificatemanager.v1.ListCertificatesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.ListCertificatesRequest build() {
      com.google.cloud.certificatemanager.v1.ListCertificatesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies the builder's current field values into a fresh message instance.
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.ListCertificatesRequest buildPartial() {
      com.google.cloud.certificatemanager.v1.ListCertificatesRequest result =
          new com.google.cloud.certificatemanager.v1.ListCertificatesRequest(this);
      result.parent_ = parent_;
      result.pageSize_ = pageSize_;
      result.pageToken_ = pageToken_;
      result.filter_ = filter_;
      result.orderBy_ = orderBy_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Generic merge entry point; dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.certificatemanager.v1.ListCertificatesRequest) {
        return mergeFrom((com.google.cloud.certificatemanager.v1.ListCertificatesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: only non-default fields of `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.certificatemanager.v1.ListCertificatesRequest other) {
      if (other
          == com.google.cloud.certificatemanager.v1.ListCertificatesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge; the finally block keeps whatever was parsed before an
    // InvalidProtocolBufferException, then rethrows the unwrapped IOException.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.certificatemanager.v1.ListCertificatesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.certificatemanager.v1.ListCertificatesRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // ----- field 1: parent (lazily decoded String/ByteString, standard protobuf idiom) -----
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The project and location from which the certificate should be listed,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location from which the certificate should be listed,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location from which the certificate should be listed,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location from which the certificate should be listed,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location from which the certificate should be listed,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      onChanged();
      return this;
    }
    // ----- field 2: page_size -----
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Maximum number of certificates to return per call.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Maximum number of certificates to return per call.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Maximum number of certificates to return per call.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // ----- field 3: page_token -----
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListCertificatesResponse`. Indicates that
     * this is a continuation of a prior `ListCertificates` call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListCertificatesResponse`. Indicates that
     * this is a continuation of a prior `ListCertificates` call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListCertificatesResponse`. Indicates that
     * this is a continuation of a prior `ListCertificates` call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListCertificatesResponse`. Indicates that
     * this is a continuation of a prior `ListCertificates` call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The value returned by the last `ListCertificatesResponse`. Indicates that
     * this is a continuation of a prior `ListCertificates` call, and that the
     * system should return the next page of data.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      onChanged();
      return this;
    }
    // ----- field 4: filter -----
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Filter expression to restrict the Certificates returned.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filter expression to restrict the Certificates returned.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filter expression to restrict the Certificates returned.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filter expression to restrict the Certificates returned.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filter expression to restrict the Certificates returned.
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      onChanged();
      return this;
    }
    // ----- field 5: order_by -----
    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * A list of Certificate field names used to specify the order of the returned
     * results. The default sorting order is ascending. To specify descending
     * order for a field, add a suffix " desc".
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Certificate field names used to specify the order of the returned
     * results. The default sorting order is ascending. To specify descending
     * order for a field, add a suffix " desc".
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A list of Certificate field names used to specify the order of the returned
     * results. The default sorting order is ascending. To specify descending
     * order for a field, add a suffix " desc".
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Certificate field names used to specify the order of the returned
     * results. The default sorting order is ascending. To specify descending
     * order for a field, add a suffix " desc".
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A list of Certificate field names used to specify the order of the returned
     * results. The default sorting order is ascending. To specify descending
     * order for a field, add a suffix " desc".
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.ListCertificatesRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.ListCertificatesRequest)
  // Singleton default instance; also the prototype behind newBuilder().
  private static final com.google.cloud.certificatemanager.v1.ListCertificatesRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.certificatemanager.v1.ListCertificatesRequest();
  }
  public static com.google.cloud.certificatemanager.v1.ListCertificatesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton backing all static parseFrom overloads; delegates to the
  // message's CodedInputStream constructor.
  private static final com.google.protobuf.Parser<ListCertificatesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListCertificatesRequest>() {
        @java.lang.Override
        public ListCertificatesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListCertificatesRequest(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<ListCertificatesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListCertificatesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.certificatemanager.v1.ListCertificatesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.type;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.type.DateTimeEncoding;
import io.prestosql.spi.type.LongTimestamp;
import io.prestosql.spi.type.LongTimestampWithTimeZone;
import io.prestosql.spi.type.TimeZoneKey;
import io.prestosql.spi.type.TimestampType;
import io.prestosql.spi.type.TimestampWithTimeZoneType;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.base.Preconditions.checkArgument;
import static io.prestosql.spi.type.DateTimeEncoding.unpackMillisUtc;
import static io.prestosql.spi.type.DateTimeEncoding.unpackZoneKey;
import static io.prestosql.spi.type.TimeZoneKey.getTimeZoneKey;
import static io.prestosql.spi.type.TimestampType.MAX_SHORT_PRECISION;
import static java.lang.Math.floorMod;
import static java.lang.Math.multiplyExact;
import static java.lang.String.format;
import static java.time.temporal.ChronoField.MICRO_OF_SECOND;
public final class Timestamps
{
public static final Pattern DATETIME_PATTERN = Pattern.compile("" +
"(?<year>[-+]?\\d{4,})-(?<month>\\d{1,2})-(?<day>\\d{1,2})" +
"(?: (?<hour>\\d{1,2}):(?<minute>\\d{1,2})(?::(?<second>\\d{1,2})(?:\\.(?<fraction>\\d+))?)?)?" +
"\\s*(?<timezone>.+)?");
private static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss");
private static final long[] POWERS_OF_TEN = {
1L,
10L,
100L,
1000L,
10_000L,
100_000L,
1_000_000L,
10_000_000L,
100_000_000L,
1_000_000_000L,
10_000_000_000L,
100_000_000_000L,
1000_000_000_000L
};
public static final int MILLISECONDS_PER_SECOND = 1000;
public static final int MICROSECONDS_PER_SECOND = 1_000_000;
public static final int MICROSECONDS_PER_MILLISECOND = 1000;
public static final long PICOSECONDS_PER_SECOND = 1_000_000_000_000L;
public static final int NANOSECONDS_PER_MILLISECOND = 1_000_000;
public static final int NANOSECONDS_PER_MICROSECOND = 1_000;
public static final int PICOSECONDS_PER_MILLISECOND = 1_000_000_000;
public static final int PICOSECONDS_PER_MICROSECOND = 1_000_000;
public static final int PICOSECONDS_PER_NANOSECOND = 1000;
private Timestamps() {}
    // Divides value by factor, rounding halves toward positive infinity:
    // roundDiv(15, 10) == 2, roundDiv(-15, 10) == -1, roundDiv(-16, 10) == -2.
    // The negative branch relies on Java's truncating integer division.
    private static long roundDiv(long value, long factor)
    {
        checkArgument(factor > 0, "factor must be positive");
        if (factor == 1) {
            return value;
        }
        if (value >= 0) {
            return (value + (factor / 2)) / factor;
        }
        return (value + 1 - (factor / 2)) / factor;
    }
public static long scaleEpochMicrosToMillis(long value)
{
return Math.floorDiv(value, MICROSECONDS_PER_MILLISECOND);
}
private static long scaleEpochMillisToSeconds(long epochMillis)
{
return Math.floorDiv(epochMillis, MILLISECONDS_PER_SECOND);
}
private static long scaleEpochMicrosToSeconds(long epochMicros)
{
return Math.floorDiv(epochMicros, MICROSECONDS_PER_SECOND);
}
public static long scaleEpochMillisToMicros(long epochMillis)
{
return multiplyExact(epochMillis, MICROSECONDS_PER_MILLISECOND);
}
public static long epochSecondToMicrosWithRounding(long epochSecond, long picoOfSecond)
{
return epochSecond * MICROSECONDS_PER_SECOND + roundDiv(picoOfSecond, PICOSECONDS_PER_MICROSECOND);
}
public static int getMicrosOfSecond(long epochMicros)
{
return floorMod(epochMicros, MICROSECONDS_PER_SECOND);
}
public static int getMillisOfSecond(long epochMillis)
{
return floorMod(epochMillis, MILLISECONDS_PER_SECOND);
}
public static int getMicrosOfMilli(long epochMicros)
{
return floorMod(epochMicros, MICROSECONDS_PER_MILLISECOND);
}
public static long toEpochMicros(long epochMillis, int fraction)
{
return scaleEpochMillisToMicros(epochMillis) + fraction / 1_000_000;
}
public static long round(long value, int magnitude)
{
return roundToNearest(value, POWERS_OF_TEN[magnitude]);
}
public static long roundToNearest(long value, long bound)
{
return roundDiv(value, bound) * bound;
}
private static long scaleFactor(int fromPrecision, int toPrecision)
{
if (fromPrecision > toPrecision) {
throw new IllegalArgumentException("fromPrecision must be <= toPrecision");
}
return POWERS_OF_TEN[toPrecision - fromPrecision];
}
/**
* Rescales a value of the given precision to another precision by adding 0s or truncating.
*/
public static long rescale(long value, int fromPrecision, int toPrecision)
{
if (value < 0) {
throw new IllegalArgumentException("value must be >= 0");
}
if (fromPrecision <= toPrecision) {
value *= scaleFactor(fromPrecision, toPrecision);
}
else {
value /= scaleFactor(toPrecision, fromPrecision);
}
return value;
}
public static boolean timestampHasTimeZone(String value)
{
Matcher matcher = DATETIME_PATTERN.matcher(value);
if (!matcher.matches()) {
throw new IllegalArgumentException(format("Invalid timestamp '%s'", value));
}
return matcher.group("timezone") != null;
}
public static int extractTimestampPrecision(String value)
{
Matcher matcher = DATETIME_PATTERN.matcher(value);
if (!matcher.matches()) {
throw new IllegalArgumentException(format("Invalid timestamp '%s'", value));
}
String fraction = matcher.group("fraction");
if (fraction == null) {
return 0;
}
return fraction.length();
}
    /**
     * Reads a timestamp value from {@code block} at {@code position} and converts it to a
     * {@link LocalDateTime}. The block encoding depends on the type's precision (see below).
     */
    public static LocalDateTime toLocalDateTime(TimestampType type, ConnectorSession session, Block block, int position)
    {
        int precision = type.getPrecision();
        long epochMicros;
        int picosOfMicro = 0;
        // precision <= 3: stored as epoch millis in a long; scale up to micros
        if (precision <= 3) {
            epochMicros = scaleEpochMillisToMicros(type.getLong(block, position));
        }
        // short precision: stored directly as epoch micros in a long
        else if (precision <= MAX_SHORT_PRECISION) {
            epochMicros = type.getLong(block, position);
        }
        // higher precision: stored as a LongTimestamp (epoch micros + picos-of-micro)
        else {
            LongTimestamp timestamp = (LongTimestamp) type.getObject(block, position);
            epochMicros = timestamp.getEpochMicros();
            picosOfMicro = timestamp.getPicosOfMicro();
        }
        long epochSecond = scaleEpochMicrosToSeconds(epochMicros);
        // Instant only supports nanosecond resolution, so the picos-of-micro remainder
        // is rounded to the nearest whole nanosecond.
        int nanoFraction = getMicrosOfSecond(epochMicros) * NANOSECONDS_PER_MICROSECOND + (int) (roundToNearest(picosOfMicro, PICOSECONDS_PER_NANOSECOND) / PICOSECONDS_PER_NANOSECOND);
        Instant instant = Instant.ofEpochSecond(epochSecond, nanoFraction);
        // Legacy timestamp semantics render the instant in the session zone; otherwise UTC.
        if (session.isLegacyTimestamp()) {
            return LocalDateTime.ofInstant(instant, session.getTimeZoneKey().getZoneId());
        }
        return LocalDateTime.ofInstant(instant, ZoneOffset.UTC);
    }
    /**
     * Reads a timestamp-with-time-zone value from {@code block} at {@code position} and
     * converts it to a {@link ZonedDateTime} in the value's own zone.
     */
    public static ZonedDateTime toZonedDateTime(TimestampWithTimeZoneType type, Block block, int position)
    {
        int precision = type.getPrecision();
        long epochMillis;
        int picosOfMilli = 0;
        ZoneId zoneId;
        // Short precision: epoch millis and the zone key packed into a single long.
        if (precision <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) {
            long packedEpochMillis = type.getLong(block, position);
            epochMillis = unpackMillisUtc(packedEpochMillis);
            zoneId = unpackZoneKey(packedEpochMillis).getZoneId();
        }
        // Higher precision: object carrying millis, picos-of-milli and the zone key.
        else {
            LongTimestampWithTimeZone timestamp = (LongTimestampWithTimeZone) type.getObject(block, position);
            epochMillis = timestamp.getEpochMillis();
            picosOfMilli = timestamp.getPicosOfMilli();
            zoneId = getTimeZoneKey(timestamp.getTimeZoneKey()).getZoneId();
        }
        long epochSecond = scaleEpochMillisToSeconds(epochMillis);
        // Instant only supports nanoseconds: round picos-of-milli to whole nanos.
        int nanoFraction = getMillisOfSecond(epochMillis) * NANOSECONDS_PER_MILLISECOND + (int) (roundToNearest(picosOfMilli, PICOSECONDS_PER_NANOSECOND) / PICOSECONDS_PER_NANOSECOND);
        return Instant.ofEpochSecond(epochSecond, nanoFraction).atZone(zoneId);
    }
    /**
     * Formats a timestamp of the given precision. This method doesn't do any rounding, so it's expected that the
     * combination of [epochMicros, picosSecond] is already rounded to the provided precision if necessary
     */
    public static String formatTimestamp(int precision, long epochMicros, int picosOfMicro, ZoneId zoneId)
    {
        // Convenience overload using the class-default "uuuu-MM-dd HH:mm:ss" formatter.
        return formatTimestamp(precision, epochMicros, picosOfMicro, zoneId, TIMESTAMP_FORMATTER);
    }
/**
* Formats a timestamp of the given precision. This method doesn't do any rounding, so it's expected that the
* combination of [epochMicros, picosSecond] is already rounded to the provided precision if necessary
*/
public static String formatTimestamp(int precision, long epochMicros, int picosOfMicro, ZoneId zoneId, DateTimeFormatter yearToSecondFormatter)
{
checkArgument(picosOfMicro >= 0 && picosOfMicro < PICOSECONDS_PER_MICROSECOND, "picosOfMicro is out of range [0, 1_000_000]");
Instant instant = Instant.ofEpochSecond(scaleEpochMicrosToSeconds(epochMicros));
LocalDateTime dateTime = LocalDateTime.ofInstant(instant, zoneId);
long picoFraction = ((long) getMicrosOfSecond(epochMicros)) * PICOSECONDS_PER_MICROSECOND + picosOfMicro;
return formatTimestamp(precision, dateTime, picoFraction, yearToSecondFormatter, builder -> {});
}
public static String formatTimestampWithTimeZone(int precision, long epochMillis, int picoSecondOfMilli, ZoneId zoneId)
{
Instant instant = Instant.ofEpochMilli(epochMillis);
LocalDateTime dateTime = LocalDateTime.ofInstant(instant, zoneId);
long picoFraction = ((long) getMillisOfSecond(epochMillis)) * PICOSECONDS_PER_MILLISECOND + picoSecondOfMilli;
return formatTimestamp(precision, dateTime, picoFraction, TIMESTAMP_FORMATTER, builder -> builder.append(" ").append(zoneId));
}
public static String formatTimestamp(int precision, LocalDateTime dateTime, long picoFraction, DateTimeFormatter yearToSecondFormatter, Consumer<StringBuilder> zoneIdFormatter)
{
StringBuilder builder = new StringBuilder();
builder.append(yearToSecondFormatter.format(dateTime));
if (precision > 0) {
builder.append(".");
builder.append(String.format("%0" + precision + "d", rescale(picoFraction, 12, precision)));
}
zoneIdFormatter.accept(builder);
return builder.toString();
}
public static Object parseTimestamp(int precision, String value)
{
if (precision <= MAX_SHORT_PRECISION) {
return parseShortTimestamp(value, ZoneOffset.UTC);
}
return parseLongTimestamp(value, ZoneOffset.UTC);
}
public static Object parseLegacyTimestamp(int precision, TimeZoneKey timeZoneKey, String value)
{
if (precision <= MAX_SHORT_PRECISION) {
return parseShortTimestamp(value, timeZoneKey.getZoneId());
}
return parseLongTimestamp(value, timeZoneKey.getZoneId());
}
public static Object parseTimestampWithTimeZone(int precision, String value)
{
if (precision <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) {
return parseShortTimestampWithTimeZone(value);
}
return parseLongTimestampWithTimeZone(value);
}
    /**
     * Parses a timestamp literal without a time zone into a single long: epoch millis when
     * the fractional precision is at most 3, epoch micros when it is at most
     * MAX_SHORT_PRECISION.
     *
     * @throws IllegalArgumentException if the literal does not match DATETIME_PATTERN,
     *         carries a time zone, or its precision exceeds MAX_SHORT_PRECISION
     */
    private static long parseShortTimestamp(String value, ZoneId zoneId)
    {
        Matcher matcher = DATETIME_PATTERN.matcher(value);
        if (!matcher.matches() || matcher.group("timezone") != null) {
            throw new IllegalArgumentException("Invalid timestamp: " + value);
        }
        String year = matcher.group("year");
        String month = matcher.group("month");
        String day = matcher.group("day");
        String hour = matcher.group("hour");
        String minute = matcher.group("minute");
        String second = matcher.group("second");
        String fraction = matcher.group("fraction");
        // toEpochSecond is defined elsewhere in this class; presumably interprets the
        // date-time fields in zoneId -- TODO(review): confirm.
        long epochSecond = toEpochSecond(year, month, day, hour, minute, second, zoneId);
        int precision = 0;
        long fractionValue = 0;
        if (fraction != null) {
            precision = fraction.length();
            fractionValue = Long.parseLong(fraction);
        }
        if (precision <= 3) {
            // scale to millis
            return epochSecond * MILLISECONDS_PER_SECOND + rescale(fractionValue, precision, 3);
        }
        else if (precision <= MAX_SHORT_PRECISION) {
            // scale to micros
            return epochSecond * MICROSECONDS_PER_SECOND + rescale(fractionValue, precision, 6);
        }
        throw new IllegalArgumentException(format("Cannot parse '%s' as short timestamp. Max allowed precision = %s", value, MAX_SHORT_PRECISION));
    }
/**
 * Parses a timestamp literal whose fraction is longer than {@code MAX_SHORT_PRECISION}
 * digits into a {@code LongTimestamp} (epoch micros plus picos-of-micro).
 *
 * @param value timestamp literal text; must not carry a time zone suffix
 * @param zoneId zone used to resolve the local date-time to an epoch instant
 * @throws IllegalArgumentException if the literal is malformed, has a zone suffix,
 *         or its fraction is missing or short enough for the short representation
 */
private static LongTimestamp parseLongTimestamp(String value, ZoneId zoneId)
{
    Matcher match = DATETIME_PATTERN.matcher(value);
    if (!match.matches() || match.group("timezone") != null) {
        throw new IllegalArgumentException("Invalid timestamp: " + value);
    }
    String fraction = match.group("fraction");
    // A "long" timestamp must have more fractional digits than the short form can hold.
    if (fraction == null || fraction.length() <= MAX_SHORT_PRECISION) {
        throw new IllegalArgumentException(format("Cannot parse '%s' as long timestamp. Precision must be in the range [%s, %s]", value, MAX_SHORT_PRECISION + 1, TimestampType.MAX_PRECISION));
    }
    long epochSecond = toEpochSecond(
            match.group("year"), match.group("month"), match.group("day"),
            match.group("hour"), match.group("minute"), match.group("second"),
            zoneId);
    long picoFraction = rescale(Long.parseLong(fraction), fraction.length(), 12);
    return longTimestamp(epochSecond, picoFraction);
}
/**
 * Parses a timestamp-with-time-zone literal with at most {@code MAX_SHORT_PRECISION}
 * fractional digits into the packed {@code long} encoding (epoch millis + zone).
 *
 * @param value timestamp literal text; must carry a time zone suffix
 * @throws IllegalArgumentException if the literal is malformed, lacks a zone suffix,
 *         or its fraction is longer than {@code MAX_SHORT_PRECISION} digits
 */
private static long parseShortTimestampWithTimeZone(String value)
{
    Matcher matcher = DATETIME_PATTERN.matcher(value);
    if (!matcher.matches() || matcher.group("timezone") == null) {
        throw new IllegalArgumentException("Invalid timestamp with time zone: " + value);
    }
    String year = matcher.group("year");
    String month = matcher.group("month");
    String day = matcher.group("day");
    String hour = matcher.group("hour");
    String minute = matcher.group("minute");
    String second = matcher.group("second");
    String fraction = matcher.group("fraction");
    String timezone = matcher.group("timezone");
    ZoneId zoneId = ZoneId.of(timezone);
    long epochSecond = toEpochSecond(year, month, day, hour, minute, second, zoneId);
    int precision = 0;
    long fractionValue = 0;
    if (fraction != null) {
        precision = fraction.length();
        fractionValue = Long.parseLong(fraction);
    }
    if (precision > MAX_SHORT_PRECISION) {
        throw new IllegalArgumentException(format("Cannot parse '%s' as short timestamp. Max allowed precision = %s", value, MAX_SHORT_PRECISION));
    }
    // Use the named constant instead of the magic literal 1000, consistent with
    // parseShortTimestamp() above.
    long epochMillis = epochSecond * MILLISECONDS_PER_SECOND + rescale(fractionValue, precision, 3);
    // TODO: parametric-timestamptz
    return DateTimeEncoding.packDateTimeWithZone(epochMillis, timezone);
}
/**
 * Parses a timestamp-with-time-zone literal whose fraction exceeds the short-form
 * capacity into a {@code LongTimestampWithTimeZone} (epoch seconds + pico fraction + zone key).
 *
 * @param value timestamp literal text; must carry a time zone suffix
 * @throws IllegalArgumentException if the literal is malformed, lacks a zone suffix,
 *         or its fraction is missing or short enough for the short representation
 */
private static LongTimestampWithTimeZone parseLongTimestampWithTimeZone(String value)
{
    Matcher match = DATETIME_PATTERN.matcher(value);
    if (!match.matches() || match.group("timezone") == null) {
        throw new IllegalArgumentException("Invalid timestamp: " + value);
    }
    String fraction = match.group("fraction");
    String timezone = match.group("timezone");
    if (fraction == null || fraction.length() <= TimestampWithTimeZoneType.MAX_SHORT_PRECISION) {
        throw new IllegalArgumentException(format("Cannot parse '%s' as long timestamp. Precision must be in the range [%s, %s]", value, TimestampWithTimeZoneType.MAX_SHORT_PRECISION + 1, TimestampWithTimeZoneType.MAX_PRECISION));
    }
    long epochSecond = toEpochSecond(
            match.group("year"), match.group("month"), match.group("day"),
            match.group("hour"), match.group("minute"), match.group("second"),
            ZoneId.of(timezone));
    long picoFraction = rescale(Long.parseLong(fraction), fraction.length(), 12);
    return LongTimestampWithTimeZone.fromEpochSecondsAndFraction(epochSecond, picoFraction, getTimeZoneKey(timezone));
}
/**
 * Converts parsed date/time field strings into epoch seconds in the given zone.
 * Missing time-of-day fields ({@code hour}, {@code minute}, {@code second}) default to 0.
 *
 * @throws IllegalArgumentException if the wall-clock time falls into a daylight-savings
 *         "gap" and therefore does not exist in {@code zoneId}
 */
private static long toEpochSecond(String year, String month, String day, String hour, String minute, String second, ZoneId zoneId)
{
    int h = (hour == null) ? 0 : Integer.parseInt(hour);
    int m = (minute == null) ? 0 : Integer.parseInt(minute);
    int s = (second == null) ? 0 : Integer.parseInt(second);
    LocalDateTime dateTime = LocalDateTime.of(
            Integer.parseInt(year),
            Integer.parseInt(month),
            Integer.parseInt(day),
            h, m, s, 0);
    // Only relevant for legacy timestamps. New timestamps are parsed using UTC, which doesn't
    // have daylight savings transitions. TODO: remove once legacy timestamps are gone
    List<ZoneOffset> offsets = zoneId.getRules().getValidOffsets(dateTime);
    if (offsets.isEmpty()) {
        throw new IllegalArgumentException("Invalid timestamp due to daylight savings transition");
    }
    // During an overlap (clocks set back) getValidOffsets returns two offsets;
    // the earlier one is chosen, matching the original behavior.
    return dateTime.toEpochSecond(offsets.get(0));
}
/**
 * Builds a LongTimestamp from an Instant, rounding the sub-microsecond part
 * down to the requested precision.
 *
 * @param precision target precision; must be greater than MAX_SHORT_PRECISION and
 *        at most TimestampType.MAX_PRECISION (enforced by checkArgument below)
 */
public static LongTimestamp longTimestamp(long precision, Instant start)
{
checkArgument(precision > MAX_SHORT_PRECISION && precision <= TimestampType.MAX_PRECISION, "Precision is out of range");
return new LongTimestamp(
// epoch microseconds: whole seconds scaled up, plus the instant's micro-of-second
start.getEpochSecond() * MICROSECONDS_PER_SECOND + start.getLong(MICRO_OF_SECOND),
// NOTE(review): the modulus uses PICOSECONDS_PER_NANOSECOND where the intent appears
// to be "nanoseconds within the current microsecond" (nanos-per-micro). Both are 1000,
// so the value is numerically correct, but the constant name looks wrong -- confirm
// against the constants declared at the top of this class.
(int) round((start.getNano() % PICOSECONDS_PER_NANOSECOND) * PICOSECONDS_PER_NANOSECOND, (int) (TimestampType.MAX_PRECISION - precision)));
}
/**
 * Builds a LongTimestamp from epoch seconds plus a picosecond fraction of that second.
 * The fraction is split into whole microseconds (folded into the epoch-micros field)
 * and the remaining picos-of-micro.
 */
public static LongTimestamp longTimestamp(long epochSecond, long fractionInPicos)
{
    long epochMicros = multiplyExact(epochSecond, MICROSECONDS_PER_SECOND) + fractionInPicos / PICOSECONDS_PER_MICROSECOND;
    int picosOfMicro = (int) (fractionInPicos % PICOSECONDS_PER_MICROSECOND);
    return new LongTimestamp(epochMicros, picosOfMicro);
}
/**
 * Builds a LongTimestampWithTimeZone from an Instant, rounding the sub-millisecond
 * part down to the requested precision.
 *
 * @param precision target precision; must be at most TimestampWithTimeZoneType.MAX_PRECISION
 *        (note: unlike longTimestamp(long, Instant) above, no lower bound is enforced here)
 */
public static LongTimestampWithTimeZone longTimestampWithTimeZone(long precision, Instant start, TimeZoneKey timeZoneKey)
{
checkArgument(precision <= TimestampWithTimeZoneType.MAX_PRECISION, "Precision is out of range");
return LongTimestampWithTimeZone.fromEpochMillisAndFraction(
start.toEpochMilli(),
// picoseconds within the current millisecond, rounded to the target precision
(int) round((start.getNano() % NANOSECONDS_PER_MILLISECOND) * PICOSECONDS_PER_NANOSECOND, (int) (TimestampWithTimeZoneType.MAX_PRECISION - precision)),
timeZoneKey);
}
/**
 * Builds a LongTimestampWithTimeZone from epoch seconds plus a picosecond fraction
 * of that second, attaching the time-zone key resolved from {@code zoneId}.
 */
public static LongTimestampWithTimeZone longTimestampWithTimeZone(long epochSecond, long fractionInPicos, ZoneId zoneId)
{
    long epochMillis = multiplyExact(epochSecond, MILLISECONDS_PER_SECOND) + fractionInPicos / PICOSECONDS_PER_MILLISECOND;
    int picosOfMilli = (int) (fractionInPicos % PICOSECONDS_PER_MILLISECOND);
    return LongTimestampWithTimeZone.fromEpochMillisAndFraction(epochMillis, picosOfMilli, getTimeZoneKey(zoneId.getId()));
}
/**
 * Rounds a LongTimestampWithTimeZone to whole epoch milliseconds, rounding the
 * picos-of-milli component to the nearest millisecond (half-up via roundToNearest).
 */
public static long roundToEpochMillis(LongTimestampWithTimeZone timestamp)
{
    long epochMillis = timestamp.getEpochMillis();
    boolean roundsUp = roundToNearest(timestamp.getPicosOfMilli(), PICOSECONDS_PER_MILLISECOND) == PICOSECONDS_PER_MILLISECOND;
    return roundsUp ? epochMillis + 1 : epochMillis;
}
}
| |
/*
* Copyright 2015, Yahoo Inc.
* Copyrights licensed under the Apache License.
* See the accompanying LICENSE file for terms.
*/
package com.yahoo.dba.perf.myperf.common;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Aggregated view of a MySQL {@code SHOW PROCESSLIST} snapshot.
 * <p>
 * After the process list has been set via {@link #setProcessList(java.util.List)},
 * {@link #summarize()} groups the entries by user, host, command and state, and
 * aggregates the active SQL statements three ways: raw, with a trailing LIMIT
 * clause stripped, and fully normalized (literals replaced by {@code ?}).
 * {@link #saveAsText(PrintWriter)} then renders a plain-text report of all the
 * collected summaries plus optional InnoDB status / lock / trx / client sections.
 */
public class ProcessListSummary implements java.io.Serializable{
private static final long serialVersionUID = 1L;
private static Logger logger = Logger.getLogger(ProcessListSummary.class.getName());
private Map<String, Integer> userSummary = new HashMap<String, Integer>();//group by user
private Map<String, Integer> hostSummary = new HashMap<String, Integer>();//group by host
private Map<String, Integer> commandSummary = new HashMap<String, Integer>();//group by command, including idle
private Map<String, Integer> commandHostSummary = new HashMap<String, Integer>();//group by command and host, including idle
private Map<String, Integer> stateSummary = new HashMap<String, Integer>();//active state
private List<ProcessListEntry> processList;
// patterns used by stripLimit(): "LIMIT n" and "LIMIT n, m" (case-insensitive)
private Pattern pt = Pattern.compile("LIMIT\\s+\\d+", Pattern.CASE_INSENSITIVE);
private Pattern pt2 = Pattern.compile("LIMIT\\s+\\d+\\s*,\\s*\\d+", Pattern.CASE_INSENSITIVE);
private Map<String, ProcessListEntryAggregate> queries = new HashMap<String, ProcessListEntryAggregate>();
private Map<String, ProcessListEntryAggregate> queriesWithLimitStripped = new HashMap<String, ProcessListEntryAggregate>();//pagination
private Map<String, ProcessListEntryAggregate> normalizedQueries = new HashMap<String, ProcessListEntryAggregate>();
private String innodbStatus;
// total seconds accumulated across all active (non-sleeping) queries; see summarize()
private int totalAccumultaedTime = 0;
private ResultList lockList;
private ResultList trxList;
private ResultList clientList;
// application-specific hooks invoked for each active entry during summarize()
private List<ProcessListEntryProcessor> appProcessorList = new ArrayList<ProcessListEntryProcessor>();
private long reportTimestamp;
/**
 * Simple (name, count) pair that sorts by descending count.
 * NOTE(review): this is a non-static inner class of a Serializable owner; it holds an
 * implicit reference to the enclosing ProcessListSummary -- confirm that is intended.
 */
public class KeyValIntPair implements Comparable<KeyValIntPair>
{
public String name;
public int value;
public KeyValIntPair(String name, int value)
{
this.name = name;
this.value = value;
}
@Override
public int compareTo(KeyValIntPair o) {
if(o==null)return -1;
// NOTE(review): int subtraction can overflow for extreme values; counts here come
// from process-list sizes, so in practice the range is small.
return o.value - value;//reverse order
}
}
/**
 * Registers an application-specific processor to be run on each active entry.
 */
public void addAppProcessor(ProcessListEntryProcessor pr)
{
this.appProcessorList.add(pr);
}
public ProcessListSummary()
{
}
//TODO
/**
 * Builds all grouping and query aggregations from the current process list.
 * It should be invoked after the process list has been set; it is a silent no-op
 * when no list is present. Replication threads ("system user") are skipped for
 * all summaries; sleeping connections are counted in the group summaries but
 * excluded from the query aggregations.
 */
public void summarize()
{
if(this.processList==null)return ;//something wrong
for(ProcessListEntry e: this.processList)
{
if("system user".equalsIgnoreCase(e.getUser()))continue;//ignore replication
//update user
if(!this.userSummary.containsKey(e.getUser()))
this.userSummary.put(e.getUser(), 1);
else
this.userSummary.put(e.getUser(), this.userSummary.get(e.getUser())+1);
//update host
if(!this.hostSummary.containsKey(e.getHost()))
this.hostSummary.put(e.getHost(), 1);
else
this.hostSummary.put(e.getHost(), this.hostSummary.get(e.getHost())+1);
//state
if(e.getState()!=null)//Sleep command has no state
{
if(!this.stateSummary.containsKey(e.getState()))
this.stateSummary.put(e.getState(), 1);
else
this.stateSummary.put(e.getState(), this.stateSummary.get(e.getState())+1);
}
//command
if(!this.commandSummary.containsKey(e.getCommand()))
this.commandSummary.put(e.getCommand(), 1);
else
this.commandSummary.put(e.getCommand(), this.commandSummary.get(e.getCommand())+1);
//command host: TODO
if(!this.commandHostSummary.containsKey(e.getHost()+"-"+e.getCommand()))
this.commandHostSummary.put(e.getHost()+"-"+e.getCommand(), 1);
else
this.commandHostSummary.put(e.getHost()+"-"+e.getCommand(), this.commandHostSummary.get(e.getHost()+"-"+e.getCommand())+1);
//now active SQL only
if("sleep".equalsIgnoreCase(e.getCommand()))continue;
//1. Strip comments
String s = stripComments(e.getInfo());
if(s==null||s.isEmpty())continue;
int t = e.getTime();
if(t==Integer.MAX_VALUE||t<0)t = 0;//overflow
ProcessListEntryAggregate.updateDataMap(this.queries, s, t);
totalAccumultaedTime+=t;
String ns = this.normalize(s);
ProcessListEntryAggregate.updateDataMap(this.normalizedQueries, ns, t);
String s2 = this.stripLimit(s);
ProcessListEntryAggregate.updateDataMap(this.queriesWithLimitStripped, s2, t);
for(ProcessListEntryProcessor pr:this.appProcessorList)
pr.processEntry(e);
}
}
public Map<String, Integer> getUserSummary() {
return userSummary;
}
public void setUserSummary(Map<String, Integer> userSummary) {
this.userSummary = userSummary;
}
public Map<String, Integer> getHostSummary() {
return hostSummary;
}
public void setHostSummary(Map<String, Integer> hostSummary) {
this.hostSummary = hostSummary;
}
public Map<String, Integer> getCommandSummary() {
return commandSummary;
}
public void setCommandSummary(Map<String, Integer> commandSummary) {
this.commandSummary = commandSummary;
}
public Map<String, Integer> getCommandHostSummary() {
return commandHostSummary;
}
public void setCommandHostSummary(Map<String, Integer> commandHostSummary) {
this.commandHostSummary = commandHostSummary;
}
public Map<String, Integer> getStateSummary() {
return stateSummary;
}
public void setStateSummary(Map<String, Integer> stateSummary) {
this.stateSummary = stateSummary;
}
/**
 * Remove leading C-style comments only ("/* ... *&#47;" blocks at the start of the SQL).
 * @param str raw SQL text, may be null
 * @return the trimmed SQL with leading comments removed; "" when input is null
 */
private static String stripComments(String str)
{
if(str==null)return "";
String s = str.trim();
while(s.startsWith("/*"))
{
int idx = s.indexOf("*/");
if(idx<0)return s;//might not be valid
s = s.substring(idx+2);
// NOTE(review): String.substring never returns null, so this branch is dead code
if(s==null)return s;
else s = s.trim();
}
return s;
}
//TODO in case there is a LIMIT in the middle
/**
 * Strips a trailing "LIMIT n" or "LIMIT n, m" clause from the query text so that
 * paginated variants of the same statement aggregate together. A LIMIT that is not
 * at the very end of the string is left untouched (see TODO above).
 */
private String stripLimit(String str)
{
if(str==null)return "";
String s = str.trim();
Matcher mt = pt.matcher(s);
if(!mt.find())
return s;
if(mt.end()==s.length())
{
s = s.substring(0, mt.start());
if(s!=null)s=s.trim();
return s;
}
// "LIMIT n" was not at the end -- try the two-argument "LIMIT n, m" form
mt = pt2.matcher(s);
if(!mt.find())
return s;
if(mt.end()==s.length())
{
s = s.substring(0, mt.start());
if(s!=null)s=s.trim();
return s;
}
return s;//don't care middle one
}
/**
 * Normalize a query string by replacing literal values (numbers, quoted strings,
 * IN lists, VALUES tuples, BETWEEN bounds, trailing LIMIT clauses) with "?"
 * placeholders, so structurally identical statements aggregate together.
 * NOTE: the replaceAll calls below are order-dependent; do not reorder casually.
 * @param str comment-stripped SQL text
 * @return the normalized SQL
 */
private String normalize(String str)
{
String rpt3 = "(?i)VALUES\\s*\\((.*?)\\)";
String rpt4 = "(?i)in\\s*\\([^\\(^\\)]+\\)";
String rpt5 = "(?i)limit\\s+\\d+\\s*$" ;
String rpt6 = "(?i)limit\\s+\\d+\\s*,\\s*\\d+\\s*$" ;
str = str.replaceAll("=\\s*\\d+", "=?");
str = str.replaceAll("=\\s*'[^']+'", "=?");
str = str.replaceAll("\\+\\s*'?\\d+'?", "+?");
str = str.replaceAll("\\-\\s*'?\\d+'?", "-?");
str = str.replaceAll(">\\s*\\d+", ">?");
str = str.replaceAll(">\\s*'[^']+'", ">?");
str = str.replaceAll(">=\\s*\\d+", ">=?");
str = str.replaceAll(">=\\s*'[^']+'", ">=?");
str = str.replaceAll("<\\s*\\d+", "<?");
str = str.replaceAll("<\\s*'[^']+'", "<?");
str = str.replaceAll("<=\\s*'?\\d+'?", "<=?");
str = str.replaceAll("<=\\s*'[^']+'", "<=?");
str = str.replaceAll("(?i)BETWEEN\\s+\\d+\\s+AND\\s+\\d+", "BETWEEN ? and ?");
str = str.replaceAll("(?i)BETWEEN\\s+'[^']+'\\s+AND\\s+'[^']+'", "BETWEEN ? and ?");
//BETWEEN 1391040000 AND 1391126400
str = str.replaceAll(rpt4, "IN (?)");
str = str.replaceAll(rpt3, "VALUES (?)");
str = str.replaceAll(rpt5, "LIMIT ?");
str = str.replaceAll(rpt6, "LIMIT ?, ?");
//str = str.replaceAll("\\((\\s*'?\\d+'?\\s*,?)+", "(?");
//add special treatment for big inlist
str = replaceInlist(str);
return str;
}
/**
 * We might have a case processlist cannot print the full inlist.
 * Collapses a numeric IN list (possibly truncated by the server) into a placeholder.
 * This should be used after all other normalize operations.
 * @param str SQL text already processed by the replaceAll passes in normalize()
 * @return the SQL with the first numeric IN list collapsed, or the input on any error
 */
private String replaceInlist(String str)
{
try
{
Pattern pt = Pattern.compile("(?i)\\s*IN\\s*\\('?\\d+'?,");
Matcher m = pt.matcher(str);
if(m.find())
{
int start = m.start();
//find first close bracket )
int idx = str.indexOf(')', start);
if(idx<0 || idx==str.length()-1)
return str.substring(0, start)+" IN (......)";
else
return str.substring(0, start)+" IN (...) "+str.substring(idx+1);
}
// best-effort normalization: on any failure, fall through and keep the input unchanged
}catch(Exception ex){}
return str;
}
public Map<String, ProcessListEntryAggregate> getQueries() {
return queries;
}
public Map<String, ProcessListEntryAggregate> getQueriesWithLimitStripped() {
return queriesWithLimitStripped;
}
public List<ProcessListEntry> getProcessList() {
return processList;
}
public void setProcessList(List<ProcessListEntry> processList) {
this.processList = processList;
}
/**
 * Renders the full plain-text report: group summaries, top query aggregations
 * (by count and by time), per-app sections, the active process list, and optional
 * InnoDB status / lock / client / transaction sections. Must be called after
 * {@link #summarize()}.
 */
public void saveAsText(PrintWriter pw)
{
pw.println("Total Time: "+this.totalAccumultaedTime+" seconds.");
pw.println();
pw.println("------ User Summary ------");
List<KeyValIntPair> tmpList = new ArrayList<KeyValIntPair>(this.userSummary.size());
for(Map.Entry<String, Integer> e: this.userSummary.entrySet())
{
tmpList.add(new KeyValIntPair(e.getKey(), e.getValue()));
}
java.util.Collections.sort(tmpList);
for(KeyValIntPair p: tmpList)
{
//pw.println(p.name+": "+p.value);
pw.println(String.format("%5d: %s", p.value, p.name));
}
tmpList.clear();
int count = 0;
pw.println();
pw.println("------ Host Summary ------");
// NOTE(review): the 20-entry cap below is applied in HashMap iteration order BEFORE
// sorting, so this prints an arbitrary 20 hosts rather than the top 20 by count --
// confirm whether that is intended.
tmpList = new ArrayList<KeyValIntPair>(this.hostSummary.size());
for(Map.Entry<String, Integer> e: this.hostSummary.entrySet())
{
tmpList.add(new KeyValIntPair(e.getKey(), e.getValue()));
count++;
if(count>=20)break;
}
java.util.Collections.sort(tmpList);
for(KeyValIntPair p: tmpList)
pw.println(String.format("%5d: %s", p.value, p.name));
tmpList.clear();
pw.println();
pw.println("------ Command Summary ------");
tmpList = new ArrayList<KeyValIntPair>(this.commandSummary.size());
for(Map.Entry<String, Integer> e: this.commandSummary.entrySet())
{
tmpList.add(new KeyValIntPair(e.getKey(), e.getValue()));
}
java.util.Collections.sort(tmpList);
for(KeyValIntPair p: tmpList)
pw.println(String.format("%5d: %s", p.value, p.name));
tmpList.clear();
pw.println();
pw.println("------ State Summary ------");
tmpList = new ArrayList<KeyValIntPair>(this.stateSummary.size());
for(Map.Entry<String, Integer> e: this.stateSummary.entrySet())
{
tmpList.add(new KeyValIntPair(e.getKey(), e.getValue()));
}
java.util.Collections.sort(tmpList);
for(KeyValIntPair p: tmpList)
pw.println(String.format("%5d: %s", p.value, p.name));
tmpList.clear();
pw.println();
pw.println("------ Query Summary With LIMIT Stripped------");
ArrayList<ProcessListEntryAggregate> tmpList2 = new ArrayList<ProcessListEntryAggregate>(this.queriesWithLimitStripped.size());
for(Map.Entry<String, ProcessListEntryAggregate> e: this.queriesWithLimitStripped.entrySet())
{
tmpList2.add(e.getValue());
}
java.util.Collections.sort(tmpList2, new ProcessListEntryAggregate.SortByCount());
count = 0;
for(ProcessListEntryAggregate p: tmpList2)
{
pw.println(p.getChecksum()+": "+p.getSql());
pw.println(String.format("%s: count - %d, time - %d sec, avg - %3f sec, min - %d sec, max -%d sec",
p.getChecksum(), p.getCount(), p.getTotal_time_sec(), p.getAverage(), p.getMin_time_sec(), p.getMax_time_sec()));
count++;
if(count>20)break;//since we sort them and attach process list, display only top 20
}
//tmpList2.clear();
pw.println();
pw.println("------ Query Time Summary With LIMIT Stripped------");
// tmpList2 is intentionally re-sorted (by time, via natural ordering) and reused here
java.util.Collections.sort(tmpList2);
count = 0;
for(ProcessListEntryAggregate p: tmpList2)
{
pw.println(p.getChecksum()+": "+p.getSql());
pw.println(String.format("%s: count - %d, time - %d sec, avg - %3f sec, min - %d sec, max -%d sec",
p.getChecksum(), p.getCount(), p.getTotal_time_sec(), p.getAverage(), p.getMin_time_sec(), p.getMax_time_sec()));
count++;
if(count>20)break;//since we sort them and attach process list, display only top 20
}
tmpList2.clear();
pw.println();
pw.println("------ Normalized Queries------");
tmpList2 = new ArrayList<ProcessListEntryAggregate>(this.normalizedQueries.size());
for(Map.Entry<String, ProcessListEntryAggregate> e: this.normalizedQueries.entrySet())
{
tmpList2.add(e.getValue());
}
java.util.Collections.sort(tmpList2, new ProcessListEntryAggregate.SortByCount());
count = 0;
for(ProcessListEntryAggregate p: tmpList2)
{
pw.println(p.getChecksum()+": "+p.getSql());
pw.println(String.format("%s: count - %d, time - %d sec, avg - %3f sec, min - %d sec, max -%d sec",
p.getChecksum(), p.getCount(), p.getTotal_time_sec(), p.getAverage(), p.getMin_time_sec(), p.getMax_time_sec()));
count++;
if(count>20)break;//since we sort them and attach process list, display only top 20
}
//tmpList2.clear();
pw.println();
pw.println("------ Time Summary With Normalized Queries------");
java.util.Collections.sort(tmpList2);
count = 0;
for(ProcessListEntryAggregate p: tmpList2)
{
pw.println(p.getChecksum()+": "+p.getSql());
pw.println(String.format("%s: count - %d, time - %d sec, avg - %3f sec, min - %d sec, max -%d sec",
p.getChecksum(), p.getCount(), p.getTotal_time_sec(), p.getAverage(), p.getMin_time_sec(), p.getMax_time_sec()));
count++;
if(count>20)break;//since we sort them and attach process list, display only top 20
}
tmpList2.clear();
//app specific
for(ProcessListEntryProcessor pr:this.appProcessorList)
pr.dumpSummary(pw);
java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
pw.println();
pw.println("------ Active Process List ------");
int idx = 1;
for(ProcessListEntry e: this.processList)
{
if("Sleep".equalsIgnoreCase(e.getCommand()))continue;//ignore sleep one
//dump the process list
pw.println("------ Query "+idx+" ---");
pw.println(e.getInfo());
String ts = "";
try
{
// NOTE(review): e.getTime()*1000 is int arithmetic and can overflow for queries
// running longer than ~24 days -- confirm whether a long cast is warranted.
ts = sdf.format(new java.util.Date(this.reportTimestamp - e.getTime()*1000));
}catch(Exception ex){}
pw.println("Time: "+e.getTime()+", Ts: "+ts+", State: "+e.getState()+", User: "+e.getUser()+", DB: "+e.getDb()+", Host: "+e.getHost());
if(e.getRows_examined()>0 || e.getRows_read()>0 ||e.getRows_sent()>0)
{
pw.println("Rows_examined: "+e.getRows_examined()+", Rows_read: "+e.getRows_read()+", Rows_sent: "+e.getRows_sent());
}
idx++;
}
if(this.innodbStatus!=null)
{
pw.println();
pw.println("------ Innodb Status ------");
java.io.StringReader reader = new java.io.StringReader(this.innodbStatus);
java.io.BufferedReader bufReader = new java.io.BufferedReader(reader);
String line = null;
boolean skip = false;
try
{
// filter out "not started" transaction blocks from the InnoDB status dump
while( (line = bufReader.readLine())!=null)
{
if(line.startsWith("---TRANSACTION ") && line.contains("not started") && !line.contains("estimating"))
skip = true;
else if(skip)
{
if(line.startsWith("--------")||line.startsWith("---TRANSACTION "))
skip = false;
}
if(!skip)
pw.println(line);
}
}catch(Exception ex)
{
logger.log(Level.INFO, "innodb status parsing error", ex);
}finally
{
if(bufReader!=null)try{bufReader.close();}catch(Exception iex){}
}
}
if(this.lockList!=null && this.lockList.getRows().size()>0)
{
pw.println();
pw.println("------ InnoDB Locks ------");
AlertReport.printList(pw, this.lockList);
}
if(this.clientList!=null && this.clientList.getRows().size()>0)
{
pw.println();
pw.println("------ Client Statistics ------");
AlertReport.printList(pw, this.clientList);
}
if(this.trxList!=null && this.trxList.getRows().size()>0)
{
pw.println();
pw.println("------ InnoDB TRX LONGER THAN 60 SECONDS ------");
AlertReport.printList(pw, this.trxList);
}
}
public Map<String, ProcessListEntryAggregate> getNormalizedQueries() {
return normalizedQueries;
}
public String getInnodbStatus() {
return innodbStatus;
}
public void setInnodbStatus(String innodbStatus) {
this.innodbStatus = innodbStatus;
}
public long getReportTimestamp() {
return reportTimestamp;
}
public void setReportTimestamp(long reportTimestamp) {
this.reportTimestamp = reportTimestamp;
}
public ResultList getLockList() {
return lockList;
}
public void setLockList(ResultList lockList) {
this.lockList = lockList;
}
public ResultList getClientList() {
return clientList;
}
public void setClientList(ResultList clientList) {
this.clientList = clientList;
}
public ResultList getTrxList() {
return trxList;
}
public void setTrxList(ResultList trxList) {
this.trxList = trxList;
}
}
| |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.webui.servlet;
import java.io.IOException;
import java.sql.SQLException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dspace.app.webui.util.UIUtil;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.browse.BrowseEngine;
import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex;
import org.dspace.browse.BrowseInfo;
import org.dspace.browse.BrowserScope;
import org.dspace.sort.SortOption;
import org.dspace.sort.SortException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
/**
* Servlet for browsing through indices, as they are defined in
* the configuration. This class can take a wide variety of inputs from
* the user interface:
*
* - type: the type of browse (index name) being performed
* - order: (ASC | DESC) the direction for result sorting
* - value: A specific value to find items around. For example the author name or subject
* - month: integer specification of the month of a date browse
* - year: integer specification of the year of a date browse
* - starts_with: string value at which to start browsing
* - vfocus: start browsing with a value of this string
* - focus: integer id of the item at which to start browsing
* - rpp: integer number of results per page to display
* - sort_by: integer specification of the field to search on
* - etal: integer number to limit multiple value items specified in config to
*
* @author Richard Jones
* @version $Revision$
*/
public abstract class AbstractBrowserServlet extends DSpaceServlet
{
    /** log4j category */
    private static Logger log = Logger.getLogger(AbstractBrowserServlet.class);

    public AbstractBrowserServlet()
    {
        super();
    }

    /**
     * Create a BrowserScope from the current request.
     * <p>
     * Reads the browse parameters (type, order, value, month/year, starts_with,
     * focus/vfocus, rpp, sort_by, etal), resolves the browse index and sort option,
     * applies defaults and inline validation, and assigns the community/collection
     * container scope.
     *
     * @param context The database context
     * @param request The servlet request
     * @param response The servlet response
     * @return A BrowserScope for the current parameters
     * @throws ServletException on browse or sort configuration errors
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    protected BrowserScope getBrowserScopeForRequest(Context context, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException, AuthorizeException
    {
        try
        {
            // first, lift all the stuff out of the request that we might need
            String type = request.getParameter("type");
            String order = request.getParameter("order");
            String value = request.getParameter("value");
            String valueLang = request.getParameter("value_lang");
            String month = request.getParameter("month");
            String year = request.getParameter("year");
            String startsWith = request.getParameter("starts_with");
            String valueFocus = request.getParameter("vfocus");
            String valueFocusLang = request.getParameter("vfocus_lang");
            String authority = request.getParameter("authority");
            int focus = UIUtil.getIntParameter(request, "focus");
            int offset = UIUtil.getIntParameter(request, "offset");
            int resultsperpage = UIUtil.getIntParameter(request, "rpp");
            int sortBy = UIUtil.getIntParameter(request, "sort_by");
            int etAl = UIUtil.getIntParameter(request, "etal");

            // get the community or collection location for the browse request
            // Note that we are only interested in getting the "smallest" container,
            // so if we find a collection, we don't bother looking up the community
            Collection collection = null;
            Community community = null;
            collection = UIUtil.getCollectionLocation(request);
            if (collection == null)
            {
                community = UIUtil.getCommunityLocation(request);
            }

            // process the input, performing some inline validation
            BrowseIndex bi = null;
            if (type != null && !"".equals(type))
            {
                bi = BrowseIndex.getBrowseIndex(type);
            }

            if (bi == null)
            {
                if (sortBy > 0)
                {
                    bi = BrowseIndex.getBrowseIndex(SortOption.getSortOption(sortBy));
                }
                else
                {
                    bi = BrowseIndex.getBrowseIndex(SortOption.getDefaultSortOption());
                }
            }

            // If we don't have a sort column
            if (bi != null && sortBy == -1)
            {
                // Get the default one
                SortOption so = bi.getSortOption();
                if (so != null)
                {
                    sortBy = so.getNumber();
                }
            }
            else if (bi != null && bi.isItemIndex() && !bi.isInternalIndex())
            {
                // If a default sort option is specified by the index, but it isn't
                // the same as sort option requested, attempt to find an index that
                // is configured to use that sort by default
                // This is so that we can then highlight the correct option in the navigation
                SortOption bso = bi.getSortOption();
                SortOption so = SortOption.getSortOption(sortBy);
                // FIX: the condition previously read "bso.equals(so)", which contradicts
                // the comment above and made this branch a no-op (it re-resolved the
                // index for the sort it already had). The branch must fire only when the
                // requested sort DIFFERS from the index default.
                if (bso != null && !bso.equals(so))
                {
                    BrowseIndex newBi = BrowseIndex.getBrowseIndex(so);
                    if (newBi != null)
                    {
                        bi = newBi;
                        type = bi.getName();
                    }
                }
            }

            if (order == null && bi != null)
            {
                order = bi.getDefaultOrder();
            }

            // If the offset is invalid, reset to 0
            if (offset < 0)
            {
                offset = 0;
            }

            // if no resultsperpage set, default to 20
            if (resultsperpage < 0)
            {
                resultsperpage = 20;
            }

            // if year and perhaps month have been selected, we translate these into "startsWith"
            // if startsWith has already been defined then it is overwritten
            if (year != null && !"".equals(year) && !"-1".equals(year))
            {
                startsWith = year;
                if ((month != null) && !"-1".equals(month) && !"".equals(month))
                {
                    // subtract 1 from the month, so the match works appropriately
                    if ("ASC".equals(order))
                    {
                        month = Integer.toString((Integer.parseInt(month) - 1));
                    }

                    // They've selected a month as well
                    if (month.length() == 1)
                    {
                        // Ensure double-digit month number
                        month = "0" + month;
                    }

                    startsWith = year + "-" + month;

                    if ("ASC".equals(order))
                    {
                        // "day 32" sorts after any real date in the month, so an ascending
                        // browse starts right after the selected month
                        startsWith = startsWith + "-32";
                    }
                }
            }

            // determine which level of the browse we are at: 0 for top, 1 for second
            int level = 0;
            if (value != null || authority != null)
            {
                level = 1;
            }

            // if sortBy is still not set, set it to 0, which is default to use the primary index value
            if (sortBy == -1)
            {
                sortBy = 0;
            }

            // figure out the setting for author list truncation
            if (etAl == -1)     // there is no limit, or the UI says to use the default
            {
                int limitLine = ConfigurationManager.getIntProperty("webui.browse.author-limit");
                if (limitLine != 0)
                {
                    etAl = limitLine;
                }
            }
            else                // if the user has set a limit
            {
                if (etAl == 0)  // 0 is the user setting for unlimited
                {
                    etAl = -1;  // but -1 is the application setting for unlimited
                }
            }

            // log the request
            String comHandle = "n/a";
            if (community != null)
            {
                comHandle = community.getHandle();
            }
            String colHandle = "n/a";
            if (collection != null)
            {
                colHandle = collection.getHandle();
            }

            String arguments = "type=" + type + ",order=" + order + ",value=" + value +
                ",month=" + month + ",year=" + year + ",starts_with=" + startsWith +
                ",vfocus=" + valueFocus + ",focus=" + focus + ",rpp=" + resultsperpage +
                ",sort_by=" + sortBy + ",community=" + comHandle + ",collection=" + colHandle +
                ",level=" + level + ",etal=" + etAl;

            log.info(LogManager.getHeader(context, "browse", arguments));

            // set up a BrowseScope and start loading the values into it
            BrowserScope scope = new BrowserScope(context);
            scope.setBrowseIndex(bi);
            scope.setOrder(order);
            scope.setFilterValue(value != null?value:authority);
            scope.setFilterValueLang(valueLang);
            scope.setJumpToItem(focus);
            scope.setJumpToValue(valueFocus);
            scope.setJumpToValueLang(valueFocusLang);
            scope.setStartsWith(startsWith);
            scope.setOffset(offset);
            scope.setResultsPerPage(resultsperpage);
            scope.setSortBy(sortBy);
            scope.setBrowseLevel(level);
            scope.setEtAl(etAl);
            scope.setAuthorityValue(authority);

            // assign the scope of either Community or Collection if necessary
            if (community != null)
            {
                scope.setBrowseContainer(community);
            }
            else if (collection != null)
            {
                scope.setBrowseContainer(collection);
            }

            // For second level browses on metadata indexes, we need to adjust the default sorting
            if (bi != null && bi.isMetadataIndex() && scope.isSecondLevel() && scope.getSortBy() <= 0)
            {
                scope.setSortBy(1);
            }

            return scope;
        }
        catch (SortException se)
        {
            log.error("caught exception: ", se);
            throw new ServletException(se);
        }
        catch (BrowseException e)
        {
            log.error("caught exception: ", e);
            throw new ServletException(e);
        }
    }

    /**
     * Execute the browse described by the scope and dispatch to the appropriate page:
     * the single-value page for first-level metadata browses, the full item page
     * otherwise, or the no-results page when the browse comes back empty.
     * Browse does not currently respond to POST requests.
     */
    protected void processBrowse(Context context, BrowserScope scope, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException,
            AuthorizeException
    {
        try
        {
            BrowseIndex bi = scope.getBrowseIndex();

            // now start up a browse engine and get it to do the work for us
            BrowseEngine be = new BrowseEngine(context);
            BrowseInfo binfo = be.browse(scope);

            request.setAttribute("browse.info", binfo);

            if (AuthorizeManager.isAdmin(context))
            {
                // Set a variable to create admin buttons
                request.setAttribute("admin_button", Boolean.TRUE);
            }

            if (binfo.hasResults())
            {
                if (bi.isMetadataIndex() && !scope.isSecondLevel())
                {
                    showSinglePage(context, request, response);
                }
                else
                {
                    showFullPage(context, request, response);
                }
            }
            else
            {
                showNoResultsPage(context, request, response);
            }
        }
        catch (BrowseException e)
        {
            log.error("caught exception: ", e);
            throw new ServletException(e);
        }
    }

    /**
     * Display the error page
     *
     * @param context
     * @param request
     * @param response
     * @throws ServletException
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    protected abstract void showError(Context context, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException,
            AuthorizeException;

    /**
     * Display the No Results page
     *
     * @param context
     * @param request
     * @param response
     * @throws ServletException
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    protected abstract void showNoResultsPage(Context context, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException,
            AuthorizeException;

    /**
     * Display the single page. This is the page which lists just the single values of a
     * metadata browse, not individual items. Single values are links through to all the items
     * that match that metadata value
     *
     * @param context
     * @param request
     * @param response
     * @throws ServletException
     * @throws IOException
     * @throws SQLException
     * @throws AuthorizeException
     */
    protected abstract void showSinglePage(Context context, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException,
            AuthorizeException;

    /**
     * Display the full item-listing page for a browse.
     */
    protected abstract void showFullPage(Context context, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, SQLException,
            AuthorizeException;
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.kerby.kerberos.kerb.client;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * JAAS utilities for Kerberos login.
 *
 * <p>Provides helpers for obtaining an authenticated JAAS {@link Subject}
 * through the platform {@code Krb5LoginModule} using a password, a ticket
 * cache, or a keytab.  No external JAAS configuration file is required: each
 * helper builds an in-memory {@link Configuration} carrying the appropriate
 * login-module options.
 */
public final class JaasKrbUtil {

    /** Whether Krb5LoginModule debug output is enabled in the generated configurations. */
    public static final boolean ENABLE_DEBUG = true;

    private JaasKrbUtil() { }

    /**
     * Logs in with a principal name and password.
     *
     * @param principal the Kerberos principal name
     * @param password the password of the principal
     * @return the authenticated subject
     * @throws LoginException if authentication fails
     */
    public static Subject loginUsingPassword(
            String principal, String password) throws LoginException {
        Set<Principal> principals = new HashSet<>();
        principals.add(new KerberosPrincipal(principal));
        Subject subject = new Subject(false, principals,
                new HashSet<>(), new HashSet<>());
        Configuration conf = usePassword(principal);
        String confName = "PasswordConf";
        CallbackHandler callback = new KrbCallbackHandler(principal, password);
        LoginContext loginContext = new LoginContext(confName, subject, callback, conf);
        loginContext.login();
        return loginContext.getSubject();
    }

    /**
     * Logs in using credentials from an existing ticket cache.
     *
     * @param principal the Kerberos principal name
     * @param cacheFile the ticket cache file to read credentials from
     * @return the authenticated subject
     * @throws LoginException if authentication fails
     */
    public static Subject loginUsingTicketCache(
            String principal, File cacheFile) throws LoginException {
        Set<Principal> principals = new HashSet<>();
        principals.add(new KerberosPrincipal(principal));
        Subject subject = new Subject(false, principals,
                new HashSet<>(), new HashSet<>());
        Configuration conf = useTicketCache(principal, cacheFile);
        String confName = "TicketCacheConf";
        LoginContext loginContext = new LoginContext(confName, subject, null, conf);
        loginContext.login();
        return loginContext.getSubject();
    }

    /**
     * Logs in using a key from a keytab file.
     *
     * @param principal the Kerberos principal name
     * @param keytabFile the keytab file holding the principal's key
     * @return the authenticated subject
     * @throws LoginException if authentication fails
     */
    public static Subject loginUsingKeytab(
            String principal, File keytabFile) throws LoginException {
        Set<Principal> principals = new HashSet<>();
        principals.add(new KerberosPrincipal(principal));
        Subject subject = new Subject(false, principals,
                new HashSet<>(), new HashSet<>());
        Configuration conf = useKeytab(principal, keytabFile);
        String confName = "KeytabConf";
        LoginContext loginContext = new LoginContext(confName, subject, null, conf);
        loginContext.login();
        return loginContext.getSubject();
    }

    /**
     * Creates a JAAS configuration that prompts for a password.
     *
     * @param principal the Kerberos principal name
     * @return the configuration
     */
    public static Configuration usePassword(String principal) {
        return new PasswordJaasConf(principal);
    }

    /**
     * Creates a JAAS configuration that reads an existing ticket cache.
     *
     * @param principal the Kerberos principal name
     * @param credentialFile the ticket cache file
     * @return the configuration
     */
    public static Configuration useTicketCache(String principal,
                                               File credentialFile) {
        return new TicketCacheJaasConf(principal, credentialFile);
    }

    /**
     * Creates a JAAS configuration that authenticates from a keytab.
     *
     * @param principal the Kerberos principal name
     * @param keytabFile the keytab file
     * @return the configuration
     */
    public static Configuration useKeytab(String principal, File keytabFile) {
        return new KeytabJaasConf(principal, keytabFile);
    }

    private static String getKrb5LoginModuleName() {
        // IBM JDKs ship their Krb5LoginModule under a different package.
        return System.getProperty("java.vendor").contains("IBM")
                ? "com.ibm.security.auth.module.Krb5LoginModule"
                : "com.sun.security.auth.module.Krb5LoginModule";
    }

    /** JAAS configuration that authenticates non-interactively from a keytab. */
    static class KeytabJaasConf extends Configuration {
        private final String principal;
        private final File keytabFile;

        public KeytabJaasConf(String principal, File keytab) {
            this.principal = principal;
            this.keytabFile = keytab;
        }

        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            Map<String, String> options = new HashMap<>();
            options.put("keyTab", keytabFile.getAbsolutePath());
            options.put("principal", principal);
            options.put("useKeyTab", "true");
            options.put("storeKey", "true");
            options.put("doNotPrompt", "true");
            options.put("renewTGT", "false");
            options.put("refreshKrb5Config", "true");
            options.put("isInitiator", "true");
            options.put("debug", String.valueOf(ENABLE_DEBUG));
            return new AppConfigurationEntry[]{
                    new AppConfigurationEntry(getKrb5LoginModuleName(),
                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                            options)};
        }
    }

    /** JAAS configuration that reads credentials from a ticket cache file. */
    static class TicketCacheJaasConf extends Configuration {
        private final String principal;
        private final File clientCredentialFile;

        public TicketCacheJaasConf(String principal, File clientCredentialFile) {
            this.principal = principal;
            this.clientCredentialFile = clientCredentialFile;
        }

        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            Map<String, String> options = new HashMap<>();
            options.put("principal", principal);
            options.put("storeKey", "false");
            options.put("doNotPrompt", "false");
            options.put("useTicketCache", "true");
            options.put("renewTGT", "true");
            options.put("refreshKrb5Config", "true");
            options.put("isInitiator", "true");
            options.put("ticketCache", clientCredentialFile.getAbsolutePath());
            options.put("debug", String.valueOf(ENABLE_DEBUG));
            return new AppConfigurationEntry[]{
                    new AppConfigurationEntry(getKrb5LoginModuleName(),
                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                            options)};
        }
    }

    /** JAAS configuration that obtains a password via a callback handler. */
    static class PasswordJaasConf extends Configuration {
        private final String principal;

        public PasswordJaasConf(String principal) {
            this.principal = principal;
        }

        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            Map<String, String> options = new HashMap<>();
            options.put("principal", principal);
            options.put("storeKey", "true");
            options.put("useTicketCache", "true");
            options.put("useKeyTab", "false");
            options.put("renewTGT", "true");
            options.put("refreshKrb5Config", "true");
            options.put("isInitiator", "true");
            options.put("debug", String.valueOf(ENABLE_DEBUG));
            return new AppConfigurationEntry[]{
                    new AppConfigurationEntry(getKrb5LoginModuleName(),
                            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                            options)};
        }
    }

    /**
     * Callback handler that answers a {@link PasswordCallback} with the stored
     * password when the callback prompt mentions the configured principal.
     */
    public static class KrbCallbackHandler implements CallbackHandler {
        private final String principal;
        private final String password;

        public KrbCallbackHandler(String principal, String password) {
            this.principal = principal;
            this.password = password;
        }

        @Override
        public void handle(Callback[] callbacks)
                throws IOException, UnsupportedCallbackException {
            for (Callback callback : callbacks) {
                if (callback instanceof PasswordCallback) {
                    PasswordCallback pc = (PasswordCallback) callback;
                    // Only answer the prompt that names our principal.
                    if (pc.getPrompt().contains(principal)) {
                        pc.setPassword(password.toCharArray());
                        break;
                    }
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
/**
* The azure-eventhubs component that integrates Azure Event Hubs using AMQP
* protocol. Azure EventHubs is a highly scalable publish-subscribe service that
* can ingest millions of events per second and stream them to multiple
* consumers.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface EventHubsEndpointBuilderFactory {
/**
* Builder for endpoint consumers for the Azure Event Hubs component.
*/
public interface EventHubsEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default AdvancedEventHubsEndpointConsumerBuilder advanced() {
return (AdvancedEventHubsEndpointConsumerBuilder) this;
}
/**
* Sets the retry policy for EventHubAsyncClient. If not specified, the
* default retry options are used.
*
* The option is a:
* <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
*
* Group: common
*
* @param amqpRetryOptions the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder amqpRetryOptions(
Object amqpRetryOptions) {
doSetProperty("amqpRetryOptions", amqpRetryOptions);
return this;
}
/**
* Sets the retry policy for EventHubAsyncClient. If not specified, the
* default retry options are used.
*
* The option will be converted to a
* <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
*
* Group: common
*
* @param amqpRetryOptions the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder amqpRetryOptions(
String amqpRetryOptions) {
doSetProperty("amqpRetryOptions", amqpRetryOptions);
return this;
}
/**
* Sets the transport type by which all the communication with Azure
* Event Hubs occurs. Default value is AmqpTransportType#AMQP.
*
* The option is a:
* <code>com.azure.core.amqp.AmqpTransportType</code> type.
*
* Default: AMQP
* Group: common
*
* @param amqpTransportType the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder amqpTransportType(
AmqpTransportType amqpTransportType) {
doSetProperty("amqpTransportType", amqpTransportType);
return this;
}
/**
* Sets the transport type by which all the communication with Azure
* Event Hubs occurs. Default value is AmqpTransportType#AMQP.
*
* The option will be converted to a
* <code>com.azure.core.amqp.AmqpTransportType</code> type.
*
* Default: AMQP
* Group: common
*
* @param amqpTransportType the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder amqpTransportType(
String amqpTransportType) {
doSetProperty("amqpTransportType", amqpTransportType);
return this;
}
/**
* Setting the autoDiscoverClient mechanism, if true, the component will
* look for a client instance in the registry automatically otherwise it
* will skip that checking.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param autoDiscoverClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder autoDiscoverClient(
boolean autoDiscoverClient) {
doSetProperty("autoDiscoverClient", autoDiscoverClient);
return this;
}
/**
* Setting the autoDiscoverClient mechanism, if true, the component will
* look for a client instance in the registry automatically otherwise it
* will skip that checking.
*
* The option will be converted to a <code>boolean</code>
* type.
*
* Default: true
* Group: common
*
* @param autoDiscoverClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder autoDiscoverClient(
String autoDiscoverClient) {
doSetProperty("autoDiscoverClient", autoDiscoverClient);
return this;
}
/**
* In case you chose the default BlobCheckpointStore, this sets access
* key for the associated azure account name to be used for
* authentication with azure blob services.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param blobAccessKey the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder blobAccessKey(
String blobAccessKey) {
doSetProperty("blobAccessKey", blobAccessKey);
return this;
}
/**
* In case you chose the default BlobCheckpointStore, this sets Azure
* account name to be used for authentication with azure blob services.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param blobAccountName the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder blobAccountName(
String blobAccountName) {
doSetProperty("blobAccountName", blobAccountName);
return this;
}
/**
* In case you chose the default BlobCheckpointStore, this sets the blob
* container that shall be used by the BlobCheckpointStore to store the
* checkpoint offsets.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param blobContainerName the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder blobContainerName(
String blobContainerName) {
doSetProperty("blobContainerName", blobContainerName);
return this;
}
/**
* In case you chose the default BlobCheckpointStore,
* StorageSharedKeyCredential can be injected to create the azure
* client, this holds the important authentication information.
*
* The option is a:
* <code>com.azure.storage.common.StorageSharedKeyCredential</code> type.
*
* Group: consumer
*
* @param blobStorageSharedKeyCredential the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder blobStorageSharedKeyCredential(
Object blobStorageSharedKeyCredential) {
doSetProperty("blobStorageSharedKeyCredential", blobStorageSharedKeyCredential);
return this;
}
/**
* In case you chose the default BlobCheckpointStore,
* StorageSharedKeyCredential can be injected to create the azure
* client, this holds the important authentication information.
*
* The option will be converted to a
* <code>com.azure.storage.common.StorageSharedKeyCredential</code> type.
*
* Group: consumer
*
* @param blobStorageSharedKeyCredential the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder blobStorageSharedKeyCredential(
String blobStorageSharedKeyCredential) {
doSetProperty("blobStorageSharedKeyCredential", blobStorageSharedKeyCredential);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option will be converted to a <code>boolean</code>
* type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder bridgeErrorHandler(
String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Sets the CheckpointStore the EventProcessorClient will use for
* storing partition ownership and checkpoint information. Users can,
* optionally, provide their own implementation of CheckpointStore which
* will store ownership and checkpoint information. By default it set to
* use
* com.azure.messaging.eventhubs.checkpointstore.blob.BlobCheckpointStore which stores all checkpoint offsets into Azure Blob Storage.
*
* The option is a:
* <code>com.azure.messaging.eventhubs.CheckpointStore</code> type.
*
* Default: BlobCheckpointStore
* Group: consumer
*
* @param checkpointStore the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder checkpointStore(
Object checkpointStore) {
doSetProperty("checkpointStore", checkpointStore);
return this;
}
/**
* Sets the CheckpointStore the EventProcessorClient will use for
* storing partition ownership and checkpoint information. Users can,
* optionally, provide their own implementation of CheckpointStore which
* will store ownership and checkpoint information. By default it set to
* use
* com.azure.messaging.eventhubs.checkpointstore.blob.BlobCheckpointStore which stores all checkpoint offsets into Azure Blob Storage.
*
* The option will be converted to a
* <code>com.azure.messaging.eventhubs.CheckpointStore</code> type.
*
* Default: BlobCheckpointStore
* Group: consumer
*
* @param checkpointStore the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder checkpointStore(
String checkpointStore) {
doSetProperty("checkpointStore", checkpointStore);
return this;
}
/**
* Sets the name of the consumer group this consumer is associated with.
* Events are read in the context of this group. The name of the
         * consumer group that is created by default is {@link
         * #DEFAULT_CONSUMER_GROUP_NAME $Default}.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: $Default
* Group: consumer
*
* @param consumerGroupName the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder consumerGroupName(
String consumerGroupName) {
doSetProperty("consumerGroupName", consumerGroupName);
return this;
}
/**
* Sets the map containing the event position to use for each partition
* if a checkpoint for the partition does not exist in CheckpointStore.
* This map is keyed off of the partition id. If there is no checkpoint
* in CheckpointStore and there is no entry in this map, the processing
         * of the partition will start from {@link EventPosition#latest() latest}
* position.
*
* The option is a: <code>java.util.Map&lt;java.lang.String,
* com.azure.messaging.eventhubs.models.EventPosition&gt;</code> type.
*
* Group: consumer
*
* @param eventPosition the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder eventPosition(
Map<String, Object> eventPosition) {
doSetProperty("eventPosition", eventPosition);
return this;
}
/**
* Sets the map containing the event position to use for each partition
* if a checkpoint for the partition does not exist in CheckpointStore.
* This map is keyed off of the partition id. If there is no checkpoint
* in CheckpointStore and there is no entry in this map, the processing
         * of the partition will start from {@link EventPosition#latest() latest}
* position.
*
* The option will be converted to a
* <code>java.util.Map&lt;java.lang.String,
* com.azure.messaging.eventhubs.models.EventPosition&gt;</code> type.
*
* Group: consumer
*
* @param eventPosition the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder eventPosition(
String eventPosition) {
doSetProperty("eventPosition", eventPosition);
return this;
}
/**
* Sets the count used by the receiver to control the number of events
* the Event Hub consumer will actively receive and queue locally
* without regard to whether a receive operation is currently active.
*
* The option is a: <code>int</code> type.
*
* Default: 500
* Group: consumer
*
* @param prefetchCount the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder prefetchCount(int prefetchCount) {
doSetProperty("prefetchCount", prefetchCount);
return this;
}
/**
* Sets the count used by the receiver to control the number of events
* the Event Hub consumer will actively receive and queue locally
* without regard to whether a receive operation is currently active.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 500
* Group: consumer
*
* @param prefetchCount the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder prefetchCount(
String prefetchCount) {
doSetProperty("prefetchCount", prefetchCount);
return this;
}
/**
* Instead of supplying namespace, sharedAccessKey, sharedAccessName ...
* etc, you can just supply the connection string for your eventHub. The
* connection string for EventHubs already include all the necessary
* information to connection to your EventHub. To learn on how to
* generate the connection string, take a look at this documentation:
* https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param connectionString the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder connectionString(
String connectionString) {
doSetProperty("connectionString", connectionString);
return this;
}
/**
* The generated value for the SharedAccessName.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sharedAccessKey the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder sharedAccessKey(
String sharedAccessKey) {
doSetProperty("sharedAccessKey", sharedAccessKey);
return this;
}
/**
* The name you chose for your EventHubs SAS keys.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sharedAccessName the value to set
* @return the dsl builder
*/
default EventHubsEndpointConsumerBuilder sharedAccessName(
String sharedAccessName) {
doSetProperty("sharedAccessName", sharedAccessName);
return this;
}
}
    /**
     * Advanced builder for endpoint consumers for the Azure Event Hubs
     * component.
     */
    public interface AdvancedEventHubsEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /**
         * Returns the basic (non-advanced) view of this consumer builder.
         *
         * @return the basic endpoint consumer builder
         */
        default EventHubsEndpointConsumerBuilder basic() {
            return (EventHubsEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option is a:
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         *
         * @param exceptionHandler the value to set
         * @return the dsl builder
         */
        default AdvancedEventHubsEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         *
         * @param exceptionHandler the value to set
         * @return the dsl builder
         */
        default AdvancedEventHubsEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a:
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         *
         * @param exchangePattern the value to set
         * @return the dsl builder
         */
        default AdvancedEventHubsEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         *
         * @param exchangePattern the value to set
         * @return the dsl builder
         */
        default AdvancedEventHubsEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
    }
/**
* Builder for endpoint producers for the Azure Event Hubs component.
*/
public interface EventHubsEndpointProducerBuilder
extends
EndpointProducerBuilder {
default AdvancedEventHubsEndpointProducerBuilder advanced() {
return (AdvancedEventHubsEndpointProducerBuilder) this;
}
/**
* Sets the retry policy for EventHubAsyncClient. If not specified, the
* default retry options are used.
*
* The option is a:
* <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
*
* Group: common
*
* @param amqpRetryOptions the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder amqpRetryOptions(
Object amqpRetryOptions) {
doSetProperty("amqpRetryOptions", amqpRetryOptions);
return this;
}
/**
* Sets the retry policy for EventHubAsyncClient. If not specified, the
* default retry options are used.
*
* The option will be converted to a
* <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
*
* Group: common
*
* @param amqpRetryOptions the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder amqpRetryOptions(
String amqpRetryOptions) {
doSetProperty("amqpRetryOptions", amqpRetryOptions);
return this;
}
/**
* Sets the transport type by which all the communication with Azure
* Event Hubs occurs. Default value is AmqpTransportType#AMQP.
*
* The option is a:
* <code>com.azure.core.amqp.AmqpTransportType</code> type.
*
* Default: AMQP
* Group: common
*
* @param amqpTransportType the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder amqpTransportType(
AmqpTransportType amqpTransportType) {
doSetProperty("amqpTransportType", amqpTransportType);
return this;
}
/**
* Sets the transport type by which all the communication with Azure
* Event Hubs occurs. Default value is AmqpTransportType#AMQP.
*
* The option will be converted to a
* <code>com.azure.core.amqp.AmqpTransportType</code> type.
*
* Default: AMQP
* Group: common
*
* @param amqpTransportType the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder amqpTransportType(
String amqpTransportType) {
doSetProperty("amqpTransportType", amqpTransportType);
return this;
}
/**
* Setting the autoDiscoverClient mechanism, if true, the component will
* look for a client instance in the registry automatically otherwise it
* will skip that checking.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param autoDiscoverClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder autoDiscoverClient(
boolean autoDiscoverClient) {
doSetProperty("autoDiscoverClient", autoDiscoverClient);
return this;
}
/**
* Setting the autoDiscoverClient mechanism, if true, the component will
* look for a client instance in the registry automatically otherwise it
* will skip that checking.
*
* The option will be converted to a <code>boolean</code>
* type.
*
* Default: true
* Group: common
*
* @param autoDiscoverClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder autoDiscoverClient(
String autoDiscoverClient) {
doSetProperty("autoDiscoverClient", autoDiscoverClient);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code>
* type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder lazyStartProducer(
String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Sets the identifier of the Event Hub partition that the events will
* be sent to. If the identifier is not specified, the Event Hubs
* service will be responsible for routing events that are sent to an
* available partition.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param partitionId the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder partitionId(String partitionId) {
doSetProperty("partitionId", partitionId);
return this;
}
/**
* Sets a hashing key to be provided for the batch of events, which
* instructs the Event Hubs service to map this key to a specific
* partition. The selection of a partition is stable for a given
* partition hashing key. Should any other batches of events be sent
* using the same exact partition hashing key, the Event Hubs service
* will route them all to the same partition. This should be specified
* only when there is a need to group events by partition, but there is
* flexibility into which partition they are routed. If ensuring that a
* batch of events is sent only to a specific partition, it is
         * recommended that the {@link #setPartitionId(String) identifier of the
* position be specified directly} when sending the batch.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param partitionKey the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder partitionKey(
String partitionKey) {
doSetProperty("partitionKey", partitionKey);
return this;
}
/**
* Sets the EventHubProducerAsyncClient.An asynchronous producer
* responsible for transmitting EventData to a specific Event Hub,
* grouped together in batches. Depending on the options specified when
         * creating an {@link EventDataBatch}, the events may be automatically
* routed to an available partition or specific to a partition. Use by
* this component to produce the data in camel producer.
*
* The option is a:
* <code>com.azure.messaging.eventhubs.EventHubProducerAsyncClient</code> type.
*
* Group: producer
*
* @param producerAsyncClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder producerAsyncClient(
Object producerAsyncClient) {
doSetProperty("producerAsyncClient", producerAsyncClient);
return this;
}
/**
* Sets the EventHubProducerAsyncClient.An asynchronous producer
* responsible for transmitting EventData to a specific Event Hub,
* grouped together in batches. Depending on the options specified when
         * creating an {@link EventDataBatch}, the events may be automatically
* routed to an available partition or specific to a partition. Use by
* this component to produce the data in camel producer.
*
* The option will be converted to a
* <code>com.azure.messaging.eventhubs.EventHubProducerAsyncClient</code> type.
*
* Group: producer
*
* @param producerAsyncClient the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder producerAsyncClient(
String producerAsyncClient) {
doSetProperty("producerAsyncClient", producerAsyncClient);
return this;
}
/**
* Instead of supplying namespace, sharedAccessKey, sharedAccessName ...
* etc, you can just supply the connection string for your eventHub. The
* connection string for EventHubs already include all the necessary
* information to connection to your EventHub. To learn on how to
* generate the connection string, take a look at this documentation:
* https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param connectionString the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder connectionString(
String connectionString) {
doSetProperty("connectionString", connectionString);
return this;
}
/**
* The generated value for the SharedAccessName.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sharedAccessKey the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder sharedAccessKey(
String sharedAccessKey) {
doSetProperty("sharedAccessKey", sharedAccessKey);
return this;
}
/**
* The name you chose for your EventHubs SAS keys.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sharedAccessName the value to set
* @return the dsl builder
*/
default EventHubsEndpointProducerBuilder sharedAccessName(
String sharedAccessName) {
doSetProperty("sharedAccessName", sharedAccessName);
return this;
}
}
    /**
     * Advanced builder for endpoint producers for the Azure Event Hubs
     * component.
     */
    public interface AdvancedEventHubsEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        /**
         * Returns the basic (non-advanced) view of this producer builder.
         *
         * @return the basic endpoint producer builder
         */
        default EventHubsEndpointProducerBuilder basic() {
            return (EventHubsEndpointProducerBuilder) this;
        }
    }
/**
 * Builder for endpoint for the Azure Event Hubs component.
 *
 * Combines the consumer and producer builder DSLs; each setter stores an
 * endpoint property and returns {@code this} for fluent chaining.
 */
public interface EventHubsEndpointBuilder
        extends
            EventHubsEndpointConsumerBuilder,
            EventHubsEndpointProducerBuilder {
    /**
     * Switches to the advanced builder view of this endpoint.
     *
     * @return this builder, typed as the advanced builder
     */
    default AdvancedEventHubsEndpointBuilder advanced() {
        return (AdvancedEventHubsEndpointBuilder) this;
    }
    /**
     * Sets the retry policy for EventHubAsyncClient. If not specified, the
     * default retry options are used.
     *
     * The option is a:
     * <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
     *
     * Group: common
     *
     * @param amqpRetryOptions the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder amqpRetryOptions(
            Object amqpRetryOptions) {
        doSetProperty("amqpRetryOptions", amqpRetryOptions);
        return this;
    }
    /**
     * Sets the retry policy for EventHubAsyncClient. If not specified, the
     * default retry options are used.
     *
     * The option will be converted to a
     * <code>com.azure.core.amqp.AmqpRetryOptions</code> type.
     *
     * Group: common
     *
     * @param amqpRetryOptions the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder amqpRetryOptions(
            String amqpRetryOptions) {
        doSetProperty("amqpRetryOptions", amqpRetryOptions);
        return this;
    }
    /**
     * Sets the transport type by which all the communication with Azure
     * Event Hubs occurs. Default value is AmqpTransportType#AMQP.
     *
     * The option is a:
     * <code>com.azure.core.amqp.AmqpTransportType</code> type.
     *
     * Default: AMQP
     * Group: common
     *
     * @param amqpTransportType the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder amqpTransportType(
            AmqpTransportType amqpTransportType) {
        doSetProperty("amqpTransportType", amqpTransportType);
        return this;
    }
    /**
     * Sets the transport type by which all the communication with Azure
     * Event Hubs occurs. Default value is AmqpTransportType#AMQP.
     *
     * The option will be converted to a
     * <code>com.azure.core.amqp.AmqpTransportType</code> type.
     *
     * Default: AMQP
     * Group: common
     *
     * @param amqpTransportType the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder amqpTransportType(
            String amqpTransportType) {
        doSetProperty("amqpTransportType", amqpTransportType);
        return this;
    }
    /**
     * Setting the autoDiscoverClient mechanism, if true, the component will
     * look for a client instance in the registry automatically otherwise it
     * will skip that checking.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Default: true
     * Group: common
     *
     * @param autoDiscoverClient the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder autoDiscoverClient(
            boolean autoDiscoverClient) {
        doSetProperty("autoDiscoverClient", autoDiscoverClient);
        return this;
    }
    /**
     * Setting the autoDiscoverClient mechanism, if true, the component will
     * look for a client instance in the registry automatically otherwise it
     * will skip that checking.
     *
     * The option will be converted to a <code>boolean</code>
     * type.
     *
     * Default: true
     * Group: common
     *
     * @param autoDiscoverClient the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder autoDiscoverClient(
            String autoDiscoverClient) {
        doSetProperty("autoDiscoverClient", autoDiscoverClient);
        return this;
    }
    /**
     * Instead of supplying namespace, sharedAccessKey, sharedAccessName ...
     * etc, you can just supply the connection string for your eventHub. The
     * connection string for EventHubs already includes all the necessary
     * information to connect to your EventHub. To learn how to
     * generate the connection string, take a look at this documentation:
     * https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     *
     * @param connectionString the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder connectionString(
            String connectionString) {
        doSetProperty("connectionString", connectionString);
        return this;
    }
    /**
     * The generated value for the SharedAccessName.
     *
     * NOTE(review): the description above looks copy-pasted from the
     * sharedAccessName option — this method sets the SAS key paired with
     * {@code sharedAccessName}; confirm against the component metadata.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     *
     * @param sharedAccessKey the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder sharedAccessKey(String sharedAccessKey) {
        doSetProperty("sharedAccessKey", sharedAccessKey);
        return this;
    }
    /**
     * The name you chose for your EventHubs SAS keys.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: security
     *
     * @param sharedAccessName the value to set
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder sharedAccessName(
            String sharedAccessName) {
        doSetProperty("sharedAccessName", sharedAccessName);
        return this;
    }
}
/**
 * Advanced builder for endpoint for the Azure Event Hubs component.
 */
public interface AdvancedEventHubsEndpointBuilder
        extends
            AdvancedEventHubsEndpointConsumerBuilder,
            AdvancedEventHubsEndpointProducerBuilder {
    /**
     * Switches back to the basic builder view of this endpoint.
     *
     * @return this builder, typed as the basic builder
     */
    default EventHubsEndpointBuilder basic() {
        return (EventHubsEndpointBuilder) this;
    }
}
/**
 * Proxy enum for <code>com.azure.core.amqp.AmqpTransportType</code> enum.
 *
 * Constant names must match the wrapped Azure SDK enum so the values can be
 * mapped by name.
 */
enum AmqpTransportType {
    // Plain AMQP transport.
    AMQP,
    // AMQP over WebSockets transport.
    AMQP_WEB_SOCKETS;
}
public interface EventHubsBuilders {
    /**
     * Azure Event Hubs (camel-azure-eventhubs)
     * The azure-eventhubs component that integrates Azure Event Hubs using
     * AMQP protocol. Azure EventHubs is a highly scalable publish-subscribe
     * service that can ingest millions of events per second and stream them
     * to multiple consumers.
     *
     * Category: cloud,messaging
     * Since: 3.5
     * Maven coordinates: org.apache.camel:camel-azure-eventhubs
     *
     * Syntax: <code>azure-eventhubs:namespace/eventHubName</code>
     *
     * Path parameter: namespace
     * EventHubs namespace created in Azure Portal
     *
     * Path parameter: eventHubName
     * EventHubs name under a specific namespace
     *
     * @param path namespace/eventHubName
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder azureEventhubs(String path) {
        return EventHubsEndpointBuilderFactory.endpointBuilder("azure-eventhubs", path);
    }
    /**
     * Azure Event Hubs (camel-azure-eventhubs)
     * The azure-eventhubs component that integrates Azure Event Hubs using
     * AMQP protocol. Azure EventHubs is a highly scalable publish-subscribe
     * service that can ingest millions of events per second and stream them
     * to multiple consumers.
     *
     * Category: cloud,messaging
     * Since: 3.5
     * Maven coordinates: org.apache.camel:camel-azure-eventhubs
     *
     * Syntax: <code>azure-eventhubs:namespace/eventHubName</code>
     *
     * Path parameter: namespace
     * EventHubs namespace created in Azure Portal
     *
     * Path parameter: eventHubName
     * EventHubs name under a specific namespace
     *
     * @param componentName to use a custom component name for the endpoint
     * instead of the default name
     * @param path namespace/eventHubName
     * @return the dsl builder
     */
    default EventHubsEndpointBuilder azureEventhubs(
            String componentName,
            String path) {
        return EventHubsEndpointBuilderFactory.endpointBuilder(componentName, path);
    }
}
/**
 * Creates an Azure Event Hubs endpoint builder for the given component name
 * and remaining URI path ({@code namespace/eventHubName}).
 *
 * @param componentName component name the endpoint is built for
 * @param path remaining endpoint URI path
 * @return a builder implementing both the basic and advanced DSL views
 */
static EventHubsEndpointBuilder endpointBuilder(
        String componentName,
        String path) {
    // Local class captures componentName and satisfies both DSL interfaces.
    class EventHubsEndpointBuilderImpl extends AbstractEndpointBuilder implements EventHubsEndpointBuilder, AdvancedEventHubsEndpointBuilder {
        public EventHubsEndpointBuilderImpl(String path) {
            super(componentName, path);
        }
    }
    return new EventHubsEndpointBuilderImpl(path);
}
}
| |
/**
* generated by Xtext 2.17.1
*/
package ck2xtext.gfx.ck2gfx.impl;
import ck2xtext.gfx.ck2gfx.Ck2gfxPackage;
import ck2xtext.gfx.ck2gfx.Coordinates;
import ck2xtext.gfx.ck2gfx.LineChartType;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Line Chart Type</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link ck2xtext.gfx.ck2gfx.impl.LineChartTypeImpl#getName <em>Name</em>}</li>
 *   <li>{@link ck2xtext.gfx.ck2gfx.impl.LineChartTypeImpl#getSize <em>Size</em>}</li>
 *   <li>{@link ck2xtext.gfx.ck2gfx.impl.LineChartTypeImpl#getLineWidth <em>Line Width</em>}</li>
 * </ul>
 *
 * @generated
 */
public class LineChartTypeImpl extends MinimalEObjectImpl.Container implements LineChartType
{
  // NOTE: EMF-generated class. Do not hand-edit members tagged @generated
  // (or change the tag to "@generated NOT") — regeneration overwrites them.

  /**
   * The default value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected static final String NAME_EDEFAULT = null;
  /**
   * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected String name = NAME_EDEFAULT;
  /**
   * The cached value of the '{@link #getSize() <em>Size</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getSize()
   * @generated
   * @ordered
   */
  protected Coordinates size;
  /**
   * The default value of the '{@link #getLineWidth() <em>Line Width</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getLineWidth()
   * @generated
   * @ordered
   */
  protected static final int LINE_WIDTH_EDEFAULT = 0;
  /**
   * The cached value of the '{@link #getLineWidth() <em>Line Width</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getLineWidth()
   * @generated
   * @ordered
   */
  protected int lineWidth = LINE_WIDTH_EDEFAULT;
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected LineChartTypeImpl()
  {
    super();
  }
  /**
   * <!-- begin-user-doc -->
   * Returns the static EClass describing this model object.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return Ck2gfxPackage.Literals.LINE_CHART_TYPE;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getName()
  {
    return name;
  }
  /**
   * <!-- begin-user-doc -->
   * Sets the name and notifies adapters when notification is required.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setName(String newName)
  {
    String oldName = name;
    name = newName;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, Ck2gfxPackage.LINE_CHART_TYPE__NAME, oldName, name));
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Coordinates getSize()
  {
    return size;
  }
  /**
   * <!-- begin-user-doc -->
   * Swaps the containment reference and queues a SET notification on the
   * given chain without dispatching it.
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetSize(Coordinates newSize, NotificationChain msgs)
  {
    Coordinates oldSize = size;
    size = newSize;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, Ck2gfxPackage.LINE_CHART_TYPE__SIZE, oldSize, newSize);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }
  /**
   * <!-- begin-user-doc -->
   * Detaches the old contained Coordinates, attaches the new one, then fires
   * the accumulated notifications.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setSize(Coordinates newSize)
  {
    if (newSize != size)
    {
      NotificationChain msgs = null;
      if (size != null)
        msgs = ((InternalEObject)size).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - Ck2gfxPackage.LINE_CHART_TYPE__SIZE, null, msgs);
      if (newSize != null)
        msgs = ((InternalEObject)newSize).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - Ck2gfxPackage.LINE_CHART_TYPE__SIZE, null, msgs);
      msgs = basicSetSize(newSize, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, Ck2gfxPackage.LINE_CHART_TYPE__SIZE, newSize, newSize));
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public int getLineWidth()
  {
    return lineWidth;
  }
  /**
   * <!-- begin-user-doc -->
   * Sets the line width and notifies adapters when notification is required.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setLineWidth(int newLineWidth)
  {
    int oldLineWidth = lineWidth;
    lineWidth = newLineWidth;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, Ck2gfxPackage.LINE_CHART_TYPE__LINE_WIDTH, oldLineWidth, lineWidth));
  }
  /**
   * <!-- begin-user-doc -->
   * Clears the size containment when its container/child link is removed.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case Ck2gfxPackage.LINE_CHART_TYPE__SIZE:
        return basicSetSize(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective read of a feature by its numeric feature ID.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case Ck2gfxPackage.LINE_CHART_TYPE__NAME:
        return getName();
      case Ck2gfxPackage.LINE_CHART_TYPE__SIZE:
        return getSize();
      case Ck2gfxPackage.LINE_CHART_TYPE__LINE_WIDTH:
        return getLineWidth();
    }
    return super.eGet(featureID, resolve, coreType);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective write of a feature by its numeric feature ID.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case Ck2gfxPackage.LINE_CHART_TYPE__NAME:
        setName((String)newValue);
        return;
      case Ck2gfxPackage.LINE_CHART_TYPE__SIZE:
        setSize((Coordinates)newValue);
        return;
      case Ck2gfxPackage.LINE_CHART_TYPE__LINE_WIDTH:
        setLineWidth((Integer)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
  /**
   * <!-- begin-user-doc -->
   * Resets a feature to its default value by its numeric feature ID.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case Ck2gfxPackage.LINE_CHART_TYPE__NAME:
        setName(NAME_EDEFAULT);
        return;
      case Ck2gfxPackage.LINE_CHART_TYPE__SIZE:
        setSize((Coordinates)null);
        return;
      case Ck2gfxPackage.LINE_CHART_TYPE__LINE_WIDTH:
        setLineWidth(LINE_WIDTH_EDEFAULT);
        return;
    }
    super.eUnset(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Reports whether a feature differs from its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case Ck2gfxPackage.LINE_CHART_TYPE__NAME:
        return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
      case Ck2gfxPackage.LINE_CHART_TYPE__SIZE:
        return size != null;
      case Ck2gfxPackage.LINE_CHART_TYPE__LINE_WIDTH:
        return lineWidth != LINE_WIDTH_EDEFAULT;
    }
    return super.eIsSet(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Includes only simple attributes (name, lineWidth); the containment
   * reference is deliberately omitted.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();
    StringBuilder result = new StringBuilder(super.toString());
    result.append(" (name: ");
    result.append(name);
    result.append(", lineWidth: ");
    result.append(lineWidth);
    result.append(')');
    return result.toString();
  }
} //LineChartTypeImpl
| |
package com.opower.persistence.jpile.infile;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import com.google.common.base.Throwables;
import com.google.common.io.CharStreams;

import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
/**
 * Test cases for the InfileDataBuffer.
 *
 * Covers string/byte escaping, empty-row suppression, NULL markers,
 * {@code @Temporal}-driven date formatting, and float precision/scale.
 *
 * @author aaron.silverman
 */
public class InfileDataBufferTest {
    private InfileDataBuffer infileDataBuffer;

    private static final String TIMESTAMP_STRING = "2000-01-10 08:00:01";
    private static final String DATE_STRING = "2000-01-10";
    private static final String TIME_STRING = "08:00:01";
    // 2000-01-10 08:00:01 in the default time zone. Built via GregorianCalendar
    // instead of the deprecated Date(int, int, int, int, int, int) constructor.
    private static final Date TEST_DATE =
            new GregorianCalendar(2000, Calendar.JANUARY, 10, 8, 0, 1).getTime();

    @Before
    public void setUp() {
        this.infileDataBuffer = new InfileDataBuffer();
    }

    @Test
    public void testAppendString() {
        String contents = "Gladiator is the best movie ever!";
        this.infileDataBuffer.append(contents);
        addRowAndAssertContents(contents);
    }

    @Test
    public void testAppendStringNeedingEscaping() {
        String contents = "C:\\windows\\bluescreen.png";
        this.infileDataBuffer.append(contents);
        addRowAndAssertContents(contents.replace("\\", "\\\\"));
    }

    /**
     * Verify that the {@link InfileDataBuffer#append(String)} method correctly escapes special characters multiple special
     * characters are present in the input String.
     */
    @Test
    public void testAppendStringNeedEscapingWithMultipleEscapeCharacters() {
        String input = "D\ba\nv\ri\td\0D\\D\u001A";
        String expected = "D\\\ba\\\nv\\\ri\\\td\\\0D\\\\D\\\u001A";
        this.infileDataBuffer.append(input);
        addRowAndAssertContents(expected);
    }

    /**
     * Verify that the {@link InfileDataBuffer#append(String)} method correctly escapes special characters special characters
     * are back to back in the input String.
     */
    @Test
    public void testAppendStringNeedEscapingBackToBackEscapeCharacters() {
        String input = "Dav\r\nidDD\u001A\u001A";
        String expected = "Dav\\\r\\\nidDD\\\u001A\\\u001A";
        this.infileDataBuffer.append(input);
        addRowAndAssertContents(expected);
    }

    @Test
    public void testAppendByte() {
        this.infileDataBuffer.append((byte) 65);
        addRowAndAssertContents("A");
    }

    @Test
    public void testAppendByteNeedingEscaping() {
        // 92 is '\', which must be doubled in infile data.
        this.infileDataBuffer.append((byte) 92);
        addRowAndAssertContents("\\\\");
    }

    @Test
    public void testAppendBytes() {
        byte[] bytes = {72, 101, 108, 108, 111, 33};
        this.infileDataBuffer.append(bytes);
        addRowAndAssertContents("Hello!");
    }

    @Test
    public void testAppendBytesNeedingEscaping() {
        byte[] bytes = {67, 58, 92};
        this.infileDataBuffer.append(bytes);
        addRowAndAssertContents("C:\\\\");
    }

    /**
     * Attempt to insert a row of empty data in between two rows of good data. We should not
     * have a row in the infile buffer for the empty row in the middle.
     */
    @Test
    public void testAppendWithEmptyRowBuffer() {
        String contents1 = "GO SKINS";
        String contents2 = "GO WIZ";
        this.infileDataBuffer.append(contents1);
        this.infileDataBuffer.addRowToInfile();
        this.infileDataBuffer.newRow();
        this.infileDataBuffer.append("");
        this.infileDataBuffer.addRowToInfile();
        this.infileDataBuffer.newRow();
        this.infileDataBuffer.append(contents2);
        addRowAndAssertContents(contents1 + "\n" + contents2);
    }

    @Test
    public void testAppendNull() throws Exception {
        this.infileDataBuffer.appendNull();
        addRowAndAssertContents("\\N");
    }

    /**
     * A getter without an {@code @Temporal} annotation should be rejected.
     */
    @Test(expected = NullPointerException.class)
    public void testTemporalAnnotationTestClass() throws NoSuchMethodException {
        this.infileDataBuffer.append(TEST_DATE, TemporalAnnotationTestClass.class.getMethod("getDate"));
    }

    @Test
    public void testTemporalDateAnnotation() throws NoSuchMethodException {
        this.infileDataBuffer.append(TEST_DATE, TemporalAnnotationTestClass.class.getMethod("getDateWithTemporal"));
        addRowAndAssertContents(DATE_STRING);
    }

    @Test
    public void testTemporalTimeAnnotation() throws NoSuchMethodException {
        this.infileDataBuffer.append(TEST_DATE, TemporalAnnotationTestClass.class.getMethod("getTimeWithTemporal"));
        addRowAndAssertContents(TIME_STRING);
    }

    @Test
    public void testTemporalTimestampAnnotation() throws NoSuchMethodException {
        this.infileDataBuffer.append(TEST_DATE, TemporalAnnotationTestClass.class.getMethod("getTimestampWithTemporal"));
        addRowAndAssertContents(TIMESTAMP_STRING);
    }

    @Test
    public void testNullDateWithTemporal() throws NoSuchMethodException {
        this.infileDataBuffer.append(null, TemporalAnnotationTestClass.class.getMethod("getTimestampWithTemporal"));
        addRowAndAssertContents("\\N");
    }

    @Test
    public void testFloatWithPrecisionAndScale1() {
        this.infileDataBuffer.append(84009.469f, 12, 3);
        addRowAndAssertContents("84009.469");
    }

    @Test
    public void testFloatWithPrecisionAndScale2() {
        this.infileDataBuffer.append(182921.969f, 12, 3);
        addRowAndAssertContents("182921.969");
    }

    @Test
    public void testFloatWithPrecisionAndScale3() {
        this.infileDataBuffer.append(16725.617f, 12, 3);
        addRowAndAssertContents("16725.617");
    }

    /**
     * Flushes the current row into the infile buffer and asserts the whole
     * buffer's contents match {@code expected}.
     */
    private void addRowAndAssertContents(String expected) {
        try {
            this.infileDataBuffer.addRowToInfile();
            assertEquals(expected, CharStreams.toString(new InputStreamReader(this.infileDataBuffer.asInputStream())));
        }
        catch (IOException ex) {
            // Replaces the deprecated Throwables.propagate(ex).
            throw new UncheckedIOException(ex);
        }
    }

    /**
     * Reflection fixture for the {@code @Temporal} tests. Static nested: it
     * never touches the enclosing test instance, so it should not hold a
     * hidden reference to it.
     */
    @Entity
    @Table
    private static class TemporalAnnotationTestClass {
        private Date date;
        private Date dateWithTemporal;
        private Date timeWithTemporal;
        private Date timestampWithTemporal;

        @Column(name = "date")
        public Date getDate() {
            return this.date;
        }

        public void setDate(Date date) {
            this.date = date;
        }

        @Temporal(TemporalType.DATE)
        @Column(name = "date_with_temporal")
        public Date getDateWithTemporal() {
            return this.dateWithTemporal;
        }

        public void setDateWithTemporal(Date dateWithTemporal) {
            this.dateWithTemporal = dateWithTemporal;
        }

        @Temporal(TemporalType.TIME)
        @Column(name = "time_with_temporal")
        public Date getTimeWithTemporal() {
            return this.timeWithTemporal;
        }

        public void setTimeWithTemporal(Date timeWithTemporal) {
            this.timeWithTemporal = timeWithTemporal;
        }

        @Temporal(TemporalType.TIMESTAMP)
        @Column(name = "timestamp_with_temporal")
        public Date getTimestampWithTemporal() {
            return this.timestampWithTemporal;
        }

        public void setTimestampWithTemporal(Date timestampWithTemporal) {
            this.timestampWithTemporal = timestampWithTemporal;
        }
    }
}
| |
/*
* Copyright 2022 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.ballroom;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import org.jboss.gwt.elemento.core.Elements;
import org.jboss.gwt.elemento.core.IsElement;
import org.jboss.hal.ballroom.dataprovider.DataProvider;
import org.jboss.hal.ballroom.dataprovider.Display;
import org.jboss.hal.ballroom.dataprovider.Filter;
import org.jboss.hal.ballroom.dataprovider.FilterValue;
import org.jboss.hal.ballroom.dataprovider.PageInfo;
import org.jboss.hal.ballroom.dataprovider.SelectionInfo;
import org.jboss.hal.meta.security.Constraint;
import org.jboss.hal.meta.security.Constraints;
import org.jboss.hal.resources.CSS;
import org.jboss.hal.resources.Constants;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Messages;
import org.jboss.hal.resources.UIConstants;
import org.jboss.hal.spi.Callback;
import com.google.common.base.Strings;
import com.google.gwt.core.client.GWT;
import elemental2.dom.Element;
import elemental2.dom.HTMLElement;
import elemental2.dom.HTMLInputElement;
import rx.Subscription;
import static com.intendia.rxgwt.elemento.RxElemento.fromEvent;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.stream.Collectors.toList;
import static org.jboss.gwt.elemento.core.Elements.*;
import static org.jboss.gwt.elemento.core.Elements.form;
import static org.jboss.gwt.elemento.core.Elements.label;
import static org.jboss.gwt.elemento.core.EventType.click;
import static org.jboss.gwt.elemento.core.EventType.keyup;
import static org.jboss.gwt.elemento.core.InputType.text;
import static org.jboss.hal.ballroom.LayoutBuilder.column;
import static org.jboss.hal.resources.CSS.*;
import static org.jboss.hal.resources.CSS.label;
/**
* PatternFly toolbar. Should be connected to a {@link DataProvider} (which in turn updates its displays e.g. a list view):
*
* <pre>
* DataProvider dataProvider = ...;
* ListView listView = ...;
* Toolbar toolbar = ...;
*
* dataProvider.addDisplay(listView);
* dataProvider.addDisplay(toolbar);
* ...
* dataProvider.update(items);
* </pre>
*
* <p>
* Please note that the toolbar uses its own {@code <div class="row"/>} element. This is important if you add the toolbar using
* the methods from {@link org.jboss.hal.ballroom.LayoutBuilder}:
* </p>
*
* <pre>
* Toolbar toolbar = ...;
* elements()
* .add(toolbar)
* .add(row()
* .add(column()
* .add(...)))
* </pre>
*
* @see <a href=
* "https://www.patternfly.org/pattern-library/forms-and-controls/toolbar/">https://www.patternfly.org/pattern-library/forms-and-controls/toolbar/</a>
*/
public class Toolbar<T> implements Display<T>, IsElement<HTMLElement>, Attachable {
private static final String DATA_FILTER = "filter";
private static final String DATA_ACTIVE_FILTER = "activeFilter";
private static final String DATA_ACTIVE_FILTER_VALUE = "activeFilterValue";
private static final String DATA_SORT = "sort";
private static final Constants CONSTANTS = GWT.create(Constants.class);
private static final Messages MESSAGES = GWT.create(Messages.class);
private final DataProvider<T> dataProvider;
private Attribute<T> selectedFilter;
private Attribute<T> selectedSort;
private boolean asc;
private final HTMLElement root;
private HTMLElement filterLabel;
private HTMLElement filterButtonText;
private HTMLElement filterUl;
private HTMLInputElement filterInput;
private Subscription keyUpSubscription;
private HTMLElement sortButtonText;
private HTMLElement sortStaticText;
private HTMLElement sortOrderIcon;
private HTMLElement sortUl;
private final HTMLElement results;
private final HTMLElement selection;
private final HTMLElement filters;
private final HTMLElement activeFiltersUl;
/**
 * Builds the toolbar: a filter control (for attributes with a {@link Filter}),
 * a sort control (for attributes with a {@link Comparator}), up to three
 * action buttons plus a kebab dropdown for the rest, and a results/selection
 * row.
 *
 * Fix: the trailing "initial reset" section dereferenced {@code filterInput}
 * and {@code sortOrderIcon} unconditionally, but those fields are only
 * assigned when at least one attribute is filterable / sortable — guard them
 * (mirroring the null check already present in {@link #attach()}).
 *
 * @param dataProvider provider this toolbar filters/sorts; also notifies this display
 * @param attributes attributes offered for filtering and sorting
 * @param actions toolbar actions; the first three become buttons, the rest go into a dropdown
 */
public Toolbar(DataProvider<T> dataProvider, List<Attribute<T>> attributes, List<Action> actions) {
    this.dataProvider = dataProvider;
    HTMLElement controlContainer;
    HTMLElement resultContainer;
    this.root = div().css(row, toolbarPf)
            .add(column()
                    .add(controlContainer = form().css(toolbarPfActions).element())
                    .add(resultContainer = div().css(row, toolbarPfResults).element()))
            .element();

    // filter
    List<Attribute<T>> filterAttributes = attributes.stream()
            .filter(attribute -> {
                Filter<T> filter = attribute.filter;
                return filter != null;
            })
            .collect(toList());
    if (!filterAttributes.isEmpty()) {
        HTMLElement inputGroup;
        controlContainer.appendChild(div().css(formGroup, toolbarPfFilter)
                .add(filterLabel = label()
                        .css(srOnly)
                        .apply(l -> l.htmlFor = Ids.TOOLBAR_FILTER).element())
                .add(inputGroup = div().css(CSS.inputGroup).element()).element());
        // A dropdown to pick the filter attribute is only needed when there is a choice.
        if (filterAttributes.size() > 1) {
            inputGroup.appendChild(div().css(inputGroupBtn)
                    .add(button().css(btn, btnDefault, dropdownToggle)
                            .data(UIConstants.TOGGLE, UIConstants.DROPDOWN)
                            .aria(UIConstants.HAS_POPUP, UIConstants.TRUE)
                            .aria(UIConstants.EXPANDED, UIConstants.FALSE)
                            .add(filterButtonText = span().css(marginRight5).element())
                            .add(span().css(caret)))
                    .add(filterUl = ul().css(dropdownMenu).element()).element());
            for (Attribute<T> attribute : filterAttributes) {
                filterUl.appendChild(li()
                        .data(DATA_FILTER, attribute.name)
                        .add(a().css(clickable)
                                .on(click, e -> setSelectedFilter(attribute))
                                .textContent(attribute.title))
                        .element());
            }
        }
        inputGroup.appendChild(filterInput = input(text)
                .css(formControl)
                .id(Ids.TOOLBAR_FILTER).element());
    }

    // sort
    List<Attribute<T>> sortAttributes = attributes.stream()
            .filter(attribute -> attribute.comparator != null)
            .collect(toList());
    if (!sortAttributes.isEmpty()) {
        HTMLElement formGroup;
        controlContainer.appendChild(formGroup = div().css(CSS.formGroup).element());
        if (sortAttributes.size() > 1) {
            formGroup.appendChild(div().css(dropdown, btnGroup)
                    .add(button().css(btn, btnDefault, dropdownToggle)
                            .data(UIConstants.TOGGLE, UIConstants.DROPDOWN)
                            .aria(UIConstants.HAS_POPUP, UIConstants.TRUE)
                            .aria(UIConstants.EXPANDED, UIConstants.FALSE)
                            .add(sortButtonText = span().css(marginRight5).element())
                            .add(span().css(caret)))
                    .add(sortUl = ul().css(dropdownMenu).element()).element());
            for (Attribute<T> attribute : sortAttributes) {
                sortUl.appendChild(li()
                        .data(DATA_SORT, attribute.name)
                        .add(a().css(clickable)
                                .on(click, e -> sort(attribute))
                                .textContent(attribute.title))
                        .element());
            }
        } else {
            // Single sort attribute: show it as static text, no dropdown.
            formGroup.appendChild(sortStaticText = span().css(formControlStatic).element());
        }
        asc = true;
        formGroup.appendChild(button().css(btn, btnLink)
                .apply(b -> b.type = UIConstants.BUTTON)
                .on(click, e -> toggleSortOrder())
                .add(sortOrderIcon = span().element()).element());
    }

    // actions: first three as buttons, the rest inside a kebab dropdown
    if (!actions.isEmpty()) {
        HTMLElement actionsContainer;
        controlContainer.appendChild(div().css(toolbarPfActionRight)
                .add(actionsContainer = div().css(formGroup).element()).element());
        int i = 0;
        HTMLElement ul = null;
        for (Iterator<Action> iterator = actions.iterator(); iterator.hasNext(); i++) {
            Action action = iterator.next();
            String actionId = Ids.build(Ids.TOOLBAR, "actions", action.id);
            if (i < 3) {
                actionsContainer.appendChild(button()
                        .css(btn, btnDefault)
                        .id(actionId)
                        .textContent(action.text)
                        .title(action.title)
                        .on(click, e -> action.callback.execute())
                        .apply(b -> b.type = UIConstants.BUTTON).element());
                // After the third button, create the dropdown that receives the rest.
                if (i == 2) {
                    actionsContainer.appendChild(div().css(dropdown, btnGroup, dropdownKebabPf)
                            .add(button().css(btn, btnLink, dropdownToggle)
                                    .id(Ids.TOOLBAR_ACTION_DROPDOWN)
                                    .data(UIConstants.TOGGLE, UIConstants.DROPDOWN)
                                    .aria(UIConstants.HAS_POPUP, UIConstants.TRUE)
                                    .aria(UIConstants.EXPANDED, UIConstants.FALSE)
                                    .add(span().css(fontAwesome("ellipsis-v"))))
                            .add(ul = ul().css(dropdownMenu, dropdownMenuRight)
                                    .aria(UIConstants.LABELLED_BY, Ids.TOOLBAR_ACTION_DROPDOWN).element())
                            .element());
                }
            } else {
                ul.appendChild(li()
                        .add(a().css(clickable)
                                .on(click, e -> action.callback.execute())
                                .textContent(action.text))
                        .element());
            }
        }
    }
    // search and change view not yet implemented!

    // results
    resultContainer.appendChild(column(9)
            .add(results = h(5).textContent(MESSAGES.results(0)).element())
            .add(filters = span()
                    .add(p().css(marginRight5).textContent(CONSTANTS.activeFilters()))
                    .add(activeFiltersUl = ul().css(listInline).element())
                    .add(p().add(a()
                            .css(clickable)
                            .textContent(CONSTANTS.clearAllFilters())
                            .on(click, e -> clearAllFilters())))
                    .element())
            .element());
    resultContainer.appendChild(selection = column(3).css(listHalSelected).element());

    // initial reset
    if (filterInput != null) { // only present when there are filterable attributes
        filterInput.value = "";
    }
    Elements.setVisible(filters, false);
    Elements.removeChildrenFrom(activeFiltersUl);
    if (filterAttributes.isEmpty()) {
        selectedFilter = null;
    } else {
        setSelectedFilter(filterAttributes.get(0));
    }
    this.asc = true;
    if (sortOrderIcon != null) { // only present when there are sortable attributes
        sortOrderIcon.className = fontAwesome("sort-alpha-asc");
    }
    if (sortAttributes.isEmpty()) {
        selectedSort = null;
    } else {
        setSelectedSort(sortAttributes.get(0));
    }
    this.results.textContent = MESSAGES.results(0);
}
/** Returns the toolbar's root {@code <div class="row toolbar-pf">} element. */
@Override
public HTMLElement element() {
    return root;
}
@Override
public void attach() {
    // The filter input exists only when at least one attribute is filterable.
    if (filterInput != null) {
        // Throttle typing: forward at most one keyup per 750 ms window to the filter.
        keyUpSubscription = fromEvent(filterInput, keyup)
                .throttleLast(750, MILLISECONDS)
                .subscribe(e -> addOrModifySelectedFilter(selectedFilter));
    }
}
/** Cancels the keyup subscription created in {@link #attach()}. */
@Override
public void detach() {
    if (keyUpSubscription != null) {
        keyUpSubscription.unsubscribe();
        // Clear the reference so repeated detach() calls are no-ops and the
        // subscription can be garbage collected after an attach/detach cycle.
        keyUpSubscription = null;
    }
}
/** Updates the results header with the number of currently visible items. */
@Override
public void showItems(Iterable<T> items, PageInfo pageInfo) {
    results.textContent = MESSAGES.results(pageInfo.getVisible());
}
/**
 * Shows or hides the selection column; for multi-select, renders
 * "n of total selected" style markup.
 */
@Override
public void updateSelection(SelectionInfo selectionInfo) {
    Elements.setVisible(this.selection, selectionInfo.hasSelection());
    if (selectionInfo.hasSelection() && selectionInfo.isMultiSelect()) {
        this.selection.innerHTML = MESSAGES.selected(selectionInfo.getSelectionCount(),
                dataProvider.getPageInfo().getTotal()).asString();
    }
}
// ------------------------------------------------------ event handler

/**
 * Makes the given attribute the active filter: highlights it in the filter
 * dropdown (if present), updates label/button text, and restores any filter
 * value the data provider already holds for it.
 */
private void setSelectedFilter(Attribute<T> attribute) {
    selectedFilter = attribute;
    if (filterUl != null) {
        selectDropdownItem(filterUl, DATA_FILTER, attribute);
    }
    filterLabel.textContent = attribute.title;
    if (filterButtonText != null) {
        filterButtonText.textContent = attribute.title;
    }
    filterInput.value = dataProvider.getFilter(attribute.name).getValue();
    filterInput.placeholder = MESSAGES.filterBy(attribute.title);
}
/**
 * Applies the current filter input to the given attribute. An empty input
 * clears the filter; otherwise the matching "active filter" tag is updated
 * in place or a new tag (title, value, close link) is appended, and the
 * filter is pushed to the data provider.
 */
private void addOrModifySelectedFilter(Attribute<T> attribute) {
    if (Strings.isNullOrEmpty(filterInput.value)) {
        clearFilter(attribute);
    } else {
        // Reuse the existing tag for this attribute if one is already shown.
        Element activeFilterValue = activeFiltersUl.querySelector(
                "span[data-active-filter-value=" + attribute.name + "]"); // NON-NLS
        if (activeFilterValue != null) {
            activeFilterValue.textContent = filterInput.value;
        } else {
            activeFiltersUl.appendChild(li()
                    .data(DATA_ACTIVE_FILTER, attribute.name)
                    .add(span().css(label, labelInfo)
                            .add(span().textContent(attribute.title + ": "))
                            .add(span().data(DATA_ACTIVE_FILTER_VALUE, attribute.name)
                                    .textContent(filterInput.value))
                            .add(a().css(clickable)
                                    .on(click, e -> clearFilter(attribute))
                                    .add(span().css(pfIcon("close")))))
                    .element());
        }
        Elements.setVisible(filters, dataProvider.hasFilters());
        dataProvider.addFilter(attribute.name, new FilterValue<>(attribute.filter, filterInput.value));
    }
}
/**
 * Removes the active filter for {@code attribute} from both the toolbar UI and
 * the data provider.
 *
 * <p>Fix: the filter is removed from the data provider <em>before</em> the
 * visibility of the active-filters bar is computed. Previously
 * {@code hasFilters()} was queried before {@code removeFilter(...)}, so
 * clearing the last remaining filter left the (now empty) bar visible.
 */
private void clearFilter(Attribute<T> attribute) {
    Element activeFilter = activeFiltersUl.querySelector("li[data-active-filter=" + attribute.name + "]"); // NON-NLS
    Elements.failSafeRemove(activeFiltersUl, activeFilter);
    // update the data provider first so hasFilters() reflects the removal
    dataProvider.removeFilter(attribute.name);
    Elements.setVisible(filters, dataProvider.hasFilters());
}
/** Resets the filter input and removes every active filter (UI and data provider). */
public void clearAllFilters() {
    filterInput.value = "";
    Elements.removeChildrenFrom(activeFiltersUl);
    Elements.setVisible(filters, false);
    dataProvider.clearFilters();
}
/** Sorts the items by {@code attribute}, honouring the current sort direction. */
private void sort(Attribute<T> attribute) {
    setSelectedSort(attribute);
    Comparator<T> comparator = asc
            ? selectedSort.comparator
            : selectedSort.comparator.reversed();
    dataProvider.setComparator(comparator);
}
/**
 * Marks {@code attribute} as the current sort attribute and mirrors its title
 * into the sort dropdown / button / static label (whichever are present).
 */
private void setSelectedSort(Attribute<T> attribute) {
    selectedSort = attribute;
    if (sortUl != null) {
        selectDropdownItem(sortUl, DATA_SORT, attribute);
    }
    String title = attribute.title;
    if (sortButtonText != null) {
        sortButtonText.textContent = title;
    }
    if (sortStaticText != null) {
        sortStaticText.textContent = title;
    }
}
/** Flips ascending/descending order, updates the sort icon and re-applies the comparator. */
private void toggleSortOrder() {
    asc = !asc;
    sortOrderIcon.className = fontAwesome(asc ? "sort-alpha-asc" : "sort-alpha-desc");
    Comparator<T> comparator = asc
            ? selectedSort.comparator
            : selectedSort.comparator.reversed();
    dataProvider.setComparator(comparator);
}
/**
 * Highlights the dropdown entry belonging to {@code attribute} (looked up via
 * its {@code data-*} attribute) and clears the highlight from all siblings.
 */
private void selectDropdownItem(HTMLElement ul, String data, Attribute<T> attribute) {
    for (HTMLElement item : Elements.children(ul)) {
        item.classList.remove(selected);
    }
    Element item = ul.querySelector("li[data-" + data + "=" + attribute.name + "]"); // NON-NLS
    if (item != null) {
        item.classList.add(selected);
    }
}
/**
 * Describes one item property usable in the toolbar: a (display) title plus an
 * optional filter and/or an optional comparator for sorting. Identity is based
 * on {@code name} only.
 */
public static class Attribute<T> {

    private final String name;
    private final String title;
    private final Filter<T> filter;
    private final Comparator<T> comparator;

    public Attribute(String name, Filter<T> filter) {
        this(name, new LabelBuilder().label(name), filter, null);
    }

    public Attribute(String name, Comparator<T> comparator) {
        this(name, new LabelBuilder().label(name), null, comparator);
    }

    public Attribute(String name, Filter<T> filter, Comparator<T> comparator) {
        this(name, new LabelBuilder().label(name), filter, comparator);
    }

    public Attribute(String name, String title, Filter<T> filter) {
        this(name, title, filter, null);
    }

    public Attribute(String name, String title, Comparator<T> comparator) {
        this(name, title, null, comparator);
    }

    /** Canonical constructor; all other constructors delegate here. */
    public Attribute(String name, String title, Filter<T> filter, Comparator<T> comparator) {
        this.name = name;
        this.title = title;
        this.filter = filter;
        this.comparator = comparator;
    }

    /** Two attributes are equal iff their names are equal. */
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o instanceof Toolbar.Attribute) {
            return name.equals(((Attribute<?>) o).name);
        }
        return false;
    }

    @Override
    public int hashCode() {
        return name.hashCode();
    }

    @Override
    public String toString() {
        return "Toolbar.Attribute(" + name + ")";
    }
}
/**
 * A toolbar action: an id, button text, optional tooltip title and a callback,
 * optionally guarded by security constraints.
 */
public static class Action {

    private final String id;
    private final String text;
    private final String title;
    private final Constraints constraints;
    private final Callback callback;

    public Action(String id, String text, Callback callback) {
        this(id, text, null, Constraints.empty(), callback);
    }

    public Action(String id, String text, String title, Callback callback) {
        this(id, text, title, Constraints.empty(), callback);
    }

    public Action(String id, String text, Constraint constraint, Callback callback) {
        this(id, text, null, Constraints.single(constraint), callback);
    }

    /** Canonical constructor; a {@code null} title is normalized to the empty string. */
    public Action(String id, String text, String title, Constraints constraints, Callback callback) {
        this.id = id;
        this.text = text;
        this.title = (title == null) ? "" : title;
        this.constraints = constraints;
        this.callback = callback;
    }

    public Constraints getConstraints() {
        return constraints;
    }
}
}
| |
/*******************************************************************************
* Copyright (c) 2016, Eyck Jentzsch and others
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of GCodeFXViewer nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************/
package com.itjw.gcodefx;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.controlsfx.dialog.ProgressDialog;
import org.fxmisc.richtext.CodeArea;
import org.fxmisc.richtext.LineNumberFactory;
import org.fxmisc.richtext.NavigationActions.SelectionPolicy;
import org.fxmisc.richtext.StyleSpansBuilder;
import org.reactfx.Change;
import org.reactfx.EventStream;
import org.reactfx.EventStreams;
import com.itjw.gcode.AbstractGCode;
import com.itjw.gcode.GCodeReader;
import com.itjw.gcodefx.Xform.RotateOrder;
import com.itjw.gcodefx.model.ContentModel;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.Timeline;
import javafx.application.Application.Parameters;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.value.ObservableValue;
import javafx.concurrent.Service;
import javafx.concurrent.Task;
import javafx.concurrent.WorkerStateEvent;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.geometry.Point3D;
import javafx.scene.Group;
import javafx.scene.Node;
import javafx.scene.PerspectiveCamera;
import javafx.scene.SubScene;
import javafx.scene.control.Accordion;
import javafx.scene.control.Button;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.TextArea;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Pane;
import javafx.scene.layout.VBox;
import javafx.stage.FileChooser;
/**
 * JavaFX controller for the GCodeFX main window. Wires together the G-code
 * editor (a RichTextFX {@link CodeArea} with regex-based syntax highlighting),
 * the 3D sub-scene showing the generated print layers, the settings panel and
 * the playback timeline. {@code @FXML} fields are injected by the FXMLLoader
 * before {@link #initialize(URL, ResourceBundle)} runs.
 */
public class MainViewController implements Initializable {

    private static final Logger logger = Logger.getLogger(MainViewController.class.getName());

    // Regex fragments for G-code syntax highlighting, combined into one pattern
    // with named groups so the matcher can report which token class matched.
    private static final String COMMAND_PATTERN = "\\b([GM][0-9]+)\\b";
    private static final String PARAMETER_PATTERN = "\\b([XYZIJEF][0-9.]+)\\b";
    private static final String STRING_PATTERN = "\"([^\"\\\\]|\\\\.)*\"";
    private static final String COMMENT_PATTERN = ";[^\n]*";
    private static final Pattern PATTERN = Pattern.compile(
            "(?<COMMAND>" + COMMAND_PATTERN + ")"
            + "|(?<PARAMETER>" + PARAMETER_PATTERN + ")"
            + "|(?<STRING>" + STRING_PATTERN + ")"
            + "|(?<COMMENT>" + COMMENT_PATTERN + ")"
    );

    // --- FXML-injected UI containers ---
    @FXML
    private VBox vboxContainer;
    @FXML
    private TextArea logView;            // log output pane (receives java.util.logging output)
    @FXML
    private ScrollPane editorContainer;  // hosts the CodeArea
    @FXML
    private Pane viewContainer;          // hosts the 3D SubScene
    @FXML
    private AnchorPane settingsContainer;
    private SettingsController settingsController;
    @FXML
    private TimelineDisplay timelineDisplay;

    Timeline theTimeline = new Timeline();
    // selectedLayer drives per-layer visibility (animation type 1);
    // selectedLine drives the editor caret (animation type 2)
    private SimpleIntegerProperty selectedLayer = new SimpleIntegerProperty(0);
    private SimpleIntegerProperty selectedLine = new SimpleIntegerProperty(0);
    // 0 = rotate camera, 1 = reveal layers, 2 = follow lines (see compile())
    int animationType=1;

    // playback control buttons, injected by FXML (public for injection)
    public Button startBtn;
    public Button rwBtn;
    public ToggleButton playBtn;
    public Button ffBtn;
    public Button endBtn;
    public ToggleButton loopBtn;

    private final CodeArea codeArea = new CodeArea();
    private SubScene subScene;
    final PerspectiveCamera camera = new PerspectiveCamera(true);
    final Xform cameraXform =new Xform(RotateOrder.XYZ);
    final double cameraDistance = 650;
    // group holding one child Group per printed layer
    final Group printerSpace = new Group();
    {
        printerSpace.setId("printerSpace");
    }
    JavaFXMachineProcessor jfxProcessor = new JavaFXMachineProcessor();
    private String inputFileName;
    private TextAreaStream textAreaStream;
    private TimelineController timelineController;

    /**
     * Initializes the controller class.
     *
     * @param url
     * @param rb
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        try {
            // CREATE SETTINGS PANEL
            FXMLLoader fxmlLoader = new FXMLLoader(GCodeFXViewer.class.getResource("settings.fxml"));
            Accordion settingsPanel = fxmlLoader.load();
            settingsController = fxmlLoader.getController();
            // SETUP SPLIT PANE
            settingsContainer.getChildren().add(settingsPanel);
            AnchorPane.setLeftAnchor(settingsPanel, 0d);
            AnchorPane.setTopAnchor(settingsPanel, 0d);
            AnchorPane.setRightAnchor(settingsPanel, 0d);
            AnchorPane.setBottomAnchor(settingsPanel, 0d);
        } catch (IOException e) {
            logger.log(Level.SEVERE, null, e);
        }
        // setup logging: route the root logger into the log view text area
        textAreaStream = new TextAreaStream(getLogView());
        Logger rootLogger = Logger.getLogger("");
        rootLogger.addHandler(textAreaStream.getLogHandler());
        // System.setErr(textAreaStream); // redirect System.err
        // System.setOut(textAreaStream);
        final ContentModel contentModel = GCodeFXViewer.getContentModel();
        // create timelineController;
        timelineController = new TimelineController(startBtn,rwBtn,playBtn,ffBtn,endBtn,loopBtn);
        timelineController.timelineProperty().bind(contentModel.timelineProperty());
        timelineDisplay.timelineProperty().bind(contentModel.timelineProperty());
        // show only the first n layer groups when selectedLayer changes
        selectedLayer.addListener((ov, o, n)->{
            if(n!=null && n.intValue()>0){
                int i=0;
                for(Node node: printerSpace.getChildren()){
                    node.setVisible(n.intValue()>i++);
                }
            }
        });
        // move the editor caret when selectedLine changes
        selectedLine.addListener((ov, o, n)->{
            if(n!=null && n.intValue()>0){
                codeArea.moveTo(codeArea.position(n.intValue(), 1).toOffset());
            }
        });
        // setup the code area: recompute syntax-highlight style spans on every text change
        codeArea.textProperty().addListener(
                (ov, oldText, newText) -> {
                    int lastKwEnd = 0;
                    StyleSpansBuilder<Collection<String>> spansBuilder = new StyleSpansBuilder<>();
                    Matcher matcher = PATTERN.matcher(newText);
                    while (matcher.find()) {
                        String styleClass =
                                matcher.group("COMMAND") != null ? "command" :
                                matcher.group("PARAMETER") != null ? "parameter" :
                                matcher.group("STRING") != null ? "string" :
                                matcher.group("COMMENT") != null ? "comment" :
                                null; /* never happens */
                        assert styleClass != null;
                        // unstyled gap before the match, then the styled match itself
                        spansBuilder.add(Collections.emptyList(), matcher.start() - lastKwEnd);
                        spansBuilder.add(Collections.singleton(styleClass), matcher.end() - matcher.start());
                        lastKwEnd = matcher.end();
                    }
                    spansBuilder.add(Collections.emptyList(), newText.length() - lastKwEnd);
                    codeArea.setStyleSpans(0, spansBuilder.create());
                });
        // debounce text changes: re-parse the gcode at most every 500 ms
        EventStream<Change<String>> textEvents = EventStreams.changesOf(codeArea.textProperty());
        textEvents.reduceSuccessions((a, b) -> b, Duration.ofMillis(500)).subscribe(code -> {
            compile(code.getNewValue());
        });
        codeArea.replaceText("G1 X230 Y25 Z0.35 F5000\nG1 X20 E25 F1000\n");
        //codeArea.replaceText("G0 F6000 X119.175 Y115.993 Z25.000\nM104 S0\nM140 S0\nG91\nG1 E-1 F300\nG1 Z+0.5 E-5 X-20 Y-20 F6000\nG28 X0 Y0\nM84\nG90\n");
        codeArea.setParagraphGraphicFactory(LineNumberFactory.get(codeArea));
        // highlight the 3D node belonging to the caret's current line
        codeArea.currentParagraphProperty().addListener(
                (ObservableValue<? extends Integer> observable, Integer oldValue, Integer newValue)->{
                    GCodeFXViewer.getContentModel().setHighlight(jfxProcessor.getNode4Line(newValue+1));
                });
        editorContainer.setContent(codeArea);
        // setup the 3D view area
        setSubScene();
        contentModel.subSceneProperty().addListener((ov, oldVal, newVal)->{
            viewContainer.getChildren().clear();
            setSubScene();
        });
        // selecting a gcode node in the 3D view moves the editor caret to its line
        contentModel.selectedGcodeProperty().addListener((ov, oldVal, newVal)->{
            if(newVal!=null)
                codeArea.moveTo(codeArea.position(newVal.getLineNo()-1, 0).toOffset());
        });
        settingsController.firstLayerProperty().addListener((ov, oldVal, newVal)->{
            updateLayerVisibility();
        });
        settingsController.lastLayerProperty().addListener((ov, oldVal, newVal)->{
            updateLayerVisibility();
        });
    }

    /** (Re)attaches the content model's SubScene to the view container, bound to its size. */
    private void setSubScene() {
        subScene = GCodeFXViewer.getContentModel().getSubScene();
        subScene.widthProperty().bind(viewContainer.widthProperty());
        subScene.heightProperty().bind(viewContainer.heightProperty());
        viewContainer.getChildren().add(subScene);
    }

    /** Shows only the layers within the [firstLayer, lastLayer] range from the settings panel. */
    private void updateLayerVisibility(){
        int i=1;
        double first = settingsController.getFirstLayer();
        double last = settingsController.getLastLayer();
        for(Node n: printerSpace.getChildren()){
            n.setVisible(i>=first && i<=last);
            i++;
        }
    }

    /**
     * Background service that parses G-code text and builds the per-layer
     * 3D node groups via the given JavaFXMachineProcessor.
     */
    private static class GcodeParseService extends Service<List<Layer>> {
        private final String code;
        private final JavaFXMachineProcessor jfxProcessor;
        public GcodeParseService(JavaFXMachineProcessor jfxProcessor, String code) {
            super();
            this.jfxProcessor=jfxProcessor;
            this.code = code;
        }
        // NOTE(review): overrides Service.createTask but lacks @Override
        protected Task<List<Layer>> createTask() {
            return new Task<List<Layer>>() {
                protected List<Layer> call() throws IOException {
                    List<AbstractGCode> gcodes = GCodeReader.parseStrings(code);
                    if(gcodes != null){
                        updateMessage("Generating 3D objects");
                        int size = gcodes.size();
                        int i=1;
                        try{
                            for(AbstractGCode gcode:gcodes){
                                updateProgress(i++, size);
                                gcode.process(jfxProcessor);
                            }
                        } catch(Throwable t){
                            logger.log(Level.SEVERE, "Failure parsing gcode", t);
                        }
                        return (jfxProcessor.getGcodeGroup());
                    }
                    // parser returned nothing: report no layers
                    return null;
                }
            };
        }
    }

    /**
     * Parses {@code code} in the background, replaces the 3D content with the
     * resulting layers and builds the playback timeline according to
     * {@link #animationType}. Progress is shown in a modal ProgressDialog.
     */
    private void compile(final String code) {
        logView.setText("");
        printerSpace.getChildren().clear();
        GCodeFXViewer.getContentModel().setTimeline(null);
        jfxProcessor.initialize();
        GcodeParseService service = new GcodeParseService(jfxProcessor,code);
        service.setOnSucceeded(new EventHandler<WorkerStateEvent>() {
            @SuppressWarnings({ "unchecked" })
            @Override
            public void handle(WorkerStateEvent t) {
                logger.log(Level.INFO, "File "+inputFileName+" loaded");
                Object res = t.getSource().getValue();
                if(res!=null && res instanceof List<?>){
                    printerSpace.getChildren().addAll((List<? extends Node>)res);
                    Point3D dim = jfxProcessor.getPlateDimensions();
                    ContentModel contentModel = GCodeFXViewer.getContentModel();
                    contentModel.setContent(printerSpace);
                    if(dim!=null) contentModel.setDimension(dim);
                    contentModel.resetCamera(true);
                    int count = printerSpace.getChildren().size()+1;
                    settingsController.setFirstLayerValues(count, 1);
                    settingsController.setLastLayerValues(count, count);
                    logger.log(Level.INFO, "Added "+printerSpace.getChildren().size()+" layer(s)");
                    theTimeline.setCycleCount(Timeline.INDEFINITE);
                    theTimeline.setAutoReverse(false);
                    KeyFrame keyFrame=null;
                    switch(animationType){
                    case 0:{
                        // rotate the camera 360 degrees in 30 seconds
                        KeyValue keyValue = new KeyValue(contentModel.getCameraRotate().rz.angleProperty(), 360);
                        keyFrame = new KeyFrame(javafx.util.Duration.seconds(30), keyValue);
                    }
                    break;
                    case 1:{
                        // reveal layers one by one (2 layers per second)
                        KeyValue keyValue = new KeyValue(selectedLayer, printerSpace.getChildren().size());
                        keyFrame = new KeyFrame(javafx.util.Duration.seconds(printerSpace.getChildren().size()/2), keyValue);
                    }
                    break;
                    case 2:{
                        // TODO: needs implementation
                        KeyValue keyValue = new KeyValue(selectedLine, printerSpace.getChildren().size());
                        keyFrame = new KeyFrame(javafx.util.Duration.seconds(printerSpace.getChildren().size()/2), keyValue);
                    }
                    break;
                    default:
                        break;
                    }
                    //add the keyframe to the timeline
                    if(keyFrame!=null) theTimeline.getKeyFrames().add(keyFrame);
                    contentModel.setTimeline(theTimeline);
                }
            }
        });
        ProgressDialog pd=new ProgressDialog(service);
        pd.setTitle("Loading model");
        pd.setHeaderText("Progress parsing and generating model");
        service.start();
    }

    /**
     * Returns the location of the Jar archive or .class file the specified
     * class has been loaded from. <b>Note:</b> this only works if the class is
     * loaded from a jar archive or a .class file on the locale file system.
     *
     * @param cls class to locate
     * @return the location of the Jar archive the specified class comes from
     */
    public static File getClassLocation(Class<?> cls) {
        String className = cls.getName();
        ClassLoader cl = cls.getClassLoader();
        URL url = cl.getResource(className.replace(".", "/") + ".class");
        String urlString = url.toString().replace("jar:", "");
        if (!urlString.startsWith("file:")) {
            throw new IllegalArgumentException("The specified class\"" + cls.getName() + "\" has not been loaded from a location on the local filesystem.");
        }
        urlString = urlString.replace("file:", "");
        urlString = urlString.replace("%20", " ");
        // strip the in-jar path ("...!/pkg/Cls.class") down to the jar itself
        int location = urlString.indexOf(".jar!");
        if (location > 0) {
            urlString = urlString.substring(0, location) + ".jar";
        }
        return new File(urlString);
    }

    /** Menu handler: lets the user pick a G-code file and loads it into the editor. */
    @FXML
    private void onLoadFile(ActionEvent e) {
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Open GCode File");
        fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("GCode files (*.gcode, *.gc)", "*.gcode", "*.gc"));
        File f = fileChooser.showOpenDialog(null);
        if (f == null) {
            return;
        }
        loadFile( f.getAbsolutePath());
        inputFileName=f.getAbsolutePath();
    }

    /** Menu handler: saves the editor content as UTF-8 to a user-chosen file. */
    @FXML
    private void onSaveFile(ActionEvent e) {
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("Save GCode File");
        fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("GCode files (*.gcode, *.gc)", "*.gcode", "*.gc"));
        if(inputFileName!=null) fileChooser.setInitialFileName(inputFileName);
        File f = fileChooser.showSaveDialog(null);
        if (f == null) return;
        String fName = f.getAbsolutePath();
        try {
            Files.write(Paths.get(fName), codeArea.getText().getBytes("UTF-8"));
        } catch (IOException ex) {
            Logger.getLogger(MainViewController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Menu handler: terminates the application. */
    @FXML
    private void onClose(ActionEvent e) {
        System.exit(0);
    }

    /** Menu handler: resets the 3D camera to its default position. */
    @FXML
    private void onResetPerspective(ActionEvent e) {
        GCodeFXViewer.getContentModel().resetCamera(true);
    }

    public TextArea getLogView() {
        return logView;
    }

    public CodeArea getCodeArea(){
        return codeArea;
    }

    /** Reads {@code fName} as UTF-8 into the editor and resets the caret to the start. */
    private void loadFile(String fName) {
        try {
            codeArea.replaceText(new String(Files.readAllBytes(Paths.get(fName)), "UTF-8"));
            codeArea.start(SelectionPolicy.CLEAR);
        } catch (IOException ex) {
            Logger.getLogger(MainViewController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Loads the first unnamed command-line argument as the initial G-code file, if present. */
    public void setArguments(Parameters parameters) {
        if(!parameters.getUnnamed().isEmpty()){
            inputFileName=parameters.getUnnamed().get(0);
            loadFile(inputFileName);
        }
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.3 (wire version 4) FLOW stats reply message.
 *
 * <p>Generated by the LoxiGen compiler from the of_class.java template — per
 * the file header this class must not be modified by hand; this review pass
 * only adds comments. Instances are immutable; use {@link #createBuilder()} or
 * the static {@code Builder} to derive modified copies.
 */
class OFFlowStatsReplyVer13 implements OFFlowStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFFlowStatsReplyVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int MINIMUM_LENGTH = 16;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static List<OFFlowStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFFlowStatsEntry>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFFlowStatsEntry> entries;
    //
    // Immutable default instance
    final static OFFlowStatsReplyVer13 DEFAULT = new OFFlowStatsReplyVer13(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFFlowStatsReplyVer13(long xid, Set<OFStatsReplyFlags> flags, List<OFFlowStatsEntry> entries) {
        if(flags == null) {
            throw new NullPointerException("OFFlowStatsReplyVer13: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFFlowStatsReplyVer13: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.FLOW;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public List<OFFlowStatsEntry> getEntries() {
        return entries;
    }

    /** Returns a builder pre-populated with this message's field values. */
    public OFFlowStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that falls back to a parent message's values for unset fields. */
    static class BuilderWithParent implements OFFlowStatsReply.Builder {
        final OFFlowStatsReplyVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFFlowStatsEntry> entries;

        BuilderWithParent(OFFlowStatsReplyVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFFlowStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.FLOW;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFFlowStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public List<OFFlowStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFFlowStatsReply.Builder setEntries(List<OFFlowStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFFlowStatsReply build() {
            // for each property: use the explicitly set value, else the parent's
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFFlowStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");
            //
            return new OFFlowStatsReplyVer13(
                xid,
                flags,
                entries
            );
        }
    }

    /** Builder that falls back to the class-level defaults for unset fields. */
    static class Builder implements OFFlowStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFFlowStatsEntry> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFFlowStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.FLOW;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFFlowStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public List<OFFlowStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFFlowStatsReply.Builder setEntries(List<OFFlowStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        //
        @Override
        public OFFlowStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFFlowStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");
            return new OFFlowStatsReplyVer13(
                xid,
                flags,
                entries
            );
        }
    }

    final static Reader READER = new Reader();

    /** Deserializes a FLOW stats reply from the wire; returns null on a partial buffer. */
    static class Reader implements OFMessageReader<OFFlowStatsReply> {
        @Override
        public OFFlowStatsReply readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 1
            short statsType = bb.readShort();
            if(statsType != (short) 0x1)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.FLOW(1), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer13.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // entries consume the remainder of the message body
            List<OFFlowStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFFlowStatsEntryVer13.READER);
            OFFlowStatsReplyVer13 flowStatsReplyVer13 = new OFFlowStatsReplyVer13(
                xid,
                flags,
                entries
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", flowStatsReplyVer13);
            return flowStatsReplyVer13;
        }
    }

    /** Feeds this message's identity-relevant fields into a Guava hashing sink. */
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFFlowStatsReplyVer13Funnel FUNNEL = new OFFlowStatsReplyVer13Funnel();
    static class OFFlowStatsReplyVer13Funnel implements Funnel<OFFlowStatsReplyVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFFlowStatsReplyVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 1
            sink.putShort((short) 0x1);
            OFStatsReplyFlagsSerializerVer13.putTo(message.flags, sink);
            // skip pad (4 bytes)
            FunnelUtils.putList(message.entries, sink);
        }
    }

    /** Serializes this message to the wire via {@link Writer}. */
    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFFlowStatsReplyVer13> {
        @Override
        public void write(ChannelBuffer bb, OFFlowStatsReplyVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 1
            bb.writeShort((short) 0x1);
            OFStatsReplyFlagsSerializerVer13.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            ChannelUtils.writeList(bb, message.entries);
            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFFlowStatsReplyVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("entries=").append(entries);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFFlowStatsReplyVer13 other = (OFFlowStatsReplyVer13) obj;
        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (entries == null) {
            if (other.entries != null)
                return false;
        } else if (!entries.equals(other.entries))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): the first term discards the initial `result` (the common
        // pattern is `prime * result + ...`). Equal objects still hash equally,
        // so the equals/hashCode contract holds; confirm against the LoxiGen
        // template before changing this generated code.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        return result;
    }
}
| |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pivotal.receptor.commands;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import io.pivotal.receptor.actions.RunAction;
import io.pivotal.receptor.support.EnvironmentVariable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* @author Mark Fisher
*/
/**
 * Request payload for creating a task via the Diego receptor API. Java field
 * names are mapped to the receptor's snake_case JSON keys via
 * {@code @JsonProperty}; fields without an annotation serialize under their
 * own name.
 */
public class TaskCreateRequest {

    @JsonProperty("task_guid")
    private String taskGuid;
    private String domain = "lattice";   // default task domain
    private String stack = "lucid64";    // default stack
    private String rootfs;
    private EnvironmentVariable[] env = new EnvironmentVariable[] {};
    @JsonProperty("cpu_weight")
    private int cpuWeight;
    @JsonProperty("disk_mb")
    private int diskMb;
    @JsonProperty("memory_mb")
    private int memoryMb;
    private boolean privileged;
    // Excluded from direct serialization; exposed as {"run": ...} through
    // getAction()/setAction() below.
    // NOTE(review): public mutable field — callers can bypass setAction();
    // consider making it private with an accessor.
    @JsonIgnore
    public RunAction runAction = new RunAction();
    @JsonProperty("result_file")
    private String resultFile;
    @JsonProperty("completion_callback_url")
    private String completionCallbackUrl;
    @JsonProperty("log_guid")
    private String logGuid;
    @JsonProperty("log_source")
    private String logSource;
    private String annotation;
    @JsonProperty("egress_rules")
    private EgressRule[] egressRules = new EgressRule[] {};

    // --- plain accessors ---

    public String getTaskGuid() {
        return taskGuid;
    }

    public void setTaskGuid(String taskGuid) {
        this.taskGuid = taskGuid;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public String getStack() {
        return stack;
    }

    public void setStack(String stack) {
        this.stack = stack;
    }

    public String getRootfs() {
        return rootfs;
    }

    public void setRootfs(String rootfs) {
        this.rootfs = rootfs;
    }

    public EnvironmentVariable[] getEnv() {
        return env;
    }

    public void setEnv(EnvironmentVariable[] env) {
        this.env = env;
    }

    public int getCpuWeight() {
        return cpuWeight;
    }

    public void setCpuWeight(int cpuWeight) {
        this.cpuWeight = cpuWeight;
    }

    public int getDiskMb() {
        return diskMb;
    }

    public void setDiskMb(int diskMb) {
        this.diskMb = diskMb;
    }

    public int getMemoryMb() {
        return memoryMb;
    }

    public void setMemoryMb(int memoryMb) {
        this.memoryMb = memoryMb;
    }

    public boolean isPrivileged() {
        return privileged;
    }

    public void setPrivileged(boolean privileged) {
        this.privileged = privileged;
    }

    /** Serializes the run action as the receptor's {"run": {...}} action wrapper. */
    public Map<String, RunAction> getAction() {
        return Collections.singletonMap("run", runAction);
    }

    public void setAction(Map<String, RunAction> action) {
        // NOTE(review): assumes the map contains a "run" key; runAction becomes
        // null otherwise — confirm against the receptor JSON format.
        this.runAction = action.get("run");
    }

    public String getResultFile() {
        return resultFile;
    }

    public void setResultFile(String resultFile) {
        this.resultFile = resultFile;
    }

    public String getCompletionCallbackUrl() {
        return completionCallbackUrl;
    }

    public void setCompletionCallbackUrl(String completionCallbackUrl) {
        this.completionCallbackUrl = completionCallbackUrl;
    }

    public String getLogGuid() {
        return logGuid;
    }

    public void setLogGuid(String logGuid) {
        this.logGuid = logGuid;
    }

    public String getLogSource() {
        return logSource;
    }

    public void setLogSource(String logSource) {
        this.logSource = logSource;
    }

    public String getAnnotation() {
        return annotation;
    }

    public void setAnnotation(String annotation) {
        this.annotation = annotation;
    }

    public EgressRule[] getEgressRules() {
        return egressRules;
    }

    public void setEgressRules(EgressRule[] egressRules) {
        this.egressRules = egressRules;
    }
@Override
public String toString() {
return "TaskCreateRequest [taskGuid=" + taskGuid + ", domain=" + domain + ", stack=" + stack + ", rootfs="
+ rootfs + ", env=" + Arrays.toString(env) + ", cpuWeight=" + cpuWeight + ", diskMb=" + diskMb
+ ", memoryMb=" + memoryMb + ", privileged=" + privileged + ", runAction=" + runAction
+ ", resultFile=" + resultFile + ", completionCallbackUrl=" + completionCallbackUrl + ", logGuid="
+ logGuid + ", logSource=" + logSource + ", annotation=" + annotation + ", egressRules="
+ Arrays.toString(egressRules) + "]";
}
public static class EgressRule {
private String protocol;
private String[] destinations = new String[] {};
private PortRange portRange;
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public String[] getDestinations() {
return destinations;
}
public void setDestinations(String[] destinations) {
this.destinations = destinations;
}
public PortRange getPortRange() {
return portRange;
}
public void setPortRange(PortRange portRange) {
this.portRange = portRange;
}
@Override
public String toString() {
return "EgressRule [protocol=" + protocol + ", destinations=" + Arrays.toString(destinations)
+ ", portRange=" + portRange + "]";
}
}
public static class PortRange {
private int start;
private int end;
public int getStart() {
return start;
}
public void setStart(int start) {
this.start = start;
}
public int getEnd() {
return end;
}
public void setEnd(int end) {
this.end = end;
}
@Override
public String toString() {
return "PortRange [start=" + start + ", end=" + end + "]";
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.util;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertThat;
import org.apache.beam.sdk.transforms.Combine.CombineFn;
import org.apache.beam.sdk.transforms.DoFnTester;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFns;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.Sessions;
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TimestampedValue;
import org.apache.beam.sdk.values.TupleTag;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.joda.time.Duration;
import org.joda.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
* Properties of {@link GroupAlsoByWindowsDoFn}.
*
* <p>Some properties may not hold of some implementations, due to restrictions on the context
* in which the implementation is applicable. For example, some {@code GroupAlsoByWindows} may not
* support merging windows.
*/
/**
 * Properties of {@link GroupAlsoByWindowsDoFn}.
 *
 * <p>Some properties may not hold of some implementations, due to restrictions on the context
 * in which the implementation is applicable. For example, some {@code GroupAlsoByWindows} may not
 * support merging windows.
 */
public class GroupAlsoByWindowsProperties {

    /**
     * A factory of {@link GroupAlsoByWindowsDoFn} so that the various properties can provide
     * the appropriate windowing strategy under test.
     */
    public interface GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> {
        <W extends BoundedWindow> GroupAlsoByWindowsDoFn<K, InputT, OutputT, W>
        forStrategy(WindowingStrategy<?, W> strategy);
    }

    /**
     * Tests that for empty input and the given {@link WindowingStrategy}, the provided GABW
     * implementation produces no output.
     *
     * <p>The input type is deliberately left as a wildcard, since it is not relevant.
     */
    public static <K, InputT, OutputT> void emptyInputEmptyOutput(
            GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

        DoFnTester<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> result = runGABW(
            gabwFactory,
            windowingStrategy,
            (K) null, // key should never be used
            Collections.<WindowedValue<InputT>>emptyList());

        assertThat(result.peekOutputElements(), hasSize(0));
    }

    /**
     * Tests that for a simple sequence of elements on the same key, the given GABW implementation
     * correctly groups them according to fixed windows.
     */
    public static void groupsElementsIntoFixedWindows(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

        // v1 and v2 fall in [0, 10); v3 falls in [10, 20).
        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "key",
                WindowedValue.of(
                    "v1",
                    new Instant(1),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(2),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(13),
                    Arrays.asList(window(10, 20)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 10)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(window(0, 10).maxTimestamp()));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(10, 20)));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(window(10, 20).maxTimestamp()));
    }

    /**
     * Tests that for a simple sequence of elements on the same key, the given GABW implementation
     * correctly groups them into sliding windows.
     *
     * <p>In the input here, each element occurs in multiple windows.
     */
    public static void groupsElementsIntoSlidingWindowsWithMinTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy = WindowingStrategy.of(
            SlidingWindows.of(Duration.millis(20)).every(Duration.millis(10)))
            .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp());

        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "key",
                WindowedValue.of(
                    "v1",
                    new Instant(5),
                    Arrays.asList(window(-10, 10), window(0, 20)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(15),
                    Arrays.asList(window(0, 20), window(10, 30)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(3));

        // With outputAtEarliestInputTimestamp, each window's output timestamp is the earliest
        // input timestamp shifted to be no earlier than the end of any earlier pane
        // (hence Instant(10) for [0, 20) and Instant(20) for [10, 30)).
        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(-10, 10)));
        assertThat(item0.getValue().getValue(), contains("v1"));
        assertThat(item0.getTimestamp(), equalTo(new Instant(5)));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 20)));
        assertThat(item1.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item1.getTimestamp(), equalTo(new Instant(10)));

        TimestampedValue<KV<String, Iterable<String>>> item2 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(10, 30)));
        assertThat(item2.getValue().getValue(), contains("v2"));
        assertThat(item2.getTimestamp(), equalTo(new Instant(20)));
    }

    /**
     * Tests that for a simple sequence of elements on the same key, the given GABW implementation
     * correctly groups and combines them according to sliding windows.
     *
     * <p>In the input here, each element occurs in multiple windows.
     */
    public static void combinesElementsInSlidingWindows(
            GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
            CombineFn<Long, ?, Long> combineFn)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(SlidingWindows.of(Duration.millis(20)).every(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp());

        DoFnTester<KV<String, Iterable<WindowedValue<Long>>>, KV<String, Long>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    1L,
                    new Instant(5),
                    Arrays.asList(window(-10, 10), window(0, 20)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    2L,
                    new Instant(15),
                    Arrays.asList(window(0, 20), window(10, 30)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    4L,
                    new Instant(18),
                    Arrays.asList(window(0, 20), window(10, 30)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(3));

        // Expected values are computed through the supplied CombineFn rather than hard-coded,
        // so these properties hold for any associative/commutative combine.
        TimestampedValue<KV<String, Long>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(-10, 10)));
        assertThat(item0.getValue().getKey(), equalTo("k"));
        assertThat(item0.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(1L))));
        assertThat(item0.getTimestamp(), equalTo(new Instant(5L)));

        TimestampedValue<KV<String, Long>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 20)));
        assertThat(item1.getValue().getKey(), equalTo("k"));
        assertThat(item1.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(1L, 2L, 4L))));
        assertThat(item1.getTimestamp(), equalTo(new Instant(5L)));

        TimestampedValue<KV<String, Long>> item2 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(10, 30)));
        assertThat(item2.getValue().getKey(), equalTo("k"));
        assertThat(item2.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(2L, 4L))));
        assertThat(item2.getTimestamp(), equalTo(new Instant(15L)));
    }

    /**
     * Tests that the given GABW implementation correctly groups elements that fall into overlapping
     * windows that are not merged.
     */
    public static void groupsIntoOverlappingNonmergingWindows(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "key",
                WindowedValue.of(
                    "v1",
                    new Instant(1),
                    Arrays.asList(window(0, 5)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(4),
                    Arrays.asList(window(1, 5)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(4),
                    Arrays.asList(window(0, 5)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        // NOTE(review): the original asserted item0 against window(1, 5).maxTimestamp() and
        // item1 against window(0, 5).maxTimestamp() — swapped. It passed only because both
        // windows end at 5 and therefore share the same maxTimestamp. Each item is now
        // checked against its own window; the asserted values are unchanged.
        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 5)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v3"));
        assertThat(item0.getTimestamp(), equalTo(window(0, 5).maxTimestamp()));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(1, 5)));
        assertThat(item1.getValue().getValue(), contains("v2"));
        assertThat(item1.getTimestamp(), equalTo(window(1, 5).maxTimestamp()));
    }

    /**
     * Tests that the given GABW implementation correctly groups elements into merged sessions.
     */
    public static void groupsElementsInMergedSessions(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)));

        // [0, 10) and [5, 15) overlap and should merge into [0, 15); [15, 25) stays separate.
        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "key",
                WindowedValue.of(
                    "v1",
                    new Instant(0),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(5),
                    Arrays.asList(window(5, 15)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(15),
                    Arrays.asList(window(15, 25)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 15)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(window(0, 15).maxTimestamp()));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(15, 25)));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(window(15, 25).maxTimestamp()));
    }

    /**
     * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
     * session window correctly according to the provided {@link CombineFn}.
     */
    public static void combinesElementsPerSession(
            GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
            CombineFn<Long, ?, Long> combineFn)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)));

        DoFnTester<KV<String, Iterable<WindowedValue<Long>>>, KV<String, Long>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    1L,
                    new Instant(0),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    2L,
                    new Instant(5),
                    Arrays.asList(window(5, 15)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    4L,
                    new Instant(15),
                    Arrays.asList(window(15, 25)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        TimestampedValue<KV<String, Long>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 15)));
        assertThat(item0.getValue().getKey(), equalTo("k"));
        assertThat(item0.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(1L, 2L))));
        assertThat(item0.getTimestamp(), equalTo(window(0, 15).maxTimestamp()));

        TimestampedValue<KV<String, Long>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(15, 25)));
        assertThat(item1.getValue().getKey(), equalTo("k"));
        assertThat(item1.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(4L))));
        assertThat(item1.getTimestamp(), equalTo(window(15, 25).maxTimestamp()));
    }

    /**
     * Tests that for a simple sequence of elements on the same key, the given GABW implementation
     * correctly groups them according to fixed windows and also sets the output timestamp
     * according to the policy {@link OutputTimeFns#outputAtEndOfWindow()}.
     */
    public static void groupsElementsIntoFixedWindowsWithEndOfWindowTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "key",
                WindowedValue.of(
                    "v1",
                    new Instant(1),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(2),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(13),
                    Arrays.asList(window(10, 20)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 10)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(window(0, 10).maxTimestamp()));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(10, 20)));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(window(10, 20).maxTimestamp()));
    }

    /**
     * Tests that for a simple sequence of elements on the same key, the given GABW implementation
     * correctly groups them according to fixed windows and also sets the output timestamp
     * according to the policy {@link OutputTimeFns#outputAtLatestInputTimestamp()}.
     */
    public static void groupsElementsIntoFixedWindowsWithLatestTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp());

        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    "v1",
                    new Instant(1),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(2),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(13),
                    Arrays.asList(window(10, 20)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        // Latest input timestamps: max(1, 2) = 2 for [0, 10); 13 for [10, 20).
        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 10)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(new Instant(2)));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(10, 20)));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(new Instant(13)));
    }

    /**
     * Tests that the given GABW implementation correctly groups elements into merged sessions
     * with output timestamps at the end of the merged window.
     */
    public static void groupsElementsInMergedSessionsWithEndOfWindowTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    "v1",
                    new Instant(0),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(5),
                    Arrays.asList(window(5, 15)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(15),
                    Arrays.asList(window(15, 25)),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(0, 15)));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(window(0, 15).maxTimestamp()));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(window(15, 25)));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(window(15, 25).maxTimestamp()));
    }

    /**
     * Tests that the given GABW implementation correctly groups elements into merged sessions
     * with output timestamps at the latest input timestamp of each merged window.
     *
     * <p>(The original Javadoc said "end of the merged window", but the strategy under test uses
     * {@link OutputTimeFns#outputAtLatestInputTimestamp()}.)
     */
    public static void groupsElementsInMergedSessionsWithLatestTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp());

        BoundedWindow unmergedWindow = window(15, 25);
        DoFnTester<KV<String, Iterable<WindowedValue<String>>>, KV<String, Iterable<String>>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    "v1",
                    new Instant(0),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v2",
                    new Instant(5),
                    Arrays.asList(window(5, 15)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    "v3",
                    new Instant(15),
                    Arrays.asList(unmergedWindow),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        BoundedWindow mergedWindow = window(0, 15);
        TimestampedValue<KV<String, Iterable<String>>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(mergedWindow));
        assertThat(item0.getValue().getValue(), containsInAnyOrder("v1", "v2"));
        assertThat(item0.getTimestamp(), equalTo(new Instant(5)));

        TimestampedValue<KV<String, Iterable<String>>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(unmergedWindow));
        assertThat(item1.getValue().getValue(), contains("v3"));
        assertThat(item1.getTimestamp(), equalTo(new Instant(15)));
    }

    /**
     * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
     * session window correctly according to the provided {@link CombineFn}.
     */
    public static void combinesElementsPerSessionWithEndOfWindowTimestamp(
            GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
            CombineFn<Long, ?, Long> combineFn)
            throws Exception {
        WindowingStrategy<?, IntervalWindow> windowingStrategy =
            WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
                .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

        BoundedWindow secondWindow = window(15, 25);
        DoFnTester<?, KV<String, Long>> result =
            runGABW(gabwFactory, windowingStrategy, "k",
                WindowedValue.of(
                    1L,
                    new Instant(0),
                    Arrays.asList(window(0, 10)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    2L,
                    new Instant(5),
                    Arrays.asList(window(5, 15)),
                    PaneInfo.NO_FIRING),
                WindowedValue.of(
                    4L,
                    new Instant(15),
                    Arrays.asList(secondWindow),
                    PaneInfo.NO_FIRING));

        assertThat(result.peekOutputElements(), hasSize(2));

        BoundedWindow firstResultWindow = window(0, 15);
        TimestampedValue<KV<String, Long>> item0 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(firstResultWindow));
        assertThat(item0.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(1L, 2L))));
        assertThat(item0.getTimestamp(), equalTo(firstResultWindow.maxTimestamp()));

        TimestampedValue<KV<String, Long>> item1 =
            Iterables.getOnlyElement(result.peekOutputElementsInWindow(secondWindow));
        assertThat(item1.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(4L))));
        assertThat(item1.getTimestamp(),
            equalTo(secondWindow.maxTimestamp()));
    }

    /**
     * Varargs convenience overload of {@link #runGABW(GroupAlsoByWindowsDoFnFactory,
     * WindowingStrategy, Object, Collection)}.
     */
    @SafeVarargs
    private static <K, InputT, OutputT, W extends BoundedWindow>
    DoFnTester<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> runGABW(
            GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory,
            WindowingStrategy<?, W> windowingStrategy,
            K key,
            WindowedValue<InputT>... values) throws Exception {
        return runGABW(gabwFactory, windowingStrategy, key, Arrays.asList(values));
    }

    /**
     * Runs the GABW fn produced by {@code gabwFactory} for {@code windowingStrategy} over a single
     * {@code KV(key, values)} bundle and returns the tester so callers can inspect its output.
     */
    private static <K, InputT, OutputT, W extends BoundedWindow>
    DoFnTester<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> runGABW(
            GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory,
            WindowingStrategy<?, W> windowingStrategy,
            K key,
            Collection<WindowedValue<InputT>> values) throws Exception {
        // NOTE(review): removed unused locals (a TupleTag and a DoFnRunnerBase.ListOutputManager)
        // that were created here but never read.
        DoFnTester<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> tester =
            DoFnTester.of(gabwFactory.forStrategy(windowingStrategy));
        tester.startBundle();
        tester.processElement(KV.<K, Iterable<WindowedValue<InputT>>>of(key, values));
        tester.finishBundle();

        // Sanity check for corruption: every output element must carry the input key.
        for (KV<K, OutputT> elem : tester.peekOutputElements()) {
            assertThat(elem.getKey(), equalTo(key));
        }

        return tester;
    }

    /** Shorthand for an {@link IntervalWindow} over [start, end) in milliseconds. */
    private static BoundedWindow window(long start, long end) {
        return new IntervalWindow(new Instant(start), new Instant(end));
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.