gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.reclipse.behavior.inference.input;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import org.reclipse.behavior.inference.BehavioralAnalysisPlugin;
import org.reclipse.behavior.inference.BehavioralPatternEntry;
import org.reclipse.behavior.inference.BehavioralPatternsCatalog;
import org.reclipse.behavior.inference.Trigger;
import org.reclipse.behavior.inference.automaton.AbstractState;
import org.reclipse.behavior.inference.automaton.AbstractSymbol;
import org.reclipse.behavior.inference.automaton.Assignment;
import org.reclipse.behavior.inference.automaton.DFA;
import org.reclipse.behavior.inference.automaton.DFAState;
import org.reclipse.behavior.inference.automaton.Transition;
import org.reclipse.behavior.inference.automaton.symbols.MethodCallObject;
import org.reclipse.behavior.inference.automaton.symbols.PermittedMethodCallSymbol;
import org.reclipse.behavior.inference.automaton.symbols.ProhibitedCallerSymbol;
import org.reclipse.behavior.inference.automaton.symbols.ProhibitedMethodCallSymbol;
import org.reclipse.behavior.inference.automaton.symbols.SymbolFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
/**
* @author lowende
* @author Last editor: $Author: mcp $
* @version $Revision: 4140 $ $Date: 2009-09-10 13:13:56 +0200 (Do, 10 Sep 2009) $
*/
public class BehavioralPatternsCatalogSaxHandler extends DefaultHandler
{
   /** Classpath location of the DTD used to resolve the catalog's SYSTEM id. */
   private static final String SYSTEM_ID_RESOURCE = "org/reclipse/patterns/behavior/inference/dtds/BehavioralPatternsCatalog.dtd";

   /** Factory used to provide {@link MethodCallObject}s; recreated per DFA. */
   private SymbolFactory symbolFactory;

   // Parser state: the element currently being populated, if any.
   private BehavioralPatternEntry currentBehavioralPattern;
   private DFA currentDFA;
   private PermittedMethodCallSymbol currentPermittedMethodCallSymbol;
   private ProhibitedMethodCallSymbol currentProhibitedMethodCallSymbol;
   private ProhibitedCallerSymbol currentProhibitedCallerSymbol;

   // Lookup tables keyed by the XML "id" attributes, used to resolve the
   // id references on transition/start-state/rejecting-state elements.
   // Cleared at the start of every behavioral pattern entry.
   private HashMap<String, AbstractSymbol> symbols;
   private HashMap<String, DFAState> states;
   private HashMap<String, Assignment> assignments;

   /** The catalog assembled while parsing. */
   private BehavioralPatternsCatalog catalog;

   /**
    * Returns the catalog assembled during parsing, or <code>null</code> if the
    * catalog root element has not been seen yet.
    */
   public BehavioralPatternsCatalog getCatalog()
   {
      return this.catalog;
   }

   /**
    * Resolves the catalog's SYSTEM id to the DTD bundled on the classpath so
    * parsing does not depend on the external reference being reachable.
    *
    * @throws IOException
    * @see org.xml.sax.EntityResolver#resolveEntity(java.lang.String, java.lang.String)
    */
   @Override
   public InputSource resolveEntity(final String publicId, final String systemId)
         throws SAXException, IOException
   {
      if (IBehavioralPatternsCatalogConstants.SYSTEM_ID.equals(systemId))
      {
         ClassLoader classLoader = getClass().getClassLoader();
         if (classLoader == null)
         {
            // loaded by the bootstrap class loader; fall back to the system one
            classLoader = ClassLoader.getSystemClassLoader();
         }
         final URL systemIdURL = classLoader.getResource(SYSTEM_ID_RESOURCE);
         if (systemIdURL != null)
         {
            try
            {
               return new InputSource(systemIdURL.openStream());
            }
            catch (final Exception e)
            {
               BehavioralAnalysisPlugin.logError(
                     "Could not resolve SYSTEM or PUBLIC reference for DTD.", e);
               throw new SAXException(e);
            }
         }
      }
      // unknown entity: let the default resolver handle it
      return super.resolveEntity(publicId, systemId);
   }

   /**
    * Resets the id lookup tables before a new document is parsed.
    *
    * @see org.xml.sax.ContentHandler#startDocument()
    */
   @Override
   public void startDocument() throws SAXException
   {
      this.symbols = new HashMap<String, AbstractSymbol>();
      this.states = new HashMap<String, DFAState>();
      this.assignments = new HashMap<String, Assignment>();
   }

   /**
    * Dispatches each opening tag to the matching create* method. Any exception
    * thrown while handling a tag is logged and swallowed so that parsing of
    * the remaining catalog continues.
    *
    * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String,
    *      java.lang.String, org.xml.sax.Attributes)
    */
   @Override
   public void startElement(final String uri, final String name,
         final String qName, final Attributes attrs)
   {
      try
      {
         if (IBehavioralPatternsCatalogConstants.BEHAVIORAL_PATTERNS_CATALOG_TAG
               .equals(name))
         {
            this.catalog = createBehavioralPatternsCatalog(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.BEHAVIORAL_PATTERN_ENTRY_TAG
               .equals(name))
         {
            // ids are only unique within one pattern entry, so reset the tables
            this.symbols.clear();
            this.states.clear();
            this.assignments.clear();
            this.currentBehavioralPattern = createBehavioralPatternEntry(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.DFA_TAG.equals(name))
         {
            this.currentDFA = createDFA(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.PERMITTED_METHOD_CALL_SYMBOL_TAG
               .equals(name))
         {
            this.currentPermittedMethodCallSymbol = createPermittedMethodCallSymbol(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.PROHIBITED_METHOD_CALL_SYMBOL_TAG
               .equals(name))
         {
            this.currentProhibitedMethodCallSymbol = createProhibitedMethodCallSymbol(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.PROHIBITED_CALLER_SYMBOL_TAG
               .equals(name))
         {
            this.currentProhibitedCallerSymbol = createProhibitedCallersSymbol(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.CALLER_TAG.equals(name))
         {
            createCaller(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.CALLEE_TAG.equals(name))
         {
            createCallee(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.PERMITTED_CALLER_TAG
               .equals(name))
         {
            createPermittedCaller(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.DFA_STATE_TAG.equals(name))
         {
            createDFAState(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.ASSIGNMENT_TAG.equals(name))
         {
            createAssignment(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.TRANSITION_TAG.equals(name))
         {
            createTransition(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.START_STATE_TAG.equals(name))
         {
            createStartState(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.REJECTING_STATE_TAG
               .equals(name))
         {
            createRejectingState(attrs);
         }
         else if (IBehavioralPatternsCatalogConstants.TRIGGER_TAG.equals(name))
         {
            createTrigger(attrs);
         }
      }
      catch (final Exception e)
      {
         BehavioralAnalysisPlugin.logError(
               "Unexpected exception in parsing tracer input.", e);
      }
   }

   /**
    * Finalizes the element that just closed: registers completed symbols with
    * the current DFA and clears the corresponding parser state.
    *
    * @see org.xml.sax.ContentHandler#endElement(java.lang.String, java.lang.String,
    *      java.lang.String)
    */
   @Override
   public void endElement(final String uri, final String name, final String qName)
   {
      if (IBehavioralPatternsCatalogConstants.BEHAVIORAL_PATTERN_ENTRY_TAG
            .equals(name))
      {
         if (BehavioralAnalysisPlugin.LOG_INFO)
         {
            BehavioralAnalysisPlugin.logInfo("Behavioral Pattern loaded: "
                  + this.currentBehavioralPattern.getName());
         }
         this.currentBehavioralPattern = null;
      }
      else if (IBehavioralPatternsCatalogConstants.DFA_TAG.equals(name))
      {
         this.currentDFA = null;
      }
      else if (IBehavioralPatternsCatalogConstants.PERMITTED_METHOD_CALL_SYMBOL_TAG
            .equals(name))
      {
         this.currentDFA.addToSymbols(this.currentPermittedMethodCallSymbol);
         this.currentPermittedMethodCallSymbol = null;
      }
      else if (IBehavioralPatternsCatalogConstants.PROHIBITED_METHOD_CALL_SYMBOL_TAG
            .equals(name))
      {
         this.currentDFA.addToSymbols(this.currentProhibitedMethodCallSymbol);
         this.currentProhibitedMethodCallSymbol = null;
      }
      else if (IBehavioralPatternsCatalogConstants.PROHIBITED_CALLER_SYMBOL_TAG
            .equals(name))
      {
         this.currentDFA.addToSymbols(this.currentProhibitedCallerSymbol);
         this.currentProhibitedCallerSymbol = null;
      }
   }

   /**
    * @see org.xml.sax.ErrorHandler#warning(org.xml.sax.SAXParseException)
    */
   @Override
   public void warning(final SAXParseException exception)
   {
      BehavioralAnalysisPlugin.logError("XML Parse warning in line "
            + exception.getLineNumber() + ":", exception);
   }

   /**
    * @see org.xml.sax.ErrorHandler#error(org.xml.sax.SAXParseException)
    */
   @Override
   public void error(final SAXParseException exception)
   {
      BehavioralAnalysisPlugin.logError("XML Parse Error in line "
            + exception.getLineNumber() + ":", exception);
   }

   /**
    * @see org.xml.sax.ErrorHandler#fatalError(org.xml.sax.SAXParseException)
    */
   @Override
   public void fatalError(final SAXParseException exception)
   {
      BehavioralAnalysisPlugin.logError("Fatal XML Parse Error in line "
            + exception.getLineNumber() + ":", exception);
   }

   /**
    * Creates the (empty) catalog for the document's root element. The
    * attributes are currently unused.
    */
   private BehavioralPatternsCatalog createBehavioralPatternsCatalog(
         final Attributes attrs)
   {
      final BehavioralPatternsCatalog catalog = new BehavioralPatternsCatalog();
      return catalog;
   }

   /**
    * Creates a pattern entry from its name and negative attributes and adds it
    * to the catalog.
    */
   private BehavioralPatternEntry createBehavioralPatternEntry(
         final Attributes attrs)
   {
      final BehavioralPatternEntry pattern = new BehavioralPatternEntry();
      pattern.setName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.NAME_ATTRIBUTE));
      // parseBoolean handles a missing (null) attribute by returning false
      final boolean negative = Boolean.parseBoolean(attrs
            .getValue(IBehavioralPatternsCatalogConstants.NEGATIVE_ATTRIBUTE));
      pattern.setNegative(negative);
      this.catalog.addToEntries(pattern);
      return pattern;
   }

   /**
    * Creates the automaton for the current pattern entry and resets the symbol
    * factory so method-call objects are shared per DFA only.
    */
   private DFA createDFA(final Attributes attrs)
   {
      // create a new SymbolFactory
      this.symbolFactory = new SymbolFactory();
      final DFA dfa = new DFA();
      this.currentBehavioralPattern.setAutomaton(dfa);
      return dfa;
   }

   /**
    * Creates a permitted method call symbol and registers it under its id.
    */
   private PermittedMethodCallSymbol createPermittedMethodCallSymbol(
         final Attributes attrs)
   {
      final PermittedMethodCallSymbol symbol = new PermittedMethodCallSymbol();
      symbol.setMethodName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.METHOD_NAME_ATTRIBUTE));
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      this.symbols.put(id, symbol);
      return symbol;
   }

   /**
    * Creates a prohibited method call symbol and registers it under its id.
    */
   private ProhibitedMethodCallSymbol createProhibitedMethodCallSymbol(
         final Attributes attrs)
   {
      final ProhibitedMethodCallSymbol symbol = new ProhibitedMethodCallSymbol();
      symbol.setMethodName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.METHOD_NAME_ATTRIBUTE));
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      this.symbols.put(id, symbol);
      return symbol;
   }

   /**
    * Creates a prohibited caller symbol and registers it under its id.
    */
   private ProhibitedCallerSymbol createProhibitedCallersSymbol(
         final Attributes attrs)
   {
      final ProhibitedCallerSymbol symbol = new ProhibitedCallerSymbol();
      symbol.setMethodName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.METHOD_NAME_ATTRIBUTE));
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      this.symbols.put(id, symbol);
      return symbol;
   }

   /**
    * Attaches a caller object to the current permitted method call symbol.
    * NOTE(review): assumes a caller element only occurs inside a permitted
    * method call symbol (presumably guaranteed by the DTD) — otherwise this
    * would throw a NullPointerException; verify against the DTD.
    */
   private void createCaller(final Attributes attrs)
   {
      final MethodCallObject object = createMethodCallObject(attrs);
      this.currentPermittedMethodCallSymbol.setCaller(object);
   }

   /**
    * Attaches a callee object to whichever symbol is currently open.
    */
   private void createCallee(final Attributes attrs)
   {
      final MethodCallObject object = createMethodCallObject(attrs);
      if (this.currentPermittedMethodCallSymbol != null)
      {
         this.currentPermittedMethodCallSymbol.setCallee(object);
      }
      else if (this.currentProhibitedMethodCallSymbol != null)
      {
         this.currentProhibitedMethodCallSymbol.setCallee(object);
      }
      else if (this.currentProhibitedCallerSymbol != null)
      {
         this.currentProhibitedCallerSymbol.setCallee(object);
      }
   }

   /**
    * Adds a permitted caller to the current prohibited caller symbol.
    */
   private void createPermittedCaller(final Attributes attrs)
   {
      final MethodCallObject object = createMethodCallObject(attrs);
      this.currentProhibitedCallerSymbol.addToPermittedCallers(object);
   }

   /**
    * Builds (or reuses via the symbol factory) a method call object from the
    * element's name, type, set and forEach attributes.
    */
   private MethodCallObject createMethodCallObject(final Attributes attrs)
   {
      // the "set" attribute acts as a flag: any value present means true
      final boolean isSet = attrs
            .getValue(IBehavioralPatternsCatalogConstants.SET_ATTRIBUTE) != null;
      // "forEach" defaults to false when absent
      final String forEachValue = attrs
            .getValue(IBehavioralPatternsCatalogConstants.FOREACH_ATTRIBUTE);
      final boolean forEach = forEachValue != null
            && Boolean.parseBoolean(forEachValue);
      final MethodCallObject object = this.symbolFactory.provideMethodCallObject(
            attrs.getValue(IBehavioralPatternsCatalogConstants.NAME_ATTRIBUTE),
            attrs.getValue(IBehavioralPatternsCatalogConstants.TYPE_NAME_ATTRIBUTE),
            isSet, forEach);
      return object;
   }

   /**
    * Creates a DFA state, maps the numeric type attribute onto the
    * AbstractState constants, adds the state to the DFA and registers it under
    * its id.
    */
   private void createDFAState(final Attributes attrs)
   {
      final DFAState dfaState = new DFAState();
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      final String name = attrs
            .getValue(IBehavioralPatternsCatalogConstants.NAME_ATTRIBUTE);
      if (name != null)
      {
         dfaState.setName(name);
      }
      final int type = Integer.parseInt(attrs
            .getValue(IBehavioralPatternsCatalogConstants.TYPE_ATTRIBUTE));
      switch (type)
      {
         case 0:
            dfaState.setType(AbstractState.NONE);
            break;
         case 1:
            dfaState.setType(AbstractState.ACCEPT);
            break;
         case 2:
            dfaState.setType(AbstractState.REJECT);
            break;
      }
      this.currentDFA.addToStates(dfaState);
      this.states.put(id, dfaState);
   }

   /**
    * Creates an assignment with optional left/right sides, adds it to the DFA
    * and registers it under its id.
    */
   private void createAssignment(final Attributes attrs)
   {
      final Assignment assignment = new Assignment();
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      final String leftSide = attrs
            .getValue(IBehavioralPatternsCatalogConstants.LEFT_SIDE_ATTRIBUTE);
      final String rightSide = attrs
            .getValue(IBehavioralPatternsCatalogConstants.RIGHT_SIDE_ATTRIBUTE);
      if (leftSide != null)
      {
         assignment.setLeftSide(leftSide);
      }
      if (rightSide != null)
      {
         assignment.setRightSide(rightSide);
      }
      this.currentDFA.addToAssignments(assignment);
      this.assignments.put(id, assignment);
   }

   /**
    * Creates a transition and resolves its state/symbol/assignment references
    * through the id lookup tables. Unknown ids resolve to null, which is
    * passed through to the transition setters unchanged.
    */
   private void createTransition(final Attributes attrs)
   {
      final Transition transition = new Transition();
      final String previousStateId = attrs
            .getValue(IBehavioralPatternsCatalogConstants.PREVIOUS_STATE_ID_ATTRIBUTE);
      transition.setPreviousState(this.states.get(previousStateId));
      final String nextStateId = attrs
            .getValue(IBehavioralPatternsCatalogConstants.NEXT_STATE_ID_ATTRIBUTE);
      transition.setNextState(this.states.get(nextStateId));
      final String symbolId = attrs
            .getValue(IBehavioralPatternsCatalogConstants.SYMBOL_ID_ATTRIBUTE);
      transition.setSymbol(this.symbols.get(symbolId));
      final String assignmentId = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ASSIGNMENT_ID_ATTRIBUTE);
      transition.setAssignment(this.assignments.get(assignmentId));
   }

   /**
    * Marks the state referenced by the id attribute as the DFA's start state.
    */
   private void createStartState(final Attributes attrs)
   {
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      this.currentDFA.setStartState(this.states.get(id));
   }

   /**
    * Marks the state referenced by the id attribute as the DFA's rejecting
    * state.
    */
   private void createRejectingState(final Attributes attrs)
   {
      final String id = attrs
            .getValue(IBehavioralPatternsCatalogConstants.ID_ATTRIBUTE);
      this.currentDFA.setRejectingState(this.states.get(id));
   }

   /**
    * Creates a trigger from the caller/callee/method attributes and adds it to
    * the current pattern entry. Only the caller type name is optional.
    */
   private void createTrigger(final Attributes attrs)
   {
      final Trigger trigger = new Trigger();
      trigger.setCallerName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.CALLER_NAME_ATTRIBUTE));
      final String callerType = attrs
            .getValue(IBehavioralPatternsCatalogConstants.CALLER_TYPE_NAME_ATTRIBUTE);
      if (callerType != null)
      {
         trigger.setCallerTypeName(callerType);
      }
      trigger.setCalleeName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.CALLEE_NAME_ATTRIBUTE));
      trigger.setCalleeTypeName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.CALLEE_TYPE_NAME_ATTRIBUTE));
      trigger.setMethodName(attrs
            .getValue(IBehavioralPatternsCatalogConstants.METHOD_NAME_ATTRIBUTE));
      this.currentBehavioralPattern.addToTriggers(trigger);
   }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.quartz;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.ExtendedStartupListener;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.annotations.Component;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultComponent;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.PropertiesHelper;
import org.apache.camel.util.StringHelper;
import org.quartz.Scheduler;
import org.quartz.SchedulerContext;
import org.quartz.SchedulerException;
import org.quartz.SchedulerFactory;
import org.quartz.TriggerKey;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This component will hold a Quartz Scheduler that will provide scheduled timer based endpoint that generate a
* QuartzMessage to a route. Currently it support Cron and Simple trigger scheduling type.
*/
@Component("quartz")
public class QuartzComponent extends DefaultComponent implements ExtendedStartupListener {
    private static final Logger LOG = LoggerFactory.getLogger(QuartzComponent.class);

    // Tasks to run once the scheduler has been created and initialized; drained
    // (and schedulerInitTasksDone flipped) in doStartScheduler.
    private final List<SchedulerInitTask> schedulerInitTasks = new ArrayList<>();
    private volatile boolean schedulerInitTasksDone;

    @Metadata(label = "advanced")
    private Scheduler scheduler;
    @Metadata(label = "advanced")
    private SchedulerFactory schedulerFactory;
    @Metadata
    private String propertiesRef;
    @Metadata
    private Map properties;
    @Metadata
    private String propertiesFile;
    @Metadata(label = "scheduler")
    private int startDelayedSeconds;
    @Metadata(label = "scheduler", defaultValue = "true")
    private boolean autoStartScheduler = true;
    @Metadata(label = "scheduler")
    private boolean interruptJobsOnShutdown;
    @Metadata(defaultValue = "true")
    private boolean enableJmx = true;
    @Metadata
    private boolean prefixJobNameWithEndpointId;
    @Metadata(defaultValue = "true")
    private boolean prefixInstanceName = true;

    public QuartzComponent() {
    }

    public QuartzComponent(CamelContext camelContext) {
        super(camelContext);
    }

    public boolean isAutoStartScheduler() {
        return autoStartScheduler;
    }

    /**
     * Whether or not the scheduler should be auto started.
     * <p/>
     * This options is default true
     */
    public void setAutoStartScheduler(boolean autoStartScheduler) {
        this.autoStartScheduler = autoStartScheduler;
    }

    public int getStartDelayedSeconds() {
        return startDelayedSeconds;
    }

    /**
     * Seconds to wait before starting the quartz scheduler.
     */
    public void setStartDelayedSeconds(int startDelayedSeconds) {
        this.startDelayedSeconds = startDelayedSeconds;
    }

    public boolean isPrefixJobNameWithEndpointId() {
        return prefixJobNameWithEndpointId;
    }

    /**
     * Whether to prefix the quartz job with the endpoint id.
     * <p/>
     * This option is default false.
     */
    public void setPrefixJobNameWithEndpointId(boolean prefixJobNameWithEndpointId) {
        this.prefixJobNameWithEndpointId = prefixJobNameWithEndpointId;
    }

    public boolean isEnableJmx() {
        return enableJmx;
    }

    /**
     * Whether to enable Quartz JMX which allows to manage the Quartz scheduler from JMX.
     * <p/>
     * This options is default true
     */
    public void setEnableJmx(boolean enableJmx) {
        this.enableJmx = enableJmx;
    }

    public String getPropertiesRef() {
        return propertiesRef;
    }

    /**
     * References to an existing {@link Properties} or {@link Map} to lookup in the registry to use for configuring
     * quartz.
     */
    public void setPropertiesRef(String propertiesRef) {
        this.propertiesRef = propertiesRef;
    }

    public Map getProperties() {
        return properties;
    }

    /**
     * Properties to configure the Quartz scheduler.
     */
    public void setProperties(Map properties) {
        this.properties = properties;
    }

    public String getPropertiesFile() {
        return propertiesFile;
    }

    /**
     * File name of the properties to load from the classpath
     */
    public void setPropertiesFile(String propertiesFile) {
        this.propertiesFile = propertiesFile;
    }

    public boolean isPrefixInstanceName() {
        return prefixInstanceName;
    }

    /**
     * Whether to prefix the Quartz Scheduler instance name with the CamelContext name.
     * <p/>
     * This is enabled by default, to let each CamelContext use its own Quartz scheduler instance by default. You can
     * set this option to <tt>false</tt> to reuse Quartz scheduler instances between multiple CamelContext's.
     */
    public void setPrefixInstanceName(boolean prefixInstanceName) {
        this.prefixInstanceName = prefixInstanceName;
    }

    public boolean isInterruptJobsOnShutdown() {
        return interruptJobsOnShutdown;
    }

    /**
     * Whether to interrupt jobs on shutdown which forces the scheduler to shutdown quicker and attempt to interrupt any
     * running jobs. If this is enabled then any running jobs can fail due to being interrupted. When a job is
     * interrupted then Camel will mark the exchange to stop continue routing and set
     * {@link java.util.concurrent.RejectedExecutionException} as caused exception. Therefore use this with care, as its
     * often better to allow Camel jobs to complete and shutdown gracefully.
     */
    public void setInterruptJobsOnShutdown(boolean interruptJobsOnShutdown) {
        this.interruptJobsOnShutdown = interruptJobsOnShutdown;
    }

    /**
     * Returns the scheduler factory, lazily creating it on first access.
     */
    public SchedulerFactory getSchedulerFactory() {
        if (schedulerFactory == null) {
            try {
                schedulerFactory = createSchedulerFactory();
            } catch (SchedulerException e) {
                // wrap consistently with the rest of this component
                throw RuntimeCamelException.wrapRuntimeException(e);
            }
        }
        return schedulerFactory;
    }

    /**
     * Creates a StdSchedulerFactory from either the user-supplied properties
     * (properties/propertiesRef/propertiesFile) or, as fallback, the default
     * org/quartz/quartz.properties from the classpath.
     */
    private SchedulerFactory createSchedulerFactory() throws SchedulerException {
        Properties prop = loadProperties();
        if (prop == null) {
            // read default props to be able to use a single scheduler per camel context
            // if we need more than one scheduler per context use setScheduler(Scheduler)
            // or setFactory(SchedulerFactory) methods
            // must use classloader from StdSchedulerFactory to work even in OSGi
            InputStream is = ObjectHelper.loadResourceAsStream("org/quartz/quartz.properties");
            if (is == null) {
                throw new SchedulerException("Quartz properties file not found in classpath: org/quartz/quartz.properties");
            }
            prop = new Properties();
            try {
                prop.load(is);
            } catch (IOException e) {
                throw new SchedulerException(
                        "Error loading Quartz properties file from classpath: org/quartz/quartz.properties", e);
            } finally {
                IOHelper.close(is);
            }
        }
        applyComponentConfiguration(prop);
        SchedulerFactory answer = new StdSchedulerFactory(prop);
        if (LOG.isDebugEnabled()) {
            String name = prop.getProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME);
            LOG.debug("Creating SchedulerFactory: {} with properties: {}", name, prop);
        }
        return answer;
    }

    /**
     * Applies this component's configuration on top of the given Quartz properties
     * (update-checker off, instance-name prefixing, interrupt-on-shutdown, JMX).
     */
    private void applyComponentConfiguration(Properties prop) {
        // force disabling update checker (will do online check over the internet)
        prop.put("org.quartz.scheduler.skipUpdateCheck", "true");
        prop.put("org.terracotta.quartz.skipUpdateCheck", "true");
        // camel context name will be a suffix to use one scheduler per context
        if (isPrefixInstanceName()) {
            String instName = createInstanceName(prop);
            prop.setProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, instName);
        }
        if (isInterruptJobsOnShutdown()) {
            prop.setProperty(StdSchedulerFactory.PROP_SCHED_INTERRUPT_JOBS_ON_SHUTDOWN, "true");
        }
        // enable jmx unless configured to not do so
        if (enableJmx && !prop.containsKey("org.quartz.scheduler.jmx.export")) {
            prop.put("org.quartz.scheduler.jmx.export", "true");
            LOG.info("Setting org.quartz.scheduler.jmx.export=true to ensure QuartzScheduler(s) will be enlisted in JMX.");
        }
    }

    /**
     * Builds the scheduler instance name, suffixing the configured name (if any)
     * with this CamelContext's identity so each context gets its own scheduler.
     */
    protected String createInstanceName(Properties prop) {
        String instName = prop.getProperty(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME);
        // camel context name will be a suffix to use one scheduler per context
        String identity = QuartzHelper.getQuartzContextName(getCamelContext());
        if (identity != null) {
            if (instName == null) {
                instName = "scheduler-" + identity;
            } else {
                instName = instName + "-" + identity;
            }
        }
        return instName;
    }

    /**
     * Is the quartz scheduler clustered?
     */
    public boolean isClustered() throws SchedulerException {
        return getScheduler().getMetaData().isJobStoreClustered();
    }

    /**
     * Loads the Quartz configuration from, in order of precedence: the
     * properties map, the registry reference, or the properties file.
     * Returns null when none of the three sources is configured.
     */
    private Properties loadProperties() throws SchedulerException {
        Properties answer = null;
        if (getProperties() != null) {
            answer = new Properties();
            answer.putAll(getProperties());
        }
        if (answer == null && getPropertiesRef() != null) {
            Map map = CamelContextHelper.mandatoryLookup(getCamelContext(), getPropertiesRef(), Map.class);
            answer = new Properties();
            answer.putAll(map);
        }
        if (answer == null && getPropertiesFile() != null) {
            LOG.info("Loading Quartz properties file from: {}", getPropertiesFile());
            InputStream is = null;
            try {
                is = ResourceHelper.resolveMandatoryResourceAsInputStream(getCamelContext(), getPropertiesFile());
                answer = new Properties();
                answer.load(is);
            } catch (IOException e) {
                throw new SchedulerException("Error loading Quartz properties file: " + getPropertiesFile(), e);
            } finally {
                IOHelper.close(is);
            }
        }
        return answer;
    }

    /**
     * To use the custom SchedulerFactory which is used to create the Scheduler.
     */
    public void setSchedulerFactory(SchedulerFactory schedulerFactory) {
        this.schedulerFactory = schedulerFactory;
    }

    public Scheduler getScheduler() {
        return scheduler;
    }

    /**
     * Adds a task to be executed as part of initializing and starting the scheduler; or executes the task if the
     * scheduler has already been started.
     */
    public void addScheduleInitTask(SchedulerInitTask task) {
        if (schedulerInitTasksDone) {
            // task already done then run task now
            try {
                task.initializeTask(scheduler);
            } catch (Exception e) {
                throw RuntimeCamelException.wrapRuntimeException(e);
            }
        } else {
            this.schedulerInitTasks.add(task);
        }
    }

    /**
     * To use the custom configured Quartz scheduler, instead of creating a new Scheduler.
     */
    public void setScheduler(Scheduler scheduler) {
        this.scheduler = scheduler;
    }

    /**
     * Creates a Quartz endpoint, applying scheduler-wide settings from the uri
     * parameters (startDelayedSeconds, autoStartScheduler,
     * prefixJobNameWithEndpointId) and extracting trigger./job. options.
     */
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // Get couple of scheduler settings
        Integer startDelayedSeconds = getAndRemoveParameter(parameters, "startDelayedSeconds", Integer.class);
        if (startDelayedSeconds != null) {
            if (this.startDelayedSeconds != 0 && !(this.startDelayedSeconds == startDelayedSeconds)) {
                LOG.warn("A Quartz job is already configured with a different 'startDelayedSeconds' configuration! "
                        + "All Quartz jobs must share the same 'startDelayedSeconds' configuration! Cannot apply the 'startDelayedSeconds' configuration!");
            } else {
                this.startDelayedSeconds = startDelayedSeconds;
            }
        }
        Boolean autoStartScheduler = getAndRemoveParameter(parameters, "autoStartScheduler", Boolean.class);
        if (autoStartScheduler != null) {
            this.autoStartScheduler = autoStartScheduler;
        }
        Boolean prefixJobNameWithEndpointId = getAndRemoveParameter(parameters, "prefixJobNameWithEndpointId", Boolean.class);
        if (prefixJobNameWithEndpointId != null) {
            this.prefixJobNameWithEndpointId = prefixJobNameWithEndpointId;
        }
        // Extract trigger.XXX and job.XXX properties to be set on endpoint below
        Map<String, Object> triggerParameters = PropertiesHelper.extractProperties(parameters, "trigger.");
        Map<String, Object> jobParameters = PropertiesHelper.extractProperties(parameters, "job.");
        // Create quartz endpoint
        QuartzEndpoint result = new QuartzEndpoint(uri, this);
        TriggerKey triggerKey = createTriggerKey(uri, remaining, result);
        result.setTriggerKey(triggerKey);
        result.setTriggerParameters(triggerParameters);
        result.setJobParameters(jobParameters);
        if (startDelayedSeconds != null) {
            result.setStartDelayedSeconds(startDelayedSeconds);
        }
        if (autoStartScheduler != null) {
            result.setAutoStartScheduler(autoStartScheduler);
        }
        if (prefixJobNameWithEndpointId != null) {
            result.setPrefixJobNameWithEndpointId(prefixJobNameWithEndpointId);
        }
        // special for cron where we replace + as space
        String cron = getAndRemoveParameter(parameters, "cron", String.class);
        if (cron != null) {
            // replace + as space
            cron = cron.replace('+', ' ');
            result.setCron(cron);
        }
        setProperties(result, parameters);
        return result;
    }

    /**
     * Derives the trigger name/group from the endpoint uri: "quartz://group/name"
     * or "quartz://name" (group then defaults to this context's unique name).
     */
    private TriggerKey createTriggerKey(String uri, String remaining, QuartzEndpoint endpoint) throws Exception {
        // Parse uri for trigger name and group
        URI u = new URI(uri);
        String path = StringHelper.after(u.getPath(), "/");
        String host = u.getHost();
        // host can be null if the uri did contain invalid host characters such as an underscore
        if (host == null) {
            host = StringHelper.before(remaining, "/");
            if (host == null) {
                host = remaining;
            }
        }
        // Trigger group can be optional, if so set it to this context's unique name
        String name;
        String group;
        if (ObjectHelper.isNotEmpty(path) && ObjectHelper.isNotEmpty(host)) {
            group = host;
            name = path;
        } else {
            String camelContextName = QuartzHelper.getQuartzContextName(getCamelContext());
            group = camelContextName == null ? "Camel" : "Camel_" + camelContextName;
            name = host;
        }
        if (prefixJobNameWithEndpointId) {
            name = endpoint.getId() + "_" + name;
        }
        return new TriggerKey(name, group);
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (scheduler == null) {
            createAndInitScheduler();
        }
    }

    /**
     * Creates the scheduler and stores this CamelContext plus the shared job
     * counter in the Quartz scheduler context.
     */
    private void createAndInitScheduler() throws SchedulerException {
        LOG.info("Creating and initializing scheduler.");
        scheduler = createScheduler();
        SchedulerContext quartzContext = storeCamelContextInQuartzContext();
        // Set camel job counts to zero. We needed this to prevent shutdown in case there are multiple Camel contexts
        // that has not completed yet, and the last one with job counts to zero will eventually shutdown.
        quartzContext.computeIfAbsent(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT, k -> new AtomicInteger());
    }

    /**
     * Stores this CamelContext in the Quartz scheduler context (keyed per
     * context name) so Quartz jobs can access it.
     */
    private SchedulerContext storeCamelContextInQuartzContext() throws SchedulerException {
        // Store CamelContext into QuartzContext space
        SchedulerContext quartzContext = scheduler.getContext();
        String camelContextName = QuartzHelper.getQuartzContextName(getCamelContext());
        LOG.debug("Storing camelContextName={} into Quartz Context space.", camelContextName);
        quartzContext.put(QuartzConstants.QUARTZ_CAMEL_CONTEXT + "-" + camelContextName, getCamelContext());
        return quartzContext;
    }

    /** Obtains a scheduler from the (possibly lazily created) factory. */
    private Scheduler createScheduler() throws SchedulerException {
        return getSchedulerFactory().getScheduler();
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
        if (scheduler != null) {
            if (isInterruptJobsOnShutdown()) {
                LOG.info("Shutting down scheduler. (will interrupts jobs to shutdown quicker.)");
                scheduler.shutdown(false);
                scheduler = null;
            } else {
                // only shut down when no other Camel context still has jobs registered on this scheduler
                AtomicInteger number = (AtomicInteger) scheduler.getContext().get(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT);
                if (number != null && number.get() > 0) {
                    LOG.info("Cannot shutdown scheduler: {} as there are still {} jobs registered.",
                            scheduler.getSchedulerName(), number.get());
                } else {
                    LOG.info("Shutting down scheduler. (will wait for all jobs to complete first.)");
                    scheduler.shutdown(true);
                    scheduler = null;
                }
            }
        }
    }

    @Override
    public void onCamelContextStarted(CamelContext context, boolean alreadyStarted) throws Exception {
        if (alreadyStarted) {
            // a route may have been added or started after CamelContext is started so ensure we startup the scheduler
            doStartScheduler();
        }
    }

    @Override
    public void onCamelContextFullyStarted(CamelContext context, boolean alreadyStarted) throws Exception {
        doStartScheduler();
    }

    /**
     * Ensures the scheduler exists, runs any pending init tasks exactly once,
     * and starts the scheduler (possibly delayed) unless auto start is disabled.
     */
    protected void doStartScheduler() throws Exception {
        // If Camel has already started and then user add a route dynamically, we need to ensure
        // to create and init the scheduler first.
        if (scheduler == null) {
            createAndInitScheduler();
        } else {
            // in case custom scheduler was injected (i.e. created elsewhere), we may need to add
            // current camel context to quartz context so jobs have access
            storeCamelContextInQuartzContext();
        }
        // initialize scheduler tasks
        for (SchedulerInitTask task : schedulerInitTasks) {
            task.initializeTask(scheduler);
        }
        // cleanup tasks as they need only to be triggered once
        schedulerInitTasks.clear();
        schedulerInitTasksDone = true;
        // Now scheduler is ready, let see how we should start it.
        if (!autoStartScheduler) {
            LOG.info("Not starting scheduler because autoStartScheduler is set to false.");
        } else {
            if (startDelayedSeconds > 0) {
                if (scheduler.isStarted()) {
                    LOG.warn("The scheduler has already started. Cannot apply the 'startDelayedSeconds' configuration!");
                } else {
                    LOG.info("Starting scheduler with startDelayedSeconds={}", startDelayedSeconds);
                    scheduler.startDelayed(startDelayedSeconds);
                }
            } else {
                if (scheduler.isStarted()) {
                    LOG.info("The scheduler has already been started.");
                } else {
                    LOG.info("Starting scheduler.");
                    scheduler.start();
                }
            }
        }
    }
}
| |
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//
package org.springframework.social.connect.jpa.hibernate;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.social.connect.ConnectionKey;
import org.springframework.social.connect.DuplicateConnectionException;
import org.springframework.social.connect.jpa.JpaTemplate;
import org.springframework.social.connect.jpa.RemoteUser;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.MultiValueMap;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.Map.Entry;
/**
 * Base JPA DAO for user connection entities. Implements the {@link JpaTemplate}
 * persistence operations against a concrete {@link AbstractUserConnection}
 * subclass, which subclasses supply via {@link #createNewUserConnection}.
 * <p>
 * The JPQL fragments are kept exactly as before; subclasses may override the
 * {@code get*Jpql()} methods if their primary-key mapping differs.
 *
 * @param <U> the concrete user connection entity type managed by this DAO
 */
public abstract class AbstractUserConnectionJpaDao<U extends AbstractUserConnection<?>> implements JpaTemplate {

    /** Concrete entity class managed by this DAO. */
    private final Class<U> persistentClass;

    @PersistenceContext
    private EntityManager entityManager;

    public AbstractUserConnectionJpaDao(Class<U> persistentClass) {
        this.persistentClass = persistentClass;
    }

    public Class<U> getPersistentClass() {
        return this.persistentClass;
    }

    /** JPQL path expression for the provider id column. */
    protected String getProviderIdJpql() {
        return "u.primaryKey.providerId";
    }

    /** JPQL path expression for the local user id column. */
    protected String getUserIdJpql() {
        return "u.primaryKey.userId";
    }

    /** JPQL path expression for the provider-side user id column. */
    protected String getProviderUserIdJpql() {
        return "u.primaryKey.providerUserId";
    }

    /**
     * Returns the distinct local user ids that are connected to any of the
     * given provider user ids on the given provider.
     */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public Set<String> findUsersConnectedTo(String providerId, Set<String> providerUserIds) {
        TypedQuery<String> query = this.entityManager.createQuery("select " + this.getUserIdJpql() + " from UserConnection u where " + this.getProviderIdJpql() + " = :providerId and " + this.getProviderUserIdJpql() + " in (:providerUserIds)", String.class);
        query.setParameter("providerId", providerId);
        query.setParameter("providerUserIds", providerUserIds);
        // the HashSet de-duplicates user ids returned by the query
        return new HashSet<String>(query.getResultList());
    }

    /**
     * Returns the connections of the user on the given provider, ordered by
     * rank (the first element is the primary connection).
     */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public List<RemoteUser> getPrimary(String userId, String providerId) {
        TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getUserIdJpql() + " = :userId and " + this.getProviderIdJpql() + " = :providerId order by u.rank", RemoteUser.class);
        query.setParameter("userId", userId);
        query.setParameter("providerId", providerId);
        return query.getResultList();
    }

    /**
     * Returns the next free rank for a new connection of the user on the given
     * provider: one past the current maximum, or 1 when no connection exists.
     */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public int getRank(String userId, String providerId) {
        TypedQuery<Integer> query = this.entityManager.createQuery("select max(u.rank) from UserConnection u where " + this.getUserIdJpql() + "= :userId and " + this.getProviderIdJpql() + " = :providerId", Integer.class);
        query.setParameter("userId", userId);
        query.setParameter("providerId", providerId);
        Integer result = query.getSingleResult();
        // max(...) yields null when the user has no connections on this provider
        return result == null ? 1 : result.intValue() + 1;
    }

    /**
     * Returns all of the user's connections matching the given provider ->
     * provider-user-id multimap, ordered by rank within each provider.
     */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public List<RemoteUser> getAll(String userId, MultiValueMap<String, String> providerUsers) {
        List<RemoteUser> userList = new ArrayList<RemoteUser>();
        for (Entry<String, List<String>> entry : providerUsers.entrySet()) {
            TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getUserIdJpql() + " = :userId and " + this.getProviderIdJpql() + "= :providerId and " + this.getProviderUserIdJpql() + " in (:providerUserIds) order by u.rank", RemoteUser.class);
            query.setParameter("userId", userId);
            query.setParameter("providerId", entry.getKey());
            query.setParameter("providerUserIds", entry.getValue());
            userList.addAll(query.getResultList());
        }
        return userList;
    }

    /** Returns all connections of the user across all providers, ordered by rank. */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public List<RemoteUser> getAll(String userId) {
        TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getUserIdJpql() + " = :userId order by u.rank", RemoteUser.class);
        query.setParameter("userId", userId);
        return query.getResultList();
    }

    /** Returns all connections of the user on the given provider, ordered by rank. */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public List<RemoteUser> getAll(String userId, String providerId) {
        TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getUserIdJpql() + " = :userId and " + this.getProviderIdJpql() + "= :providerId order by u.rank", RemoteUser.class);
        query.setParameter("userId", userId);
        query.setParameter("providerId", providerId);
        return query.getResultList();
    }

    /**
     * Returns the single connection identified by (userId, providerId,
     * providerUserId).
     *
     * @throws EmptyResultDataAccessException if no such connection exists
     */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public RemoteUser get(String userId, String providerId, String providerUserId) {
        TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getUserIdJpql() + " = :userId and " + this.getProviderIdJpql() + "= :providerId and " + this.getProviderUserIdJpql() + " = :providerUserId", RemoteUser.class);
        query.setParameter("userId", userId);
        query.setParameter("providerId", providerId);
        query.setParameter("providerUserId", providerUserId);
        List<RemoteUser> userList = query.getResultList();
        if (userList.isEmpty()) {
            throw new EmptyResultDataAccessException(1);
        }
        return userList.get(0);
    }

    /** Returns the connections of all users to the given provider user, ordered by rank. */
    @Transactional(readOnly = true, propagation = Propagation.REQUIRED)
    public List<RemoteUser> get(String providerId, String providerUserId) {
        TypedQuery<RemoteUser> query = this.entityManager.createQuery("select u from UserConnection u where " + this.getProviderIdJpql() + " = :providerId and " + this.getProviderUserIdJpql() + " = :providerUserId order by u.rank", RemoteUser.class);
        query.setParameter("providerId", providerId);
        query.setParameter("providerUserId", providerUserId);
        return query.getResultList();
    }

    /** Removes all of the user's connections on the given provider. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public void remove(String userId, String providerId) {
        for (RemoteUser remoteUser : this.getAll(userId, providerId)) {
            this.entityManager.remove(remoteUser);
        }
    }

    /**
     * Removes the connection identified by (userId, providerId, providerUserId).
     * A non-existent connection is silently ignored (best-effort delete).
     */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public void remove(String userId, String providerId, String providerUserId) {
        try {
            this.entityManager.remove(this.get(userId, providerId, providerUserId));
        } catch (EmptyResultDataAccessException ignored) {
            // connection already absent -- nothing to remove
        }
    }

    /** Constructs a new, unpersisted entity instance with the given attributes. */
    protected abstract U createNewUserConnection(String userId, String providerId, String providerUserId,
            int rank, String displayName, String profileUrl, String imageUrl, String accessToken,
            String secret, String refreshToken, Long expireTime);

    /** Copies the standard connection attributes onto the given entity. */
    protected void setDefaultProperties(U userConnection, String userId, String providerId, String providerUserId, int rank, String displayName, String profileUrl, String imageUrl, String accessToken, String secret, String refreshToken, Long expireTime) {
        userConnection.setUserId(userId);
        userConnection.setProviderId(providerId);
        userConnection.setProviderUserId(providerUserId);
        userConnection.setRank(rank);
        userConnection.setDisplayName(displayName);
        userConnection.setProfileUrl(profileUrl);
        userConnection.setImageUrl(imageUrl);
        userConnection.setAccessToken(accessToken);
        userConnection.setSecret(secret);
        userConnection.setRefreshToken(refreshToken);
        userConnection.setExpireTime(expireTime);
    }

    /**
     * Creates and persists a new user connection.
     *
     * @throws DuplicateConnectionException if a connection with the same
     *         (userId, providerId, providerUserId) already exists
     */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public RemoteUser createRemoteUser(String userId, String providerId, String providerUserId, int rank, String displayName, String profileUrl, String imageUrl, String accessToken, String secret, String refreshToken, Long expireTime) {
        // NOTE(review): raw type kept on purpose -- whether U is assignable to
        // RemoteUser is not visible from this class; confirm before tightening.
        AbstractUserConnection remoteUser = this.createNewUserConnection(userId, providerId, providerUserId, rank, displayName, profileUrl, imageUrl, accessToken, secret, refreshToken, expireTime);
        try {
            // get(...) throws EmptyResultDataAccessException when no connection exists,
            // which is the expected path for a brand-new connection
            if (this.get(userId, providerId, providerUserId) != null) {
                throw new DuplicateConnectionException(new ConnectionKey(providerId, providerUserId));
            }
        } catch (EmptyResultDataAccessException ignored) {
            // no existing connection -- safe to create
        }
        this.save(remoteUser);
        return remoteUser;
    }

    /**
     * Merges the given connection into the persistence context.
     * <p>
     * NOTE(review): the managed instance returned by {@code merge()} is discarded
     * and the (possibly detached) argument is returned, matching the original
     * behavior.
     */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public RemoteUser save(RemoteUser user) {
        this.entityManager.merge(user);
        return user;
    }
}
| |
package de.flexiprovider.pqc.ots.merkle;
import codec.CorruptedCodeException;
import codec.asn1.ASN1ObjectIdentifier;
import codec.asn1.ASN1OctetString;
import codec.asn1.ASN1Sequence;
import codec.pkcs8.PrivateKeyInfo;
import codec.x509.SubjectPublicKeyInfo;
import de.flexiprovider.api.exceptions.InvalidKeyException;
import de.flexiprovider.api.exceptions.InvalidKeySpecException;
import de.flexiprovider.api.keys.Key;
import de.flexiprovider.api.keys.KeyFactory;
import de.flexiprovider.api.keys.KeySpec;
import de.flexiprovider.api.keys.PrivateKey;
import de.flexiprovider.api.keys.PublicKey;
import de.flexiprovider.common.util.ASN1Tools;
import de.flexiprovider.pki.PKCS8EncodedKeySpec;
import de.flexiprovider.pki.X509EncodedKeySpec;
/**
 * Key factory for the Merkle one-time signature (OTS) scheme. Transforms
 * MerkleOTS keys and key specifications into a form usable by the
 * FlexiProvider, and back again.
 *
 * @author Elena Klintsevich
 * @see MerkleOTSPrivateKey
 * @see MerkleOTSPublicKey
 */
public class MerkleOTSKeyFactory extends KeyFactory {

    /**
     * The OID of MerkleOTS.
     */
    public static final String OID = "1.3.6.1.4.1.8301.3.1.3.1.1";

    /**
     * Converts a key specification into a {@link MerkleOTSPublicKey}.
     * Supported specifications: {@link MerkleOTSPublicKeySpec} and
     * {@link X509EncodedKeySpec}.
     *
     * @param keySpec
     *            the key specification
     * @return a public Merkle OTS key
     * @throws InvalidKeySpecException
     *             if the KeySpec is not supported or cannot be decoded
     * @see MerkleOTSPublicKey
     */
    public PublicKey generatePublic(KeySpec keySpec)
            throws InvalidKeySpecException {

        if (keySpec instanceof MerkleOTSPublicKeySpec) {
            MerkleOTSPublicKeySpec spec = (MerkleOTSPublicKeySpec) keySpec;
            return new MerkleOTSPublicKey(spec.getOIDString(), spec.getKeyBytes());
        }

        if (!(keySpec instanceof X509EncodedKeySpec)) {
            throw new InvalidKeySpecException("Unknown KeySpec type.");
        }

        // the DER-encoded key according to X.509 from the spec
        byte[] encKey = ((X509EncodedKeySpec) keySpec).getEncoded();

        // decode the SubjectPublicKeyInfo data structure
        SubjectPublicKeyInfo spki = new SubjectPublicKeyInfo();
        try {
            ASN1Tools.derDecode(encKey, spki);
        } catch (Exception ce) {
            throw new InvalidKeySpecException(
                    "Unable to decode X509EncodedKeySpec.");
        }

        try {
            // the raw key is a sequence of (OID, sequence of octet strings)
            ASN1Sequence pubKey = (ASN1Sequence) spki.getDecodedRawKey();
            String oidString = ((ASN1ObjectIdentifier) pubKey.get(0)).toString();
            byte[][] pubKeyBytes = extractKeyBytes((ASN1Sequence) pubKey.get(1));
            return new MerkleOTSPublicKey(oidString, pubKeyBytes);
        } catch (CorruptedCodeException cce) {
            throw new InvalidKeySpecException(
                    "Unable to decode X509EncodedKeySpec.");
        }
    }

    /**
     * Converts a key specification into a {@link MerkleOTSPrivateKey}.
     * Supported specifications: {@link MerkleOTSPrivateKeySpec} and
     * {@link PKCS8EncodedKeySpec}.
     *
     * @param keySpec
     *            the key specification
     * @return a private Merkle OTS key
     * @throws InvalidKeySpecException
     *             if the KeySpec is not supported or cannot be decoded
     * @see MerkleOTSPrivateKey
     */
    public PrivateKey generatePrivate(KeySpec keySpec)
            throws InvalidKeySpecException {

        if (keySpec instanceof MerkleOTSPrivateKeySpec) {
            MerkleOTSPrivateKeySpec spec = (MerkleOTSPrivateKeySpec) keySpec;
            return new MerkleOTSPrivateKey(spec.getOIDString(), spec.getKeyBytes());
        }

        if (!(keySpec instanceof PKCS8EncodedKeySpec)) {
            throw new InvalidKeySpecException("Unknown KeySpec type.");
        }

        // the DER-encoded key according to PKCS#8 from the spec
        byte[] encKey = ((PKCS8EncodedKeySpec) keySpec).getEncoded();

        // decode the PKCS#8 data structure
        PrivateKeyInfo pki = new PrivateKeyInfo();
        try {
            ASN1Tools.derDecode(encKey, pki);
        } catch (Exception ce) {
            throw new InvalidKeySpecException(
                    "Unable to decode PKCS8EncodedKeySpec.");
        }

        try {
            // the raw key is a sequence of (OID, sequence of octet strings)
            ASN1Sequence privKey = (ASN1Sequence) pki.getDecodedRawKey();
            String oidString = ((ASN1ObjectIdentifier) privKey.get(0)).toString();
            byte[][] privKeyBytes = extractKeyBytes((ASN1Sequence) privKey.get(1));
            return new MerkleOTSPrivateKey(oidString, privKeyBytes);
        } catch (CorruptedCodeException cce) {
            throw new InvalidKeySpecException(
                    "Unable to decode PKCS8EncodedKeySpec.");
        }
    }

    /**
     * Extracts the two-dimensional key byte array from an ASN.1 sequence of
     * octet strings (one row per octet string). Shared by the public and
     * private key decoding paths.
     */
    private static byte[][] extractKeyBytes(ASN1Sequence keySequence) {
        byte[][] keyBytes = new byte[keySequence.size()][];
        for (int i = 0; i < keyBytes.length; i++) {
            keyBytes[i] = ((ASN1OctetString) keySequence.get(i)).getByteArray();
        }
        return keyBytes;
    }

    /**
     * Converts a given key into a key specification. Supported combinations:
     * <UL>
     * <LI> for MerkleOTSPrivateKey: PKCS8EncodedKeySpec, MerkleOTSPrivateKeySpec
     * <LI> for MerkleOTSPublicKey: X509EncodedKeySpec, MerkleOTSPublicKeySpec.
     * </UL>
     *
     * @param key
     *            the key
     * @param spec
     *            the class of which type the returned spec should be
     * @return the specification of the MerkleOTS key
     * @throws InvalidKeySpecException
     *             if the key/spec combination is not supported
     * @see MerkleOTSPrivateKey
     * @see MerkleOTSPublicKey
     */
    public KeySpec getKeySpec(Key key, Class spec)
            throws InvalidKeySpecException {

        if (key instanceof MerkleOTSPrivateKey) {
            if (PKCS8EncodedKeySpec.class.isAssignableFrom(spec)) {
                return new PKCS8EncodedKeySpec(key.getEncoded());
            }
            if (MerkleOTSPrivateKeySpec.class.isAssignableFrom(spec)) {
                MerkleOTSPrivateKey privKey = (MerkleOTSPrivateKey) key;
                return new MerkleOTSPrivateKeySpec(privKey.getOIDString(),
                        privKey.getKeyBytes());
            }
        } else if (key instanceof MerkleOTSPublicKey) {
            if (X509EncodedKeySpec.class.isAssignableFrom(spec)) {
                return new X509EncodedKeySpec(key.getEncoded());
            }
            if (MerkleOTSPublicKeySpec.class.isAssignableFrom(spec)) {
                MerkleOTSPublicKey pubKey = (MerkleOTSPublicKey) key;
                return new MerkleOTSPublicKeySpec(pubKey.getOIDString(),
                        pubKey.getKeyBytes());
            }
        }

        throw new InvalidKeySpecException("Unknown KeySpec.");
    }

    /**
     * Translates a key into a form known by the FlexiProvider. Supported key
     * types: MerkleOTSPrivateKey, MerkleOTSPublicKey.
     *
     * @param key
     *            the key
     * @return a key of a known key type
     * @throws InvalidKeyException
     *             if the key type is not supported
     */
    public Key translateKey(Key key) throws InvalidKeyException {
        if ((key instanceof MerkleOTSPrivateKey)
                || (key instanceof MerkleOTSPublicKey)) {
            return key;
        }
        throw new InvalidKeyException("Unsupported key type.");
    }
}
| |
/*
* Copyright 2009-2016 European Molecular Biology Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package uk.ac.ebi.biostudies.utils.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.ebi.biostudies.utils.saxon.search.IQueryExpander;
import uk.ac.ebi.biostudies.utils.saxon.search.IndexEnvironment;
import uk.ac.ebi.biostudies.utils.saxon.search.QueryInfo;
import java.io.IOException;
import java.util.List;
/**
 * Expands Lucene queries with EFO (Experimental Factor Ontology) terms and
 * synonyms obtained from an {@link IEFOExpansionLookup}. Boolean queries are
 * expanded clause by clause; prefix and wildcard queries are returned as-is.
 */
public final class EFOQueryExpander implements IQueryExpander {
    // logging machinery
    private final Logger logger = LoggerFactory.getLogger(getClass());

    // When a query would expand to more terms than this, flag it via the
    // "tooManyExpansionTerms" parameter instead of expanding -- unless the
    // request explicitly opted in via the "expand" parameter.
    private static final int MAX_EXPANSION_TERMS = 1000;

    // Source of EFO expansion terms and synonyms for a given query.
    private final IEFOExpansionLookup lookup;

    public EFOQueryExpander(IEFOExpansionLookup lookup) {
        this.lookup = lookup;
    }

    /** Creates a query-info holder capable of carrying EFO expansion state. */
    public QueryInfo newQueryInfo() {
        return new EFOExpandableQueryInfo();
    }

    /**
     * Expands the query held by {@code info} when it is EFO-expandable;
     * otherwise returns the original query unchanged.
     *
     * @throws IOException if the expansion lookup fails
     */
    public Query expandQuery(IndexEnvironment env, QueryInfo info) throws IOException {
        EFOExpandableQueryInfo queryInfo = null;

        if (info instanceof EFOExpandableQueryInfo) {
            queryInfo = (EFOExpandableQueryInfo) info;
        }

        if (null != queryInfo) {
            queryInfo.setOriginalQuery(queryInfo.getQuery());
            return expand(env, queryInfo, queryInfo.getQuery());
        } else {
            return info.getQuery();
        }
    }

    /**
     * Recursively expands a query: boolean queries are expanded clause by
     * clause, prefix/wildcard queries are left untouched, and everything else
     * is handed to {@link #doExpand}.
     */
    private Query expand(IndexEnvironment env, EFOExpandableQueryInfo queryInfo, Query query) throws IOException {
        Query result;

        if (query instanceof BooleanQuery) {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            List<BooleanClause> clauses = ((BooleanQuery) query).clauses();
            for (BooleanClause c : clauses) {
                builder.add(
                        expand(env, queryInfo, c.getQuery())
                        , c.getOccur()
                );
            }
            result = builder.build();
        } else if (query instanceof PrefixQuery || query instanceof WildcardQuery) {
            // we don't expand prefix or wildcard queries yet (because there are side-effects
            // we need to take care of first
            // for example, for prefix query will found multi-worded terms which, well, is wrong
            return query;
        } else {
            result = doExpand(env, queryInfo, query);
        }
        return result;
    }

    /**
     * Expands a single leaf query with EFO terms and synonyms for its field,
     * provided the field is configured as an expandable string field. Returns
     * the original query when nothing applies.
     */
    private Query doExpand(IndexEnvironment env, EFOExpandableQueryInfo queryInfo, Query query) throws IOException {
        String field = getQueryField(query);

        if (null != field) {
            if (env.fields.containsKey(field) && "string".equalsIgnoreCase(env.fields.get(field).type) && env.fields.get(field).shouldExpand) {
                EFOExpansionTerms expansionTerms = lookup.getExpansionTerms(query);
                if (MAX_EXPANSION_TERMS < expansionTerms.efo.size() + expansionTerms.synonyms.size()
                        && !queryInfo.getParams().containsKey("expand")) {
                    // too many terms: flag it and leave the query unexpanded
                    queryInfo.getParams().put("tooManyExpansionTerms", new String[]{"true"});
                } else if (0 != expansionTerms.efo.size() || 0 != expansionTerms.synonyms.size()) {
                    BooleanQuery.Builder boolQueryBuilder = new BooleanQuery.Builder();
                    boolQueryBuilder.add(query, BooleanClause.Occur.SHOULD);

                    for (String term : expansionTerms.synonyms) {
                        Query synonymPart = newQueryFromString(term.trim(), field);
                        // skip synonyms the original query already covers
                        if (!queryPartIsRedundant(query, synonymPart)) {
                            boolQueryBuilder.add(synonymPart, BooleanClause.Occur.SHOULD);
                            queryInfo.addToSynonymPartQuery(synonymPart);
                        }
                    }

                    for (String term : expansionTerms.efo) {
                        Query expansionPart = newQueryFromString(term.trim(), field);
                        boolQueryBuilder.add(expansionPart, BooleanClause.Occur.SHOULD);
                        queryInfo.addToEfoExpansionPartQuery(expansionPart);
                    }
                    return boolQueryBuilder.build();
                }
            }
        }
        return query;
    }

    /**
     * Returns the field the query targets, or {@code null} for unsupported
     * query types or phrase queries without terms.
     */
    private String getQueryField(Query query) {
        String field = null;
        try {
            if (query instanceof PrefixQuery) {
                field = ((PrefixQuery) query).getPrefix().field();
            } else if (query instanceof WildcardQuery) {
                field = ((WildcardQuery) query).getTerm().field();
            } else if (query instanceof TermRangeQuery) {
                field = ((TermRangeQuery) query).getField();
            } else if (query instanceof FuzzyQuery) {
                field = ((FuzzyQuery) query).getTerm().field();
            } else if (query instanceof TermQuery) {
                field = ((TermQuery) query).getTerm().field();
            } else if (query instanceof PhraseQuery) {
                Term[] terms = ((PhraseQuery) query).getTerms();
                if (0 == terms.length) {
                    logger.error("No terms found for query [{}]", query.toString());
                    return null;
                }
                field = terms[0].field();
            } else {
                logger.error("Unsupported class [{}] for query [{}]", query.getClass().getName(), query.toString());
                return null;
            }
        } catch (UnsupportedOperationException x) {
            logger.error("Query of [{}], class [{}] doesn't allow us to get its terms extracted", query.toString(), query.getClass().getCanonicalName());
        }

        return field;
    }

    /**
     * Builds a term query for single-word text, or a phrase query when the
     * text contains whitespace.
     */
    public Query newQueryFromString(String text, String field) {
        if (text.contains(" ")) {
            String[] tokens = text.split("\\s+");
            PhraseQuery.Builder builder = new PhraseQuery.Builder();
            for (String token : tokens) {
                builder.add(new Term(field, token));
            }
            return builder.build();
        } else {
            return new TermQuery(new Term(field, text));
        }
    }

    /**
     * Returns true when {@code part} is already matched by {@code query}
     * (same string, covered by the prefix, or matched by the wildcard).
     */
    private boolean queryPartIsRedundant(Query query, Query part) {
        Term partTerm = getFirstTerm(part);

        if (query instanceof PrefixQuery) {
            Term prefixTerm = ((PrefixQuery) query).getPrefix();
            return prefixTerm.field().equals(partTerm.field()) && (partTerm.text().startsWith(prefixTerm.text()));
        } else if (query instanceof WildcardQuery) {
            Term wildcardTerm = ((WildcardQuery) query).getTerm();
            // convert the Lucene wildcard syntax into an anchored regex
            String wildcard = "^" + wildcardTerm.text().replaceAll("\\?", "\\.").replaceAll("\\*", "\\.*") + "$";
            return wildcardTerm.field().equals(partTerm.field()) && (partTerm.text().matches(wildcard));
        } else {
            return query.toString().equals(part.toString());
        }
    }

    /**
     * Returns the first term of the query, or an empty dummy term when none
     * can be extracted.
     */
    private Term getFirstTerm(Query query) {
        Term term = new Term("", "");
        if (query instanceof BooleanQuery) {
            List<BooleanClause> clauses = ((BooleanQuery) query).clauses();
            if (0 < clauses.size()) {
                return getFirstTerm(clauses.get(0).getQuery());
            } else {
                return term;
            }
        } else if (query instanceof PrefixQuery) {
            term = ((PrefixQuery) query).getPrefix();
        } else if (query instanceof WildcardQuery) {
            term = ((WildcardQuery) query).getTerm();
        } else if (query instanceof TermRangeQuery) {
            term = new Term(((TermRangeQuery) query).getField(), "");
        } else if (query instanceof FuzzyQuery) {
            term = ((FuzzyQuery) query).getTerm();
        } else if (query instanceof TermQuery) {
            term = ((TermQuery) query).getTerm();
        } else if (query instanceof PhraseQuery) {
            Term[] terms = ((PhraseQuery) query).getTerms();
            if (0 == terms.length) {
                logger.error("No terms found for query [{}]", query.toString());
                return term;
            }
            term = terms[0];
        } else {
            logger.error("Unsupported class [{}] for query [{}]", query.getClass().getName(), query.toString());
            return term;
        }
        return term;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.tools.CopyListing;
import org.apache.hadoop.tools.DistCpConstants;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
import org.apache.hadoop.tools.GlobbedCopyListing;
import org.apache.hadoop.tools.util.TestDistCpUtils;
import org.apache.hadoop.security.Credentials;
import org.junit.*;
import java.io.IOException;
import java.util.*;
public class TestCopyCommitter {
private static final Log LOG = LogFactory.getLog(TestCopyCommitter.class);
private static final Random rand = new Random();
private static final Credentials CREDENTIALS = new Credentials();
public static final int PORT = 39737;
private static Configuration config;
private static MiniDFSCluster cluster;
private static Job getJobForClient() throws IOException {
Job job = Job.getInstance(new Configuration());
job.getConfiguration().set("mapred.job.tracker", "localhost:" + PORT);
job.setInputFormatClass(NullInputFormat.class);
job.setOutputFormatClass(NullOutputFormat.class);
job.setNumReduceTasks(0);
return job;
}
    /**
     * Spins up a single-node {@link MiniDFSCluster} once for the whole test
     * class and captures the job configuration used by every test.
     *
     * @throws IOException if the mini cluster cannot be started
     */
    @BeforeClass
    public static void create() throws IOException {
        config = getJobForClient().getConfiguration();
        config.setLong(DistCpConstants.CONF_LABEL_TOTAL_BYTES_TO_BE_COPIED, 0);
        cluster = new MiniDFSCluster.Builder(config).numDataNodes(1).format(true)
                .build();
    }
@AfterClass
public static void destroy() {
if (cluster != null) {
cluster.shutdown();
}
}
    /**
     * Creates the /meta folder used by the committer for its bookkeeping
     * before each test, and points the configuration at it.
     */
    @Before
    public void createMetaFolder() {
        config.set(DistCpConstants.CONF_LABEL_META_FOLDER, "/meta");
        Path meta = new Path("/meta");
        try {
            cluster.getFileSystem().mkdirs(meta);
        } catch (IOException e) {
            LOG.error("Exception encountered while creating meta folder", e);
            Assert.fail("Unable to create meta folder");
        }
    }
@After
public void cleanupMetaFolder() {
Path meta = new Path("/meta");
try {
if (cluster.getFileSystem().exists(meta)) {
cluster.getFileSystem().delete(meta, true);
Assert.fail("Expected meta folder to be deleted");
}
} catch (IOException e) {
LOG.error("Exception encountered while cleaning up folder", e);
Assert.fail("Unable to clean up meta folder");
}
}
@Test
public void testNoCommitAction() {
TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
taskAttemptContext.getTaskAttemptID().getJobID());
try {
OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
committer.commitJob(jobContext);
Assert.assertEquals(taskAttemptContext.getStatus(), "Commit Successful");
//Test for idempotent commit
committer.commitJob(jobContext);
Assert.assertEquals(taskAttemptContext.getStatus(), "Commit Successful");
} catch (IOException e) {
LOG.error("Exception encountered ", e);
Assert.fail("Commit failed");
}
}
    /**
     * Verifies that committing the copy job applies the preserved PERMISSION
     * attribute from the source tree onto the target tree, and that a second
     * commit is idempotent.
     */
    @Test
    public void testPreserveStatus() {
        TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
        JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
                taskAttemptContext.getTaskAttemptID().getJobID());
        Configuration conf = jobContext.getConfiguration();
        String sourceBase;
        String targetBase;
        FileSystem fs = null;
        try {
            OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
            fs = FileSystem.get(conf);
            FsPermission sourcePerm = new FsPermission((short) 511); // octal 0777
            FsPermission initialPerm = new FsPermission((short) 448); // octal 0700
            sourceBase = TestDistCpUtils.createTestSetup(fs, sourcePerm);
            targetBase = TestDistCpUtils.createTestSetup(fs, initialPerm);
            DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)),
                    new Path("/out"));
            options.preserve(FileAttribute.PERMISSION);
            options.appendToConf(conf);
            CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
            Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
            listing.buildListing(listingFile, options);
            conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
            // commit should rewrite the target permissions to match the source
            committer.commitJob(jobContext);
            if (!checkDirectoryPermissions(fs, targetBase, sourcePerm)) {
                Assert.fail("Permission don't match");
            }
            //Test for idempotent commit
            committer.commitJob(jobContext);
            if (!checkDirectoryPermissions(fs, targetBase, sourcePerm)) {
                Assert.fail("Permission don't match");
            }
        } catch (IOException e) {
            LOG.error("Exception encountered while testing for preserve status", e);
            Assert.fail("Preserve status failure");
        } finally {
            TestDistCpUtils.delete(fs, "/tmp1");
            // undo the preserve-status option so later tests see a clean config
            conf.unset(DistCpConstants.CONF_LABEL_PRESERVE_STATUS);
        }
    }
    /**
     * Verifies that with sync-folder and delete-missing enabled, commit removes
     * target entries that have no counterpart in the source, leaving both trees
     * in sync; a second commit must be idempotent.
     */
    @Test
    public void testDeleteMissing() {
        TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
        JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
                taskAttemptContext.getTaskAttemptID().getJobID());
        Configuration conf = jobContext.getConfiguration();
        String sourceBase;
        String targetBase;
        FileSystem fs = null;
        try {
            OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
            fs = FileSystem.get(conf);
            sourceBase = TestDistCpUtils.createTestSetup(fs, FsPermission.getDefault());
            targetBase = TestDistCpUtils.createTestSetup(fs, FsPermission.getDefault());
            // move an extra tree under the target so it has entries missing from the source
            String targetBaseAdd = TestDistCpUtils.createTestSetup(fs, FsPermission.getDefault());
            fs.rename(new Path(targetBaseAdd), new Path(targetBase));
            DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)),
                    new Path("/out"));
            options.setSyncFolder(true);
            options.setDeleteMissing(true);
            options.appendToConf(conf);
            CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
            Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
            listing.buildListing(listingFile, options);
            conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
            conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, targetBase);
            committer.commitJob(jobContext);
            // check sync in both directions: nothing extra on either side
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, targetBase, sourceBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, sourceBase, targetBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
            //Test for idempotent commit
            committer.commitJob(jobContext);
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, targetBase, sourceBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, sourceBase, targetBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
        } catch (Throwable e) {
            LOG.error("Exception encountered while testing for delete missing", e);
            Assert.fail("Delete missing failure");
        } finally {
            TestDistCpUtils.delete(fs, "/tmp1");
            // undo the delete-missing option so later tests see a clean config
            conf.set(DistCpConstants.CONF_LABEL_DELETE_MISSING, "false");
        }
    }
    /**
     * Delete-missing over two flat directories with interleaved file names:
     * after commit only the files present in the source (4, 5, 7, 9 of the
     * target's originals) must remain in the target; a second commit must be
     * idempotent.
     */
    @Test
    public void testDeleteMissingFlatInterleavedFiles() {
        TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
        JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
                taskAttemptContext.getTaskAttemptID().getJobID());
        Configuration conf = jobContext.getConfiguration();
        String sourceBase;
        String targetBase;
        FileSystem fs = null;
        try {
            OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
            fs = FileSystem.get(conf);
            sourceBase = "/tmp1/" + String.valueOf(rand.nextLong());
            targetBase = "/tmp1/" + String.valueOf(rand.nextLong());
            // source files: 1, 3, 4, 5, 7, 8, 9
            TestDistCpUtils.createFile(fs, sourceBase + "/1");
            TestDistCpUtils.createFile(fs, sourceBase + "/3");
            TestDistCpUtils.createFile(fs, sourceBase + "/4");
            TestDistCpUtils.createFile(fs, sourceBase + "/5");
            TestDistCpUtils.createFile(fs, sourceBase + "/7");
            TestDistCpUtils.createFile(fs, sourceBase + "/8");
            TestDistCpUtils.createFile(fs, sourceBase + "/9");
            // target files: 2, 4, 5, 7, 9, A -- 2 and A must be deleted by the commit
            TestDistCpUtils.createFile(fs, targetBase + "/2");
            TestDistCpUtils.createFile(fs, targetBase + "/4");
            TestDistCpUtils.createFile(fs, targetBase + "/5");
            TestDistCpUtils.createFile(fs, targetBase + "/7");
            TestDistCpUtils.createFile(fs, targetBase + "/9");
            TestDistCpUtils.createFile(fs, targetBase + "/A");
            DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)),
                    new Path("/out"));
            options.setSyncFolder(true);
            options.setDeleteMissing(true);
            options.appendToConf(conf);
            CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
            Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
            listing.buildListing(listingFile, options);
            conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
            conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, targetBase);
            committer.commitJob(jobContext);
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, targetBase, sourceBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
            // only the 4 files common to source and target survive
            Assert.assertEquals(fs.listStatus(new Path(targetBase)).length, 4);
            //Test for idempotent commit
            committer.commitJob(jobContext);
            if (!TestDistCpUtils.checkIfFoldersAreInSync(fs, targetBase, sourceBase)) {
                Assert.fail("Source and target folders are not in sync");
            }
            Assert.assertEquals(fs.listStatus(new Path(targetBase)).length, 4);
        } catch (IOException e) {
            LOG.error("Exception encountered while testing for delete missing", e);
            Assert.fail("Delete missing failure");
        } finally {
            TestDistCpUtils.delete(fs, "/tmp1");
            // undo the delete-missing option so later tests see a clean config
            conf.set(DistCpConstants.CONF_LABEL_DELETE_MISSING, "false");
        }
    }
@Test
public void testAtomicCommitMissingFinal() {
TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
taskAttemptContext.getTaskAttemptID().getJobID());
Configuration conf = jobContext.getConfiguration();
String workPath = "/tmp1/" + String.valueOf(rand.nextLong());
String finalPath = "/tmp1/" + String.valueOf(rand.nextLong());
FileSystem fs = null;
try {
OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
fs = FileSystem.get(conf);
fs.mkdirs(new Path(workPath));
conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, workPath);
conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, finalPath);
conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, true);
Assert.assertTrue(fs.exists(new Path(workPath)));
Assert.assertFalse(fs.exists(new Path(finalPath)));
committer.commitJob(jobContext);
Assert.assertFalse(fs.exists(new Path(workPath)));
Assert.assertTrue(fs.exists(new Path(finalPath)));
//Test for idempotent commit
committer.commitJob(jobContext);
Assert.assertFalse(fs.exists(new Path(workPath)));
Assert.assertTrue(fs.exists(new Path(finalPath)));
} catch (IOException e) {
LOG.error("Exception encountered while testing for preserve status", e);
Assert.fail("Atomic commit failure");
} finally {
TestDistCpUtils.delete(fs, workPath);
TestDistCpUtils.delete(fs, finalPath);
conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, false);
}
}
@Test
public void testAtomicCommitExistingFinal() {
TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
taskAttemptContext.getTaskAttemptID().getJobID());
Configuration conf = jobContext.getConfiguration();
String workPath = "/tmp1/" + String.valueOf(rand.nextLong());
String finalPath = "/tmp1/" + String.valueOf(rand.nextLong());
FileSystem fs = null;
try {
OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
fs = FileSystem.get(conf);
fs.mkdirs(new Path(workPath));
fs.mkdirs(new Path(finalPath));
conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, workPath);
conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, finalPath);
conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, true);
Assert.assertTrue(fs.exists(new Path(workPath)));
Assert.assertTrue(fs.exists(new Path(finalPath)));
try {
committer.commitJob(jobContext);
Assert.fail("Should not be able to atomic-commit to pre-existing path.");
} catch(Exception exception) {
Assert.assertTrue(fs.exists(new Path(workPath)));
Assert.assertTrue(fs.exists(new Path(finalPath)));
LOG.info("Atomic-commit Test pass.");
}
} catch (IOException e) {
LOG.error("Exception encountered while testing for atomic commit.", e);
Assert.fail("Atomic commit failure");
} finally {
TestDistCpUtils.delete(fs, workPath);
TestDistCpUtils.delete(fs, finalPath);
conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, false);
}
}
private TaskAttemptContext getTaskAttemptContext(Configuration conf) {
return new TaskAttemptContextImpl(conf,
new TaskAttemptID("200707121733", 1, TaskType.MAP, 1, 1));
}
private boolean checkDirectoryPermissions(FileSystem fs, String targetBase,
FsPermission sourcePerm) throws IOException {
Path base = new Path(targetBase);
Stack<Path> stack = new Stack<Path>();
stack.push(base);
while (!stack.isEmpty()) {
Path file = stack.pop();
if (!fs.exists(file)) continue;
FileStatus[] fStatus = fs.listStatus(file);
if (fStatus == null || fStatus.length == 0) continue;
for (FileStatus status : fStatus) {
if (status.isDirectory()) {
stack.push(status.getPath());
Assert.assertEquals(status.getPermission(), sourcePerm);
}
}
}
return true;
}
private static class NullInputFormat extends InputFormat {
@Override
public List getSplits(JobContext context)
throws IOException, InterruptedException {
return Collections.EMPTY_LIST;
}
@Override
public RecordReader createRecordReader(InputSplit split,
TaskAttemptContext context)
throws IOException, InterruptedException {
return null;
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents a Subnetwork resource. A subnetwork (also known as a subnet) is a logical partition of
* a Virtual Private Cloud network with one primary IP range and zero or more secondary IP ranges.
* For more information, read Virtual Private Cloud (VPC) Network.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Subnetwork extends com.google.api.client.json.GenericJson {
  // Generated serialized state: every field below is mapped to the same-named JSON
  // property via @com.google.api.client.util.Key (the class extends GenericJson).
  /**
   * Whether this subnetwork's ranges can conflict with existing static routes. Setting this to true
   * allows this subnetwork's primary and secondary ranges to overlap with (and contain) static
   * routes that have already been configured on the corresponding network. For example if a static
   * route has range 10.1.0.0/16, a subnet range 10.0.0.0/8 could only be created if
   * allow_conflicting_routes=true. Overlapping is only allowed on subnetwork operations; routes
   * whose ranges conflict with this subnetwork's ranges won't be allowed unless
   * route.allow_conflicting_subnetworks is set to true. Typically packets destined to IPs within
   * the subnetwork (which may contain private/sensitive data) are prevented from leaving the
   * virtual network. Setting this field to true will disable this feature. The default value is
   * false and applies to all existing subnetworks and automatically created subnetworks. This field
   * cannot be set to true at resource creation time.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean allowSubnetCidrRoutesOverlap;
  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String creationTimestamp;
  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * This field can be set only at resource creation time.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;
  /**
   * Whether to enable flow logging for this subnetwork. If this field is not explicitly set, it
   * will not appear in get listings. If not set the default behavior is determined by the org
   * policy, if there is no org policy specified, then it will default to disabled. This field isn't
   * supported with the purpose field set to INTERNAL_HTTPS_LOAD_BALANCER.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean enableFlowLogs;
  /**
   * [Output Only] The range of external IPv6 addresses that are owned by this subnetwork.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String externalIpv6Prefix;
  /**
   * Fingerprint of this resource. A hash of the contents stored in this object. This field is used
   * in optimistic locking. This field will be ignored when inserting a Subnetwork. An up-to-date
   * fingerprint must be provided in order to update the Subnetwork, otherwise the request will fail
   * with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve
   * a Subnetwork.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String fingerprint;
  /**
   * [Output Only] The gateway address for default routes to reach destination addresses outside
   * this subnetwork.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String gatewayAddress;
  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * The value may be {@code null}.
   */
  // @JsonString marks this BigInteger for serialization as a JSON string rather
  // than a JSON number.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.math.BigInteger id;
  /**
   * [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
   * is for general VM to VM communication, not to be confused with the ipv6_cidr_range field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String internalIpv6Prefix;
  /**
   * The range of internal addresses that are owned by this subnetwork. Provide this property when
   * you create the subnetwork. For example, 10.0.0.0/8 or 100.64.0.0/10. Ranges must be unique and
   * non-overlapping within a network. Only IPv4 is supported. This field is set at resource
   * creation time. The range can be any range listed in the Valid ranges list. The range can be
   * expanded after creation using expandIpCidrRange.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String ipCidrRange;
  /**
   * The access type of IPv6 address this subnet holds. It's immutable and can only be specified
   * during creation or the first time the subnet is updated into IPV4_IPV6 dual stack. If the
   * ipv6_type is EXTERNAL then this subnet cannot enable direct path.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String ipv6AccessType;
  /**
   * [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
   * will be for private google access only eventually.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String ipv6CidrRange;
  /**
   * [Output Only] Type of the resource. Always compute#subnetwork for Subnetwork resources.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * This field denotes the VPC flow logging options for this subnetwork. If logging is enabled,
   * logs are exported to Cloud Logging.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private SubnetworkLogConfig logConfig;
  /**
   * The name of the resource, provided by the client when initially creating the resource. The name
   * must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63
   * characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
   * first character must be a lowercase letter, and all following characters must be a dash,
   * lowercase letter, or digit, except the last character, which cannot be a dash.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * The URL of the network to which this subnetwork belongs, provided by the client when initially
   * creating the subnetwork. This field can be set only at resource creation time.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String network;
  /**
   * Whether the VMs in this subnet can access Google services without assigned external IP
   * addresses. This field can be both set at resource creation time and updated using
   * setPrivateIpGoogleAccess.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean privateIpGoogleAccess;
  /**
   * The private IPv6 google access type for the VMs in this subnet. This is an expanded field of
   * enablePrivateV6Access. If both fields are set, privateIpv6GoogleAccess will take priority. This
   * field can be both set at resource creation time and updated using patch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String privateIpv6GoogleAccess;
  /**
   * The purpose of the resource. This field can be either PRIVATE_RFC_1918 or
   * INTERNAL_HTTPS_LOAD_BALANCER. A subnetwork with purpose set to INTERNAL_HTTPS_LOAD_BALANCER is
   * a user-created subnetwork that is reserved for Internal HTTP(S) Load Balancing. If unspecified,
   * the purpose defaults to PRIVATE_RFC_1918. The enableFlowLogs field isn't supported with the
   * purpose field set to INTERNAL_HTTPS_LOAD_BALANCER.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String purpose;
  /**
   * URL of the region where the Subnetwork resides. This field can be set only at resource creation
   * time.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String region;
  /**
   * The role of subnetwork. Currently, this field is only used when purpose =
   * INTERNAL_HTTPS_LOAD_BALANCER. The value can be set to ACTIVE or BACKUP. An ACTIVE subnetwork is
   * one that is currently being used for Internal HTTP(S) Load Balancing. A BACKUP subnetwork is
   * one that is ready to be promoted to ACTIVE or is currently draining. This field can be updated
   * with a patch request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String role;
  /**
   * An array of configurations for secondary IP ranges for VM instances contained in this
   * subnetwork. The primary IP of such VM must belong to the primary ipCidrRange of the subnetwork.
   * The alias IPs may belong to either primary or secondary ranges. This field can be updated with
   * a patch request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<SubnetworkSecondaryRange> secondaryIpRanges;
  /**
   * [Output Only] Server-defined URL for the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;
  /**
   * The stack type for the subnet. If set to IPV4_ONLY, new VMs in the subnet are assigned IPv4
   * addresses only. If set to IPV4_IPV6, new VMs in the subnet can be assigned both IPv4 and IPv6
   * addresses. If not specified, IPV4_ONLY is used. This field can be both set at resource creation
   * time and updated using patch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String stackType;
  /**
   * [Output Only] The state of the subnetwork, which can be one of the following values: READY:
   * Subnetwork is created and ready to use DRAINING: only applicable to subnetworks that have the
   * purpose set to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to the load balancer
   * are being drained. A subnetwork that is draining cannot be used or modified until it reaches a
   * status of READY
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;
/**
* Whether this subnetwork's ranges can conflict with existing static routes. Setting this to true
* allows this subnetwork's primary and secondary ranges to overlap with (and contain) static
* routes that have already been configured on the corresponding network. For example if a static
* route has range 10.1.0.0/16, a subnet range 10.0.0.0/8 could only be created if
* allow_conflicting_routes=true. Overlapping is only allowed on subnetwork operations; routes
* whose ranges conflict with this subnetwork's ranges won't be allowed unless
* route.allow_conflicting_subnetworks is set to true. Typically packets destined to IPs within
* the subnetwork (which may contain private/sensitive data) are prevented from leaving the
* virtual network. Setting this field to true will disable this feature. The default value is
* false and applies to all existing subnetworks and automatically created subnetworks. This field
* cannot be set to true at resource creation time.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAllowSubnetCidrRoutesOverlap() {
return allowSubnetCidrRoutesOverlap;
}
/**
* Whether this subnetwork's ranges can conflict with existing static routes. Setting this to true
* allows this subnetwork's primary and secondary ranges to overlap with (and contain) static
* routes that have already been configured on the corresponding network. For example if a static
* route has range 10.1.0.0/16, a subnet range 10.0.0.0/8 could only be created if
* allow_conflicting_routes=true. Overlapping is only allowed on subnetwork operations; routes
* whose ranges conflict with this subnetwork's ranges won't be allowed unless
* route.allow_conflicting_subnetworks is set to true. Typically packets destined to IPs within
* the subnetwork (which may contain private/sensitive data) are prevented from leaving the
* virtual network. Setting this field to true will disable this feature. The default value is
* false and applies to all existing subnetworks and automatically created subnetworks. This field
* cannot be set to true at resource creation time.
* @param allowSubnetCidrRoutesOverlap allowSubnetCidrRoutesOverlap or {@code null} for none
*/
public Subnetwork setAllowSubnetCidrRoutesOverlap(java.lang.Boolean allowSubnetCidrRoutesOverlap) {
this.allowSubnetCidrRoutesOverlap = allowSubnetCidrRoutesOverlap;
return this;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Subnetwork setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* This field can be set only at resource creation time.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* This field can be set only at resource creation time.
* @param description description or {@code null} for none
*/
public Subnetwork setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Whether to enable flow logging for this subnetwork. If this field is not explicitly set, it
* will not appear in get listings. If not set the default behavior is determined by the org
* policy, if there is no org policy specified, then it will default to disabled. This field isn't
* supported with the purpose field set to INTERNAL_HTTPS_LOAD_BALANCER.
* @return value or {@code null} for none
*/
public java.lang.Boolean getEnableFlowLogs() {
return enableFlowLogs;
}
/**
* Whether to enable flow logging for this subnetwork. If this field is not explicitly set, it
* will not appear in get listings. If not set the default behavior is determined by the org
* policy, if there is no org policy specified, then it will default to disabled. This field isn't
* supported with the purpose field set to INTERNAL_HTTPS_LOAD_BALANCER.
* @param enableFlowLogs enableFlowLogs or {@code null} for none
*/
public Subnetwork setEnableFlowLogs(java.lang.Boolean enableFlowLogs) {
this.enableFlowLogs = enableFlowLogs;
return this;
}
/**
* [Output Only] The range of external IPv6 addresses that are owned by this subnetwork.
* @return value or {@code null} for none
*/
public java.lang.String getExternalIpv6Prefix() {
return externalIpv6Prefix;
}
/**
* [Output Only] The range of external IPv6 addresses that are owned by this subnetwork.
* @param externalIpv6Prefix externalIpv6Prefix or {@code null} for none
*/
public Subnetwork setExternalIpv6Prefix(java.lang.String externalIpv6Prefix) {
this.externalIpv6Prefix = externalIpv6Prefix;
return this;
}
/**
* Fingerprint of this resource. A hash of the contents stored in this object. This field is used
* in optimistic locking. This field will be ignored when inserting a Subnetwork. An up-to-date
* fingerprint must be provided in order to update the Subnetwork, otherwise the request will fail
* with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve
* a Subnetwork.
* @see #decodeFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getFingerprint() {
return fingerprint;
}
/**
* Fingerprint of this resource. A hash of the contents stored in this object. This field is used
* in optimistic locking. This field will be ignored when inserting a Subnetwork. An up-to-date
* fingerprint must be provided in order to update the Subnetwork, otherwise the request will fail
* with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve
* a Subnetwork.
* @see #getFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(fingerprint);
}
/**
* Fingerprint of this resource. A hash of the contents stored in this object. This field is used
* in optimistic locking. This field will be ignored when inserting a Subnetwork. An up-to-date
* fingerprint must be provided in order to update the Subnetwork, otherwise the request will fail
* with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve
* a Subnetwork.
* @see #encodeFingerprint()
* @param fingerprint fingerprint or {@code null} for none
*/
public Subnetwork setFingerprint(java.lang.String fingerprint) {
this.fingerprint = fingerprint;
return this;
}
/**
* Fingerprint of this resource. A hash of the contents stored in this object. This field is used
* in optimistic locking. This field will be ignored when inserting a Subnetwork. An up-to-date
* fingerprint must be provided in order to update the Subnetwork, otherwise the request will fail
* with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve
* a Subnetwork.
* @see #setFingerprint()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public Subnetwork encodeFingerprint(byte[] fingerprint) {
this.fingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(fingerprint);
return this;
}
/**
* [Output Only] The gateway address for default routes to reach destination addresses outside
* this subnetwork.
* @return value or {@code null} for none
*/
public java.lang.String getGatewayAddress() {
return gatewayAddress;
}
/**
* [Output Only] The gateway address for default routes to reach destination addresses outside
* this subnetwork.
* @param gatewayAddress gatewayAddress or {@code null} for none
*/
public Subnetwork setGatewayAddress(java.lang.String gatewayAddress) {
this.gatewayAddress = gatewayAddress;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Subnetwork setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
* is for general VM to VM communication, not to be confused with the ipv6_cidr_range field.
* @return value or {@code null} for none
*/
public java.lang.String getInternalIpv6Prefix() {
return internalIpv6Prefix;
}
/**
* [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
* is for general VM to VM communication, not to be confused with the ipv6_cidr_range field.
* @param internalIpv6Prefix internalIpv6Prefix or {@code null} for none
*/
public Subnetwork setInternalIpv6Prefix(java.lang.String internalIpv6Prefix) {
this.internalIpv6Prefix = internalIpv6Prefix;
return this;
}
/**
* The range of internal addresses that are owned by this subnetwork. Provide this property when
* you create the subnetwork. For example, 10.0.0.0/8 or 100.64.0.0/10. Ranges must be unique and
* non-overlapping within a network. Only IPv4 is supported. This field is set at resource
* creation time. The range can be any range listed in the Valid ranges list. The range can be
* expanded after creation using expandIpCidrRange.
* @return value or {@code null} for none
*/
public java.lang.String getIpCidrRange() {
return ipCidrRange;
}
/**
* The range of internal addresses that are owned by this subnetwork. Provide this property when
* you create the subnetwork. For example, 10.0.0.0/8 or 100.64.0.0/10. Ranges must be unique and
* non-overlapping within a network. Only IPv4 is supported. This field is set at resource
* creation time. The range can be any range listed in the Valid ranges list. The range can be
* expanded after creation using expandIpCidrRange.
* @param ipCidrRange ipCidrRange or {@code null} for none
*/
public Subnetwork setIpCidrRange(java.lang.String ipCidrRange) {
this.ipCidrRange = ipCidrRange;
return this;
}
/**
* The access type of IPv6 address this subnet holds. It's immutable and can only be specified
* during creation or the first time the subnet is updated into IPV4_IPV6 dual stack. If the
* ipv6_type is EXTERNAL then this subnet cannot enable direct path.
* @return value or {@code null} for none
*/
public java.lang.String getIpv6AccessType() {
return ipv6AccessType;
}
/**
* The access type of IPv6 address this subnet holds. It's immutable and can only be specified
* during creation or the first time the subnet is updated into IPV4_IPV6 dual stack. If the
* ipv6_type is EXTERNAL then this subnet cannot enable direct path.
* @param ipv6AccessType ipv6AccessType or {@code null} for none
*/
public Subnetwork setIpv6AccessType(java.lang.String ipv6AccessType) {
this.ipv6AccessType = ipv6AccessType;
return this;
}
/**
* [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
* will be for private google access only eventually.
* @return value or {@code null} for none
*/
public java.lang.String getIpv6CidrRange() {
return ipv6CidrRange;
}
/**
* [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. Note this
* will be for private google access only eventually.
* @param ipv6CidrRange ipv6CidrRange or {@code null} for none
*/
public Subnetwork setIpv6CidrRange(java.lang.String ipv6CidrRange) {
this.ipv6CidrRange = ipv6CidrRange;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#subnetwork for Subnetwork resources.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#subnetwork for Subnetwork resources.
* @param kind kind or {@code null} for none
*/
public Subnetwork setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* This field denotes the VPC flow logging options for this subnetwork. If logging is enabled,
* logs are exported to Cloud Logging.
* @return value or {@code null} for none
*/
public SubnetworkLogConfig getLogConfig() {
return logConfig;
}
/**
* This field denotes the VPC flow logging options for this subnetwork. If logging is enabled,
* logs are exported to Cloud Logging.
* @param logConfig logConfig or {@code null} for none
*/
public Subnetwork setLogConfig(SubnetworkLogConfig logConfig) {
this.logConfig = logConfig;
return this;
}
/**
* The name of the resource, provided by the client when initially creating the resource. The name
* must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63
* characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
* first character must be a lowercase letter, and all following characters must be a dash,
* lowercase letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The name of the resource, provided by the client when initially creating the resource. The name
* must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63
* characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
* first character must be a lowercase letter, and all following characters must be a dash,
* lowercase letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Subnetwork setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The URL of the network to which this subnetwork belongs, provided by the client when initially
* creating the subnetwork. This field can be set only at resource creation time.
* @return value or {@code null} for none
*/
public java.lang.String getNetwork() {
return network;
}
/**
* The URL of the network to which this subnetwork belongs, provided by the client when initially
* creating the subnetwork. This field can be set only at resource creation time.
* @param network network or {@code null} for none
*/
public Subnetwork setNetwork(java.lang.String network) {
this.network = network;
return this;
}
/**
* Whether the VMs in this subnet can access Google services without assigned external IP
* addresses. This field can be both set at resource creation time and updated using
* setPrivateIpGoogleAccess.
* @return value or {@code null} for none
*/
public java.lang.Boolean getPrivateIpGoogleAccess() {
return privateIpGoogleAccess;
}
/**
* Whether the VMs in this subnet can access Google services without assigned external IP
* addresses. This field can be both set at resource creation time and updated using
* setPrivateIpGoogleAccess.
* @param privateIpGoogleAccess privateIpGoogleAccess or {@code null} for none
*/
public Subnetwork setPrivateIpGoogleAccess(java.lang.Boolean privateIpGoogleAccess) {
this.privateIpGoogleAccess = privateIpGoogleAccess;
return this;
}
/**
* The private IPv6 google access type for the VMs in this subnet. This is an expanded field of
* enablePrivateV6Access. If both fields are set, privateIpv6GoogleAccess will take priority. This
* field can be both set at resource creation time and updated using patch.
* @return value or {@code null} for none
*/
public java.lang.String getPrivateIpv6GoogleAccess() {
return privateIpv6GoogleAccess;
}
/**
* The private IPv6 google access type for the VMs in this subnet. This is an expanded field of
* enablePrivateV6Access. If both fields are set, privateIpv6GoogleAccess will take priority. This
* field can be both set at resource creation time and updated using patch.
* @param privateIpv6GoogleAccess privateIpv6GoogleAccess or {@code null} for none
*/
public Subnetwork setPrivateIpv6GoogleAccess(java.lang.String privateIpv6GoogleAccess) {
this.privateIpv6GoogleAccess = privateIpv6GoogleAccess;
return this;
}
/**
 * Returns the purpose of the resource: either PRIVATE_RFC_1918 (the default when unspecified)
 * or INTERNAL_HTTPS_LOAD_BALANCER, the latter marking a user-created subnetwork reserved for
 * Internal HTTP(S) Load Balancing. Note that enableFlowLogs is not supported together with
 * INTERNAL_HTTPS_LOAD_BALANCER.
 * @return value or {@code null} for none
 */
public java.lang.String getPurpose() {
  return this.purpose;
}
/**
 * Sets the purpose of the resource: either PRIVATE_RFC_1918 (the default when unspecified)
 * or INTERNAL_HTTPS_LOAD_BALANCER, the latter marking a user-created subnetwork reserved for
 * Internal HTTP(S) Load Balancing. Note that enableFlowLogs is not supported together with
 * INTERNAL_HTTPS_LOAD_BALANCER.
 * @param purpose purpose or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setPurpose(java.lang.String purpose) {
  this.purpose = purpose;
  return this;
}
/**
 * Returns the URL of the region where the Subnetwork resides. This field can only be set at
 * resource creation time.
 * @return value or {@code null} for none
 */
public java.lang.String getRegion() {
  return this.region;
}
/**
 * Sets the URL of the region where the Subnetwork resides. This field can only be set at
 * resource creation time.
 * @param region region or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setRegion(java.lang.String region) {
  this.region = region;
  return this;
}
/**
 * Returns the role of the subnetwork, used only when purpose = INTERNAL_HTTPS_LOAD_BALANCER.
 * ACTIVE means the subnetwork is currently serving Internal HTTP(S) Load Balancing; BACKUP
 * means it is ready to be promoted to ACTIVE or is currently draining. Updatable with a patch
 * request.
 * @return value or {@code null} for none
 */
public java.lang.String getRole() {
  return this.role;
}
/**
 * Sets the role of the subnetwork, used only when purpose = INTERNAL_HTTPS_LOAD_BALANCER.
 * ACTIVE means the subnetwork is currently serving Internal HTTP(S) Load Balancing; BACKUP
 * means it is ready to be promoted to ACTIVE or is currently draining. Updatable with a patch
 * request.
 * @param role role or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setRole(java.lang.String role) {
  this.role = role;
  return this;
}
/**
 * Returns the configurations for secondary IP ranges of VM instances contained in this
 * subnetwork. A VM's primary IP must belong to the subnetwork's primary ipCidrRange, while its
 * alias IPs may come from either primary or secondary ranges. Updatable with a patch request.
 * @return value or {@code null} for none
 */
public java.util.List<SubnetworkSecondaryRange> getSecondaryIpRanges() {
  return this.secondaryIpRanges;
}
/**
 * Sets the configurations for secondary IP ranges of VM instances contained in this
 * subnetwork. A VM's primary IP must belong to the subnetwork's primary ipCidrRange, while its
 * alias IPs may come from either primary or secondary ranges. Updatable with a patch request.
 * @param secondaryIpRanges secondaryIpRanges or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setSecondaryIpRanges(java.util.List<SubnetworkSecondaryRange> secondaryIpRanges) {
  this.secondaryIpRanges = secondaryIpRanges;
  return this;
}
/**
 * Returns the server-defined URL for the resource. [Output Only]
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLink() {
  return this.selfLink;
}
/**
 * Sets the server-defined URL for the resource. [Output Only]
 * @param selfLink selfLink or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setSelfLink(java.lang.String selfLink) {
  this.selfLink = selfLink;
  return this;
}
/**
 * Returns the stack type for the subnet. IPV4_ONLY (the default) assigns new VMs IPv4
 * addresses only; IPV4_IPV6 allows new VMs to receive both IPv4 and IPv6 addresses. Settable
 * at resource creation time and updatable using patch.
 * @return value or {@code null} for none
 */
public java.lang.String getStackType() {
  return this.stackType;
}
/**
 * Sets the stack type for the subnet. IPV4_ONLY (the default) assigns new VMs IPv4 addresses
 * only; IPV4_IPV6 allows new VMs to receive both IPv4 and IPv6 addresses. Settable at
 * resource creation time and updatable using patch.
 * @param stackType stackType or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setStackType(java.lang.String stackType) {
  this.stackType = stackType;
  return this;
}
/**
 * Returns the state of the subnetwork. [Output Only] READY means the subnetwork is created
 * and usable; DRAINING (only for subnetworks with purpose INTERNAL_HTTPS_LOAD_BALANCER) means
 * connections to the load balancer are being drained, and the subnetwork cannot be used or
 * modified until it reaches READY.
 * @return value or {@code null} for none
 */
public java.lang.String getState() {
  return this.state;
}
/**
 * Sets the state of the subnetwork. [Output Only] READY means the subnetwork is created and
 * usable; DRAINING (only for subnetworks with purpose INTERNAL_HTTPS_LOAD_BALANCER) means
 * connections to the load balancer are being drained, and the subnetwork cannot be used or
 * modified until it reaches READY.
 * @param state state or {@code null} for none
 * @return this object, to allow call chaining
 */
public Subnetwork setState(java.lang.String state) {
  this.state = state;
  return this;
}
@Override
public Subnetwork set(String fieldName, Object value) {
  // Delegate to the generic key/value storage, narrowing the return type for chaining.
  Subnetwork result = (Subnetwork) super.set(fieldName, value);
  return result;
}
@Override
public Subnetwork clone() {
  // Covariant override: the superclass copy is always a Subnetwork instance.
  Subnetwork copy = (Subnetwork) super.clone();
  return copy;
}
}
| |
/*
* Copyright (c) 1997, 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.multi;
import java.util.Vector;
import javax.swing.plaf.TreeUI;
import java.awt.Rectangle;
import javax.swing.JTree;
import javax.swing.tree.TreePath;
import javax.swing.plaf.ComponentUI;
import javax.swing.JComponent;
import java.awt.Graphics;
import java.awt.Dimension;
import javax.accessibility.Accessible;
/**
* A multiplexing UI used to combine <code>TreeUI</code>s.
*
* <p>This file was automatically generated by AutoMulti.
*
* @author Otto Multey
*/
public class MultiTreeUI extends TreeUI {
    /**
     * The vector containing the real UIs. This is populated
     * in the call to <code>createUI</code>, and can be obtained by calling
     * the <code>getUIs</code> method. The first element is guaranteed to be the real UI
     * obtained from the default look and feel.
     */
    // NOTE: kept as a raw Vector for backward compatibility with the public
    // (protected) field signature of this generated class.
    protected Vector uis = new Vector();
    ////////////////////
    // Common UI methods
    ////////////////////
    /**
     * Returns the list of UIs associated with this multiplexing UI. This
     * allows processing of the UIs by an application aware of multiplexing
     * UIs on components.
     *
     * @return an array snapshot of the real <code>ComponentUI</code>s handled
     *         by this multiplexer
     */
    public ComponentUI[] getUIs() {
        return MultiLookAndFeel.uisToArray(uis);
    }
    ////////////////////
    // TreeUI methods
    ////////////////////
    /**
     * Invokes the <code>getPathBounds</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public Rectangle getPathBounds(JTree a, TreePath b) {
        Rectangle returnValue =
            ((TreeUI) (uis.elementAt(0))).getPathBounds(a,b);
        // Auxiliary UIs (indices >= 1) are invoked purely for their side
        // effects; only the default look-and-feel's result is returned.
        // The same pattern applies to every multiplexed method below.
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getPathBounds(a,b);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getPathForRow</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public TreePath getPathForRow(JTree a, int b) {
        TreePath returnValue =
            ((TreeUI) (uis.elementAt(0))).getPathForRow(a,b);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getPathForRow(a,b);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getRowForPath</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public int getRowForPath(JTree a, TreePath b) {
        int returnValue =
            ((TreeUI) (uis.elementAt(0))).getRowForPath(a,b);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getRowForPath(a,b);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getRowCount</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public int getRowCount(JTree a) {
        int returnValue =
            ((TreeUI) (uis.elementAt(0))).getRowCount(a);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getRowCount(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getClosestPathForLocation</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public TreePath getClosestPathForLocation(JTree a, int b, int c) {
        TreePath returnValue =
            ((TreeUI) (uis.elementAt(0))).getClosestPathForLocation(a,b,c);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getClosestPathForLocation(a,b,c);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>isEditing</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public boolean isEditing(JTree a) {
        boolean returnValue =
            ((TreeUI) (uis.elementAt(0))).isEditing(a);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).isEditing(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>stopEditing</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public boolean stopEditing(JTree a) {
        boolean returnValue =
            ((TreeUI) (uis.elementAt(0))).stopEditing(a);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).stopEditing(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>cancelEditing</code> method on each UI handled by this object.
     */
    public void cancelEditing(JTree a) {
        // void method: every UI (including the default one at index 0) is invoked.
        for (int i = 0; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).cancelEditing(a);
        }
    }
    /**
     * Invokes the <code>startEditingAtPath</code> method on each UI handled by this object.
     */
    public void startEditingAtPath(JTree a, TreePath b) {
        for (int i = 0; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).startEditingAtPath(a,b);
        }
    }
    /**
     * Invokes the <code>getEditingPath</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public TreePath getEditingPath(JTree a) {
        TreePath returnValue =
            ((TreeUI) (uis.elementAt(0))).getEditingPath(a);
        for (int i = 1; i < uis.size(); i++) {
            ((TreeUI) (uis.elementAt(i))).getEditingPath(a);
        }
        return returnValue;
    }
    ////////////////////
    // ComponentUI methods
    ////////////////////
    /**
     * Invokes the <code>contains</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public boolean contains(JComponent a, int b, int c) {
        boolean returnValue =
            ((ComponentUI) (uis.elementAt(0))).contains(a,b,c);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).contains(a,b,c);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>update</code> method on each UI handled by this object.
     */
    public void update(Graphics a, JComponent b) {
        for (int i = 0; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).update(a,b);
        }
    }
    /**
     * Returns a multiplexing UI instance if any of the auxiliary
     * <code>LookAndFeel</code>s supports this UI. Otherwise, just returns the
     * UI object obtained from the default <code>LookAndFeel</code>.
     */
    public static ComponentUI createUI(JComponent a) {
        // MultiLookAndFeel.createUIs populates the multiplexer's `uis` vector
        // and decides whether the multiplexer itself or the single default UI
        // should be returned.
        ComponentUI mui = new MultiTreeUI();
        return MultiLookAndFeel.createUIs(mui,
                                          ((MultiTreeUI) mui).uis,
                                          a);
    }
    /**
     * Invokes the <code>installUI</code> method on each UI handled by this object.
     */
    public void installUI(JComponent a) {
        for (int i = 0; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).installUI(a);
        }
    }
    /**
     * Invokes the <code>uninstallUI</code> method on each UI handled by this object.
     */
    public void uninstallUI(JComponent a) {
        for (int i = 0; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).uninstallUI(a);
        }
    }
    /**
     * Invokes the <code>paint</code> method on each UI handled by this object.
     */
    public void paint(Graphics a, JComponent b) {
        for (int i = 0; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).paint(a,b);
        }
    }
    /**
     * Invokes the <code>getPreferredSize</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public Dimension getPreferredSize(JComponent a) {
        Dimension returnValue =
            ((ComponentUI) (uis.elementAt(0))).getPreferredSize(a);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).getPreferredSize(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getMinimumSize</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public Dimension getMinimumSize(JComponent a) {
        Dimension returnValue =
            ((ComponentUI) (uis.elementAt(0))).getMinimumSize(a);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).getMinimumSize(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getMaximumSize</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public Dimension getMaximumSize(JComponent a) {
        Dimension returnValue =
            ((ComponentUI) (uis.elementAt(0))).getMaximumSize(a);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).getMaximumSize(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getAccessibleChildrenCount</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public int getAccessibleChildrenCount(JComponent a) {
        int returnValue =
            ((ComponentUI) (uis.elementAt(0))).getAccessibleChildrenCount(a);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).getAccessibleChildrenCount(a);
        }
        return returnValue;
    }
    /**
     * Invokes the <code>getAccessibleChild</code> method on each UI handled by this object.
     *
     * @return the value obtained from the first UI, which is
     * the UI obtained from the default <code>LookAndFeel</code>
     */
    public Accessible getAccessibleChild(JComponent a, int b) {
        Accessible returnValue =
            ((ComponentUI) (uis.elementAt(0))).getAccessibleChild(a,b);
        for (int i = 1; i < uis.size(); i++) {
            ((ComponentUI) (uis.elementAt(i))).getAccessibleChild(a,b);
        }
        return returnValue;
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.svn;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.annotate.FileAnnotation;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.VcsAnnotationLocalChangesListener;
import com.intellij.openapi.vcs.history.VcsRevisionDescription;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.Objects;
import static java.util.Collections.singletonList;
import static org.junit.Assert.*;
/**
 * Verifies under which operations a registered SVN {@link FileAnnotation} gets
 * closed (its closer callback fired): IDE commits, IDE/external updates,
 * external commits, and updates involving svn:externals — and that renames do
 * NOT close it.
 */
public class SvnAnnotationIsClosedTest extends SvnTestCase {
  // Flipped by the closer registered for the primary annotated file.
  private volatile boolean myIsClosed;
  // Flipped by the closer registered for the second (external) file in
  // testClosedByUpdateWithExternals.
  private volatile boolean myIsClosed1;
  @Override
  @Before
  public void before() throws Exception {
    super.before();
    enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
    enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
    // Reset flags so each test observes only its own close events.
    myIsClosed = false;
    myIsClosed1 = false;
  }
  @Test
  public void testClosedByCommitFromIdea() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    setUpAnnotation(tree.myS1File);
    editFileInCommand(tree.myS1File, "1\n2\n3**\n4++\n");
    assertFalse(myIsClosed);
    refreshChanges();
    final Change change = changeListManager.getChange(tree.myS1File);
    assertNotNull(change);
    commit(singletonList(change), "commit");
    dirtyScopeManager.fileDirty(tree.myS1File);
    waitChangesAndAnnotations();
    assertTrue(myIsClosed);
  }
  @Test
  public void testClosedByUpdateInIdea() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    runInAndVerifyIgnoreOutput("up", "-r", "2");
    setUpAnnotation(tree.myS1File);
    refreshChanges();
    imitUpdate();
    assertTrue(myIsClosed);
  }
  @Test
  public void testClosedChangedByUpdateInIdea() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    runInAndVerifyIgnoreOutput("up", "-r", "2");
    setUpAnnotation(tree.myS1File);
    // Local modification on top of the older revision; annotation must only
    // be closed by the subsequent update, not by the edit itself.
    editFileInCommand(tree.myS1File, "1+\n2\n3\n4\n");
    refreshChanges();
    assertFalse(myIsClosed);
    imitUpdate();
    assertTrue(myIsClosed);
  }
  @Test
  public void testClosedByExternalUpdate() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    runInAndVerifyIgnoreOutput("up", "-r", "2");
    setUpAnnotation(tree.myS1File);
    editFileInCommand(tree.myS1File, "1+\n2\n3\n4\n");
    refreshChanges();
    assertFalse(myIsClosed);
    // Update performed outside the IDE, then VFS refresh brings the change in.
    update();
    refreshVfs();
    waitChangesAndAnnotations();
    assertTrue(myIsClosed);
  }
  @Test
  public void testNotClosedByRenaming() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    setUpAnnotation(tree.myS1File);
    editFileInCommand(tree.myS1File, "1\n2\n3**\n4++\n");
    assertFalse(myIsClosed);
    renameFileInCommand(tree.myS1File, "5364536");
    // A rename must keep the annotation alive.
    assertFalse(myIsClosed);
    refreshChanges();
    assertNotNull(changeListManager.getChange(tree.myS1File));
  }
  @Test
  public void testAnnotateRenamed() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    editFileInCommand(tree.myS1File, "1\n2\n3**\n4++\n");
    setUpAnnotation(tree.myS1File);
    assertFalse(myIsClosed);
    refreshChanges();
    assertNotNull(changeListManager.getChange(tree.myS1File));
  }
  @Test
  public void testClosedByExternalCommit() throws Exception {
    final SubTree tree = setUpWorkingCopy();
    setUpAnnotation(tree.myS1File);
    editFileInCommand(tree.myS1File, "1+\n2\n3\n4\n");
    refreshChanges();
    assertFalse(myIsClosed);
    // Commit via the command line, not through the IDE.
    checkin();
    refreshVfs();
    waitChangesAndAnnotations();
    assertTrue(myIsClosed);
  }
  @Test
  public void testClosedByUpdateWithExternals() throws Exception {
    prepareExternal();
    VirtualFile sourceDir = Objects.requireNonNull(myWorkingCopyDir.findChild("source"));
    VirtualFile externalDir = Objects.requireNonNull(sourceDir.findChild("external"));
    final VirtualFile vf1 = Objects.requireNonNull(sourceDir.findChild("s1.txt"));
    final VirtualFile vf2 = Objects.requireNonNull(externalDir.findChild("t12.txt"));
    editFileInCommand(vf1, "test externals 123" + System.currentTimeMillis());
    editFileInCommand(vf2, "test externals 123" + System.currentTimeMillis());
    refreshChanges();
    assertNotNull(changeListManager.getChange(vf1));
    assertNotNull(changeListManager.getChange(vf2));
    runInAndVerifyIgnoreOutput("ci", "-m", "test", sourceDir.getPath());
    runInAndVerifyIgnoreOutput("ci", "-m", "test", externalDir.getPath());
    editFileInCommand(vf2, "test externals 12344444" + System.currentTimeMillis());
    runInAndVerifyIgnoreOutput("ci", "-m", "test", externalDir.getPath());
    assertRevision(vf1, 3);
    assertRevision(vf2, 5);
    // Pin both working copies at r4 so only the external has a pending change.
    runInAndVerifyIgnoreOutput("up", "-r", "4", sourceDir.getPath());
    runInAndVerifyIgnoreOutput("up", "-r", "4", externalDir.getPath());
    assertRevision(vf1, 3);
    assertRevision(vf2, 4);
    setUpAnnotation(vf1);
    setUpAnnotation(vf2, () -> myIsClosed1 = true);
    runInAndVerifyIgnoreOutput("up", sourceDir.getPath());
    refreshVfs();
    waitChangesAndAnnotations();
    assertRevision(vf1, 3);
    assertRevision(vf2, 5);
    // Only the file whose content actually changed on update is closed.
    assertTrue(myIsClosed1);
    assertFalse(myIsClosed);
  }
  /**
   * Creates a working copy with three committed revisions of {@code myS1File}
   * (initial content, then two edits).
   */
  @NotNull
  private SubTree setUpWorkingCopy() throws IOException {
    final SubTree tree = new SubTree(myWorkingCopyDir);
    checkin();
    editFileInCommand(tree.myS1File, "1\n2\n3\n4\n");
    checkin();
    editFileInCommand(tree.myS1File, "1\n2\n3**\n4\n");
    checkin();
    return tree;
  }
  /** Registers an annotation for {@code file} whose closer sets {@link #myIsClosed}. */
  private void setUpAnnotation(@NotNull VirtualFile file) throws VcsException {
    setUpAnnotation(file, () -> myIsClosed = true);
  }
  /**
   * Creates a test annotation for {@code file} and registers it with the local
   * changes listener; when the annotation is closed, {@code closer} runs and
   * the annotation is unregistered.
   */
  private void setUpAnnotation(@NotNull VirtualFile file, @NotNull Runnable closer) throws VcsException {
    final VcsAnnotationLocalChangesListener listener = vcsManager.getAnnotationLocalChangesListener();
    final FileAnnotation annotation = createTestAnnotation(vcs.getAnnotationProvider(), file);
    annotation.setCloser(() -> {
      closer.run();
      listener.unregisterAnnotation(file, annotation);
    });
    listener.registerAnnotation(file, annotation);
  }
  /** Asserts that the current SVN revision of {@code file} equals {@code number}. */
  private void assertRevision(@NotNull VirtualFile file, final long number) {
    final VcsRevisionDescription revision = ((SvnDiffProvider)vcs.getDiffProvider()).getCurrentRevisionDescription(file);
    assertEquals(number, ((SvnRevisionNumber)revision.getRevisionNumber()).getLongRevisionNumber());
  }
}
| |
/*
* Copyright 2006-2009 Odysseus Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.odysseus.el.tree.impl;
import java.util.HashMap;
import de.odysseus.el.misc.LocalMessages;
/**
* Handcrafted scanner.
*
* @author Christoph Beck
*/
public class Scanner {
/**
* Scan exception type
*/
@SuppressWarnings("serial")
public static class ScanException extends Exception {
final int position;
final String encountered;
final String expected;
public ScanException(int position, String encountered, String expected) {
super(LocalMessages.get("error.scan", position, encountered, expected));
this.position = position;
this.encountered = encountered;
this.expected = expected;
}
}
public static class Token {
private final Symbol symbol;
private final String image;
private final int length;
public Token(Symbol symbol, String image) {
this(symbol, image, image.length());
}
public Token(Symbol symbol, String image, int length) {
this.symbol = symbol;
this.image = image;
this.length = length;
}
public Symbol getSymbol() {
return symbol;
}
public String getImage() {
return image;
}
public int getSize() {
return length;
}
@Override
public String toString() {
return symbol.toString();
}
}
public static class ExtensionToken extends Token {
public ExtensionToken(String image) {
super(Scanner.Symbol.EXTENSION, image);
}
}
/**
* Symbol type
*/
public enum Symbol {
EOF,
PLUS("'+'"), MINUS("'-'"),
MUL("'*'"), DIV("'/'|'div'"), MOD("'%'|'mod'"),
LPAREN("'('"), RPAREN("')'"),
IDENTIFIER,
NOT("'!'|'not'"), AND("'&&'|'and'"), OR("'||'|'or'"),
EMPTY("'empty'"), INSTANCEOF("'instanceof'"),
INTEGER, FLOAT, TRUE("'true'"), FALSE("'false'"), STRING, NULL("'null'"),
LE("'<='|'le'"), LT("'<'|'lt'"), GE("'>='|'ge'"), GT("'>'|'gt'"),
EQ("'=='|'eq'"), NE("'!='|'ne'"),
QUESTION("'?'"), COLON("':'"),
TEXT,
DOT("'.'"), LBRACK("'['"), RBRACK("']'"),
COMMA("','"),
START_EVAL_DEFERRED("'#{'"), START_EVAL_DYNAMIC("'${'"), END_EVAL("'}'"),
EXTENSION; // used in syntax extensions
private final String string;
private Symbol() {
this(null);
}
private Symbol(String string) {
this.string = string;
}
@Override
public String toString() {
return string == null ? "<" + name() + ">" : string;
}
}
private static final HashMap<String, Token> KEYMAP = new HashMap<String, Token>();
private static final HashMap<Symbol, Token> FIXMAP = new HashMap<Symbol, Token>();
private static void addFixToken(Token token) {
FIXMAP.put(token.getSymbol(), token);
}
private static void addKeyToken(Token token) {
KEYMAP.put(token.getImage(), token);
}
static {
addFixToken(new Token(Symbol.PLUS, "+"));
addFixToken(new Token(Symbol.MINUS, "-"));
addFixToken(new Token(Symbol.MUL, "*"));
addFixToken(new Token(Symbol.DIV, "/"));
addFixToken(new Token(Symbol.MOD, "%"));
addFixToken(new Token(Symbol.LPAREN, "("));
addFixToken(new Token(Symbol.RPAREN, ")"));
addFixToken(new Token(Symbol.NOT, "!"));
addFixToken(new Token(Symbol.AND, "&&"));
addFixToken(new Token(Symbol.OR, "||"));
addFixToken(new Token(Symbol.EQ, "=="));
addFixToken(new Token(Symbol.NE, "!="));
addFixToken(new Token(Symbol.LT, "<"));
addFixToken(new Token(Symbol.LE, "<="));
addFixToken(new Token(Symbol.GT, ">"));
addFixToken(new Token(Symbol.GE, ">="));
addFixToken(new Token(Symbol.QUESTION, "?"));
addFixToken(new Token(Symbol.COLON, ":"));
addFixToken(new Token(Symbol.COMMA, ","));
addFixToken(new Token(Symbol.DOT, "."));
addFixToken(new Token(Symbol.LBRACK, "["));
addFixToken(new Token(Symbol.RBRACK, "]"));
addFixToken(new Token(Symbol.START_EVAL_DEFERRED, "#{"));
addFixToken(new Token(Symbol.START_EVAL_DYNAMIC, "${"));
addFixToken(new Token(Symbol.END_EVAL, "}"));
addFixToken(new Token(Symbol.EOF, null, 0));
addKeyToken(new Token(Symbol.NULL, "null"));
addKeyToken(new Token(Symbol.TRUE, "true"));
addKeyToken(new Token(Symbol.FALSE, "false"));
addKeyToken(new Token(Symbol.EMPTY, "empty"));
addKeyToken(new Token(Symbol.DIV, "div"));
addKeyToken(new Token(Symbol.MOD, "mod"));
addKeyToken(new Token(Symbol.NOT, "not"));
addKeyToken(new Token(Symbol.AND, "and"));
addKeyToken(new Token(Symbol.OR, "or"));
addKeyToken(new Token(Symbol.LE, "le"));
addKeyToken(new Token(Symbol.LT, "lt"));
addKeyToken(new Token(Symbol.EQ, "eq"));
addKeyToken(new Token(Symbol.NE, "ne"));
addKeyToken(new Token(Symbol.GE, "ge"));
addKeyToken(new Token(Symbol.GT, "gt"));
addKeyToken(new Token(Symbol.INSTANCEOF, "instanceof"));
}
private Token token; // current token
private int position; // start position of current token
private final String input;
protected final StringBuilder builder = new StringBuilder();
/**
* Constructor.
* @param input expression string
*/
protected Scanner(String input) {
this.input = input;
}
public String getInput() {
return input;
}
/**
* @return current token
*/
public Token getToken() {
return token;
}
/**
* @return current input position
*/
public int getPosition() {
return position;
}
/**
* @return <code>true</code> iff the specified character is a digit
*/
protected boolean isDigit(char c) {
return c >= '0' && c <= '9';
}
/**
* @param s name
* @return token for the given keyword or <code>null</code>
*/
protected Token keyword(String s) {
return KEYMAP.get(s);
}
/**
* @param symbol
* @return token for the given symbol
*/
protected Token fixed(Symbol symbol) {
return FIXMAP.get(symbol);
}
protected Token token(Symbol symbol, String value, int length) {
return new Token(symbol, value, length);
}
protected boolean isEval() {
return token != null && token.getSymbol() != Symbol.TEXT && token.getSymbol() != Symbol.END_EVAL;
}
/**
* text token
*/
protected Token nextText() throws ScanException {
builder.setLength(0);
int i = position;
int l = input.length();
boolean escaped = false;
while (i < l) {
char c = input.charAt(i);
switch (c) {
case '\\':
if (escaped) {
builder.append('\\');
} else {
escaped = true;
}
break;
case '#':
case '$':
if (i+1 < l && input.charAt(i+1) == '{') {
if (escaped) {
builder.append(c);
} else {
return token(Symbol.TEXT, builder.toString(), i - position);
}
} else {
if (escaped) {
builder.append('\\');
}
builder.append(c);
}
escaped = false;
break;
default:
if (escaped) {
builder.append('\\');
}
builder.append(c);
escaped = false;
}
i++;
}
if (escaped) {
builder.append('\\');
}
return token(Symbol.TEXT, builder.toString(), i - position);
}
/**
* string token
*/
protected Token nextString() throws ScanException {
builder.setLength(0);
char quote = input.charAt(position);
int i = position+1;
int l = input.length();
while (i < l) {
char c = input.charAt(i++);
if (c == '\\') {
if (i == l) {
throw new ScanException(position, "unterminated string", quote + " or \\");
} else {
c = input.charAt(i++);
if (c == '\\' || c == quote) {
builder.append(c);
} else {
throw new ScanException(position, "invalid escape sequence \\" + c, "\\" + quote + " or \\\\");
}
}
} else if (c == quote) {
return token(Symbol.STRING, builder.toString(), i - position);
} else {
builder.append(c);
}
}
throw new ScanException(position, "unterminated string", String.valueOf(quote));
}
/**
* number token
*/
protected Token nextNumber() throws ScanException {
int i = position;
int l = input.length();
while (i < l && isDigit(input.charAt(i))) {
i++;
}
Symbol symbol = Symbol.INTEGER;
if (i < l && input.charAt(i) == '.') {
i++;
while (i < l && isDigit(input.charAt(i))) {
i++;
}
symbol = Symbol.FLOAT;
}
if (i < l && (input.charAt(i) == 'e' || input.charAt(i) == 'E')) {
int e = i;
i++;
if (i < l && (input.charAt(i) == '+' || input.charAt(i) == '-')) {
i++;
}
if (i < l && isDigit(input.charAt(i))) {
i++;
while (i < l && isDigit(input.charAt(i))) {
i++;
}
symbol = Symbol.FLOAT;
} else {
i = e;
}
}
return token(symbol, input.substring(position, i), i - position);
}
/**
* token inside an eval expression
*/
protected Token nextEval() throws ScanException {
char c1 = input.charAt(position);
char c2 = position < input.length()-1 ? input.charAt(position+1) : (char)0;
switch (c1) {
case '*': return fixed(Symbol.MUL);
case '/': return fixed(Symbol.DIV);
case '%': return fixed(Symbol.MOD);
case '+': return fixed(Symbol.PLUS);
case '-': return fixed(Symbol.MINUS);
case '?': return fixed(Symbol.QUESTION);
case ':': return fixed(Symbol.COLON);
case '[': return fixed(Symbol.LBRACK);
case ']': return fixed(Symbol.RBRACK);
case '(': return fixed(Symbol.LPAREN);
case ')': return fixed(Symbol.RPAREN);
case ',': return fixed(Symbol.COMMA);
case '.':
if (!isDigit(c2)) {
return fixed(Symbol.DOT);
}
break;
case '=':
if (c2 == '=') {
return fixed(Symbol.EQ);
}
break;
case '&':
if (c2 == '&') {
return fixed(Symbol.AND);
}
break;
case '|':
if (c2 == '|') {
return fixed(Symbol.OR);
}
break;
case '!':
if (c2 == '=') {
return fixed(Symbol.NE);
}
return fixed(Symbol.NOT);
case '<':
if (c2 == '=') {
return fixed(Symbol.LE);
}
return fixed(Symbol.LT);
case '>':
if (c2 == '=') {
return fixed(Symbol.GE);
}
return fixed(Symbol.GT);
case '"':
case '\'': return nextString();
}
if (isDigit(c1) || c1 == '.') {
return nextNumber();
}
if (Character.isJavaIdentifierStart(c1)) {
int i = position+1;
int l = input.length();
while (i < l && Character.isJavaIdentifierPart(input.charAt(i))) {
i++;
}
String name = input.substring(position, i);
Token keyword = keyword(name);
return keyword == null ? token(Symbol.IDENTIFIER, name, i - position) : keyword;
}
throw new ScanException(position, "invalid character '" + c1 + "'", "expression token");
}
/**
 * Dispatches to the appropriate sub-scanner for the next token, depending
 * on whether the scanner currently is inside an eval expression.
 *
 * @return the scanned token
 * @throws ScanException
 *             if no valid token can be scanned at the current position
 */
protected Token nextToken() throws ScanException {
    if (isEval()) {
        // inside an expression: '}' terminates it, anything else is an
        // expression token
        return input.charAt(position) == '}' ? fixed(Symbol.END_EVAL) : nextEval();
    }
    // outside an expression: look for a "#{" or "${" opener
    if (position + 1 < input.length() && input.charAt(position + 1) == '{') {
        char opener = input.charAt(position);
        if (opener == '#') {
            return fixed(Symbol.START_EVAL_DEFERRED);
        }
        if (opener == '$') {
            return fixed(Symbol.START_EVAL_DYNAMIC);
        }
    }
    return nextText();
}
/**
 * Scan next token.
 * After calling this method, {@link #getToken()} and {@link #getPosition()}
 * can be used to retrieve the token's image and input position.
 *
 * @return scanned token
 * @throws ScanException
 *             if no valid token can be scanned
 */
public Token next() throws ScanException {
    // advance past the previously scanned token, if any
    if (token != null) {
        position += token.getSize();
    }
    final int end = input.length();
    // whitespace is insignificant inside an eval expression
    if (isEval()) {
        while (position < end && Character.isWhitespace(input.charAt(position))) {
            position++;
        }
    }
    token = (position == end) ? fixed(Symbol.EOF) : nextToken();
    return token;
}
}
| |
package de.flexiprovider.common.math.ellipticcurves;
//import java.math.BigInteger;
import java.util.Random;
import de.flexiprovider.api.Registry;
import de.flexiprovider.common.exceptions.DifferentCurvesException;
import de.flexiprovider.common.exceptions.DifferentFieldsException;
import de.flexiprovider.common.exceptions.InvalidFormatException;
import de.flexiprovider.common.exceptions.InvalidPointException;
import de.flexiprovider.common.exceptions.NoQuadraticResidueException;
import de.flexiprovider.common.math.FlexiBigInt;
import de.flexiprovider.common.math.IntegerFunctions;
import de.flexiprovider.common.math.finitefields.GFElement;
import de.flexiprovider.common.math.finitefields.GFPElement;
import de.flexiprovider.common.util.FlexiBigIntUtils;
/**
* This class implements points and their arithmetic on elliptic curves over
* finite prime fields (<i>GF(p)</i>). For more information on the arithmetic
* see for example the <a
* href="http://www.certicom.com/research/online.html">Certicom online tutorial</a>.
*
* @author Birgit Henhapl
* @see EllipticCurveGFP
* @see PointGFP
*/
public class PointGFP extends Point {
/**
* curve parameter a
*/
private GFPElement mA;
/**
* curve parameter b
*/
private GFPElement mB;
/**
* x-coordinate of this point
*/
private GFPElement mX;
/**
* y-coordinate of this point
*/
private GFPElement mY;
/**
* z-coordinate of this point
*/
private GFPElement mZ;
/**
* holds z<sup>2</sup> of this point
*/
private GFPElement mZ2;
/**
* holds z<sup>3</sup> of this point
*/
private GFPElement mZ3;
/**
* holds a * z<sup>4</sup> of this point
*/
private GFPElement mAZ4;
// /////////////////////////////////////////////////////////////
// constructors
// /////////////////////////////////////////////////////////////
/**
 * Construct the point at infinity on the specified elliptic curve.
 *
 * @param E
 *            EllipticCurveGFP is the elliptic curve this point lies on
 */
public PointGFP(EllipticCurveGFP E) {
// cache the curve, the prime modulus, and the curve coefficients a and b
mE = E;
mP = E.getQ();
mA = (GFPElement) E.getA();
mB = (GFPElement) E.getB();
// (x, y, z) = (1, 1, 0) is the projective point at infinity
assignZero();
}
/**
 * Construct a random point on the specified elliptic curve using the given
 * source of randomness.
 * <p>
 * A random x is drawn until x<sup>3</sup> + ax + b is a quadratic residue
 * mod p; then y is set to its square root.
 *
 * @param E
 *            EllipticCurveGFP is the elliptic curve this point lies on
 * @param rand
 *            the source of randomness (if <tt>null</tt>, the registry's
 *            secure random source is used)
 */
public PointGFP(EllipticCurveGFP E, Random rand) {
    mE = E;
    mP = E.getQ();
    mA = (GFPElement) E.getA();
    mB = (GFPElement) E.getB();
    // Honor the caller-supplied source of randomness, as documented. The
    // previous implementation ignored the rand parameter entirely and
    // always used Registry.getSecureRandom(); keep that as the fallback.
    final Random source = (rand != null) ? rand : Registry.getSecureRandom();
    // sentinel value marking "no y found yet"
    final GFPElement minusOne = new GFPElement(FlexiBigInt.ONE.negate(), E
            .getQ());
    mY = minusOne;
    GFElement y2 = null;
    GFElement x = null;
    while (mY.equals(minusOne)) {
        FlexiBigInt value = new FlexiBigInt(mP.bitLength(), source);
        mX = new GFPElement(value, mP);
        y2 = mA.multiply(mX); // a*x
        x = mX.multiply(mX);
        x.multiplyThisBy(mX); // x^3
        y2.addToThis(x.add(mB)); // y^2 = x^3 + a*x + b
        try {
            // y = sqrt(y^2) mod p, if it exists
            value = IntegerFunctions.ressol(y2.toFlexiBigInt(), mP);
            mY = new GFPElement(value, mP);
        } catch (NoQuadraticResidueException NQRExc) {
            // no square root for this x; draw another random x
            mY = minusOne;
        }
    }
    // affine point: z = 1, so z^2 = z^3 = 1 and a*z^4 = a
    mZ = GFPElement.ONE(mP);
    mZ2 = GFPElement.ONE(mP);
    mZ3 = GFPElement.ONE(mP);
    mAZ4 = mA;
}
/**
 * Constructs the affine point (<tt>x</tt>, <tt>y</tt>) on curve <tt>E</tt>
 * (the coordinates are defensively cloned).
 * <p>
 * NOTE(review): the javadoc below declares an InvalidPointException for
 * points not on the curve, but no on-curve validation is performed here —
 * confirm whether callers rely on this constructor validating its input.
 *
 * @param x
 *            x-coordinate
 * @param y
 *            y-coordinate
 * @param E
 *            EllipticCurveGFP is the elliptic curve this point lies on
 * @throws InvalidPointException
 *             if the specified point is not on the curve.
 * @throws DifferentFieldsException
 *             if <tt>x</tt> and <tt>y</tt> are defined over different
 *             fields.
 */
public PointGFP(GFPElement x, GFPElement y, EllipticCurveGFP E)
throws InvalidPointException, DifferentFieldsException {
mE = E;
mP = E.getQ();
mA = (GFPElement) E.getA();
mB = (GFPElement) E.getB();
// defensive copies: this point must not alias the caller's elements
mX = (GFPElement) x.clone();
mY = (GFPElement) y.clone();
mZ = GFPElement.ONE(mP);
// derived values (z^2, z^3, a*z^4) are computed lazily on demand
mZ2 = null;
mZ3 = null;
mAZ4 = null;
}
/**
 * Constructs the projective point (<tt>x</tt>, <tt>y</tt>, <tt>z</tt>) on
 * curve <tt>E</tt>. Unlike the two-coordinate constructor, the coordinates
 * are stored by reference (NOT cloned).
 * <p>
 * NOTE(review): as above, no on-curve validation is performed despite the
 * declared InvalidPointException.
 *
 * @param x
 *            x-coordinate
 * @param y
 *            y-coordinate
 * @param z
 *            z-coordinate
 * @param E
 *            the elliptic curve this point lies on
 * @throws InvalidPointException
 *             if the specified point is not on the curve.
 * @throws DifferentFieldsException
 *             if <tt>x</tt>, <tt>y</tt>, and <tt>z</tt> are defined over
 *             different fields.
 */
public PointGFP(GFPElement x, GFPElement y, GFPElement z, EllipticCurveGFP E)
throws InvalidPointException, DifferentFieldsException {
mE = E;
mP = E.getQ();
mA = (GFPElement) E.getA();
mB = (GFPElement) E.getB();
mX = x;
mY = y;
mZ = z;
// derived values (z^2, z^3, a*z^4) are computed lazily on demand
mZ2 = null;
mZ3 = null;
mAZ4 = null;
}
/**
 * Constructs a new point. The information is packed in the given byte array
 * together with the given elliptic curve. (see X9.63-199x)
 *
 * @param encoded
 *            the point in normal, compressed or hybrid form.
 * @param E
 *            the underlying elliptic curve
 * @throws InvalidPointException
 *             if the point is not on the curve.
 * @throws InvalidFormatException
 *             if the point representation is invalid.
 */
public PointGFP(byte[] encoded, EllipticCurveGFP E)
throws InvalidPointException, InvalidFormatException {
mE = E;
mP = E.getQ();
mA = (GFPElement) E.getA();
mB = (GFPElement) E.getB();
// the zero point is encoded as a single byte 0
if (encoded.length == 1 && encoded[0] == 0) {
assignZero();
return;
}
// the first OCTET pc indicates the form the point is represented in:
// if pc = 2, the indicating bit is not set (point = pc | x)
// if pc = 3, the indicating bit is set (point = pc | x)
// if pc = 4, x and y are given: (point = pc | x | y), |x| = |y| =
// (|point| - 1) / 2)
// if pc = 6, x and y are given and the indicating bit is not set:
// (point = pc | x | y), |x| = |y| = (|point| - 1) / 2)
// if pc = 7, x and y are given and the indicating bit is set: (point =
// pc | x | y), |x| = |y| = (|point| - 1) / 2)
// NOTE: the locals l and yMod2 are declared in the first case they
// appear in and reused across the later cases of this switch.
byte[] bX, bY;
GFPElement x, y, z;
final byte pc = encoded[0];
switch (pc) {
case 2:
case 3:
// compressed form: only x is stored; recover y from x and the
// parity bit (pc & 1)
bX = new byte[encoded.length - 1];
System.arraycopy(encoded, 1, bX, 0, bX.length);
x = new GFPElement(new FlexiBigInt(1, bX), mP);
boolean yMod2 = (pc & 1) == 1;
y = decompress(yMod2, x);
break;
case 4:
// uncompressed form: x and y stored in two equal-sized halves
int l = (encoded.length - 1) >> 1;
bX = new byte[l];
bY = new byte[l];
System.arraycopy(encoded, 1, bX, 0, l);
System.arraycopy(encoded, 1 + l, bY, 0, l);
x = new GFPElement(new FlexiBigInt(1, bX), mP);
y = new GFPElement(new FlexiBigInt(1, bY), mP);
break;
case 6:
case 7:
// hybrid form: x and y stored, and the parity bit must agree
// with the decompressed y (consistency check)
l = (encoded.length - 1) >> 1;
bX = new byte[l];
bY = new byte[l];
System.arraycopy(encoded, 1, bX, 0, l);
System.arraycopy(encoded, 1 + l, bY, 0, l);
x = new GFPElement(new FlexiBigInt(1, bX), mP);
y = new GFPElement(new FlexiBigInt(1, bY), mP);
yMod2 = (pc & 0x01) == 1;
if (!(decompress(yMod2, x).equals(y))) {
throw new InvalidPointException();
}
break;
default:
throw new InvalidFormatException(pc);
}
z = GFPElement.ONE(mP);
assign(x, y, z);
}
/**
 * Copy constructor.
 *
 * @param other
 *            point to copy
 */
public PointGFP(PointGFP other) {
// curve, modulus, and coefficients are shared (immutable per curve);
// the coordinates are deep-copied by assign(PointGFP)
mE = other.mE;
mP = other.mP;
mA = other.mA;
mB = other.mB;
assign(other);
}
// /////////////////////////////////////////////////////////////
// assignments
// /////////////////////////////////////////////////////////////
/**
 * Assigns to this point the point at infinity. The coordinates of this
 * point are (x, y, z) = (1, 1, 0).
 */
private void assignZero() {
mX = GFPElement.ONE(mP);
mY = GFPElement.ONE(mP);
mZ = GFPElement.ZERO(mP);
// invalidate lazily computed values z^2, z^3, a*z^4
mZ2 = null;
mZ3 = null;
mAZ4 = null;
}
/**
 * Assigns to this point the x-, y- and z-coordinates (<tt>x</tt>,
 * <tt>y</tt>, <tt>z</tt>) (without copying).
 *
 * @param x
 *            the x-coordinate
 * @param y
 *            the y-coordinate
 * @param z
 *            the z-coordinate
 */
private void assign(GFPElement x, GFPElement y, GFPElement z) {
mX = x;
mY = y;
mZ = z;
// invalidate lazily computed values z^2, z^3, a*z^4
mZ2 = null;
mZ3 = null;
mAZ4 = null;
}
/**
 * Assigns to this point the x-, y- and z-coordinates of the given other
 * point (by copying the coordinates).
 *
 * @param other
 *            the other point
 */
private void assign(PointGFP other) {
mX = (GFPElement) other.mX.clone();
mY = (GFPElement) other.mY.clone();
mZ = (GFPElement) other.mZ.clone();
// invalidate lazily computed values z^2, z^3, a*z^4
mZ2 = null;
mZ3 = null;
mAZ4 = null;
}
/**
 * @return a clone of this point
 */
public Object clone() {
return new PointGFP(this);
}
/**
 * Tests whether this Point is equal to other. Since the projective
 * representation is not unique, the points are equal iff<br>
 * <tt><tt>mX</tt>*<tt>other.mZ</tt><sup>2</sup> ==
 * <tt>other.mX</tt>*<tt>mZ</tt><sup>2</sup></tt>
 * and <tt><tt>mY</tt>*<tt>other.mZ</tt><sup>3</sup> ==
 * <tt>other.mY</tt>*<tt>mZ</tt><sup>3</sup></tt>.
 *
 * @param other
 *            Point to compare this Point with
 * @return <tt>(<tt>mX</tt>*<tt>other.mZ</tt><sup>2</sup> ==
 *         <tt>other.mX</tt>*<tt>mZ</tt><sup>2</sup>) <tt>AND</tt>
 *         (<tt>mY</tt>*<tt>other.mZ</tt><sup>3</sup> ==
 *         <tt>other.mY</tt>*<tt>mZ</tt><sup>3</sup>)</tt>
 */
public boolean equals(Object other) {
// Guard against other==null or being of an unsuitable type:
if (other == null || !(other instanceof PointGFP)) {
return false;
}
PointGFP otherPoint = (PointGFP) other;
// points on different curves are never equal
if (!mE.equals(otherPoint.mE)) {
return false;
}
// two points at infinity are equal
if (isZero() && otherPoint.isZero()) {
return true;
}
// work on clones, since the coordinates are scaled in place below
GFElement oX = (GFElement) otherPoint.mX.clone();
GFElement oY = (GFElement) otherPoint.mY.clone();
GFElement oZ = (GFElement) otherPoint.mZ.clone();
// fast path: both points affine (z == 1) -> compare coordinates directly
if (mZ.isOne() && oZ.isOne()) {
if (!(oX.equals(mX) && oY.equals(mY))) {
return false;
}
}
// cross-multiply to a common denominator: scale the other point's
// coordinates by this point's z^2 and z^3
if (!mZ.isOne()) {
GFElement z = (GFElement) mZ.clone();
GFElement z2 = z.multiply(z);
GFElement z3 = z2.multiply(z);
oX.multiplyThisBy(z2);
oY.multiplyThisBy(z3);
}
// ... and scale this point's coordinates by the other point's z^2, z^3
GFElement x = (GFElement) mX.clone();
GFElement y = (GFElement) mY.clone();
if (!oZ.isOne()) {
GFElement oZ2 = oZ.multiply(oZ);
GFElement oZ3 = oZ2.multiply(oZ);
x.multiplyThisBy(oZ2);
y.multiplyThisBy(oZ3);
}
return oX.equals(x) && oY.equals(y);
}
/**
 * @return the hash code of this point
 * @see java.lang.Object#hashCode()
 */
public int hashCode() {
// Two projective points are equal iff their corresponding
// affine representations are equal. We cannot simply sum over the
// (hash values of) projective coordinates because the projective
// representation is not unique: a given point (x,y) might be
// represented as (X,Y,Z) or (X',Y',Z').
//
// This hash code could possibly be precomputed whenever the value of
// this point changes.
return getXAffin().hashCode() + getYAffin().hashCode();
}
/**
 * Returns this point in affine representation as a String: (x, y), where x =
 * <tt>mX</tt>/<tt>mZ<sup>2</sup></tt> and y = <tt>mY</tt>/<tt>mZ<sup>3</sup></tt>.
 * If this point is at infinity (that means, mZ = 0), the output is (0, 0).
 *
 * @return String (x, y)
 */
public String toString() {
if (isZero()) {
return "(0, 0)";
}
return "(" + getXAffin().toString() + ",\n " + getYAffin().toString()
+ ")";
}
// ///////////////////////////////////////////////////////////
// access
// ///////////////////////////////////////////////////////////
/**
 * @return the x-coordinate of this point (internal reference, not a copy)
 */
public GFElement getX() {
return mX;
}
/**
 * @return the y-coordinate of this point (internal reference, not a copy)
 */
public GFElement getY() {
return mY;
}
/**
 * @return the z-coordinate of this point (internal reference, not a copy)
 */
public GFElement getZ() {
return mZ;
}
/**
 * Return the x-coordinate of this point in affine representation. In this
 * class, the projective representation x = X/Z<sup>2</sup> and y = Y/Z<sup>3</sup>
 * is chosen to speed up point addition. This method returns the
 * x-coordinate in affine representation.
 *
 * @return the x-coordinate of this point in affine representation
 */
public GFElement getXAffin() {
// TODO the zero point has no affine coordinates
if (isZero()) {
return GFPElement.ZERO(mP);
}
// return mX*mZ^-2, caching z^2 for later reuse
if (mZ2 == null) {
mZ2 = (GFPElement) mZ.multiply(mZ);
}
return mX.multiply(mZ2.invert());
}
/**
 * Return the y-coordinate of this point in affine representation. In this
 * class, the projective representation x = X/Z<sup>2</sup> and y = Y/Z<sup>3</sup>
 * is chosen to speed up point addition. This method returns the
 * y-coordinate in affine representation.
 *
 * @return the y-coordinate of this point in affine representation
 */
public GFElement getYAffin() {
// TODO the zero point has no affine coordinates
if (isZero()) {
return GFPElement.ZERO(mP);
}
// return mY*mZ^-3, caching z^3 for later reuse
if (mZ3 == null) {
mZ3 = (GFPElement) mZ.multiply(mZ).multiply(mZ);
}
return mY.multiply(mZ3.invert());
}
/**
 * Returns this point with affine coordinates (x = X/Z<sup>2</sup>,
 * y = Y/Z<sup>3</sup>). If the z-coordinate already is 0 or 1, this point
 * itself is returned unchanged.
 *
 * @return an affine representation of this point
 */
public Point getAffin() {
    if (mZ.isOne() || mZ.isZero()) {
        // already affine (or the point at infinity): nothing to convert
        return this;
    }
    final GFElement zInv = mZ.invert();
    final GFElement zInv2 = zInv.multiply(zInv); // z^-2
    final GFElement zInv3 = zInv2.multiply(zInv); // z^-3
    final GFPElement affineX = (GFPElement) mX.multiply(zInv2);
    final GFPElement affineY = (GFPElement) mY.multiply(zInv3);
    return new PointGFP(affineX, affineY, (EllipticCurveGFP) mE);
}
/**
 * Tests whether this point is on the curve mE. This method returns
 * <tt>true</tt>, if <br>
 * <tt><tt>mY</tt><sup>2</sup> -
 * <tt>mX</tt><sup>3</sup> - <tt>mA</tt>*<tt>mX</tt>*
 * <tt>mZ</tt><sup>4</sup> - <tt>mB</tt>*<tt>mZ</tt><sup>6</sup>
 * = 0</tt>,<br>
 * otherwise <tt>false</tt>. As a side effect, the lazily computed values
 * z<sup>2</sup>, z<sup>3</sup>, and a*z<sup>4</sup> are cached.
 *
 * @return <tt><tt>mY</tt><sup>2</sup> - <tt>mX</tt>
 *         <sup>3</sup> - <tt>mA</tt>*<tt>mX</tt>*<tt>mZ</tt>
 *         <sup>4</sup> - <tt>mB</tt>*<tt>mZ</tt><sup>6</sup> == 0</tt>
 * @see EllipticCurveGFP
 */
public boolean onCurve() {
// The point at infinity is always on the curve:
if (isZero()) {
return true;
}
// y^2
final GFElement y2 = mY.multiply(mY);
// x^3
final GFElement x3 = mX.multiply(mX).multiply(mX);
/*
 * If the jacobian coordinate Z is 1, we can use the simpler affine
 * equation for E:
 */
if (mZ.isOne()) {
// Compare y^2 to (x^3 + ax + b):
final GFElement ax = mA.multiply(mX); // a*x
return y2.equals(x3.add(ax).add(mB));
}
/*
 * Z != 1, we have to use the jacobian equation for E:
 */
// Update mZ* fields if necessary:
if (mZ2 == null) {
mZ2 = (GFPElement) mZ.multiply(mZ); // z^2
}
if (mZ3 == null) {
mZ3 = (GFPElement) mZ2.multiply(mZ); // z^3
}
if (mAZ4 == null) {
mAZ4 = (GFPElement) mZ3.multiply(mZ).multiply(mA); // a*z^4
}
// Compare y^2 to (x^3 + axz^4 + bz^6):
final GFElement aXZ4 = mAZ4.multiply(mX); // a*x*z^4
final GFElement bZ6 = mB.multiply(mZ3).multiply(mZ3); // b*z^6
return y2.equals(x3.add(aXZ4).add(bZ6));
}
/**
 * @return <tt>true</tt> if this point is the point at infinity (the
 *         projective point (1, 1, 0)), <tt>false</tt> otherwise.
 */
public boolean isZero() {
return mX.isOne() && mY.isOne() && mZ.isZero();
}
// ////////////////////////////////////////////////////////////////////
// arithmetic
// ////////////////////////////////////////////////////////////////////
/**
 * Returns the sum of this point and <tt>other</tt>. The formula is:<br>
 * X<sub>3</sub> = -H<sup>3</sup> - 2U<sub>1</sub>H<sup>2</sup> + r<sup>2</sup><br>
 * Y<sub>3</sub> = -S<sub>1</sub>H<sup>3</sup> + r*(U<sub>1</sub>H<sup>2</sup> -
 * X<sub>3</sub>)<br>
 * Z<sub>3</sub> = Z<sub>1</sub>Z<sub>2</sub>H<br>
 * with<br>
 * U<sub>1</sub> = X<sub>1</sub>Z<sub>2</sub><sup>2</sup><br>
 * U<sub>2</sub> = X<sub>2</sub>Z<sub>1</sub><sup>2</sup><br>
 * S<sub>1</sub> = Y<sub>1</sub>Z<sub>2</sub><sup>3</sup><br>
 * S<sub>2</sub> = Y<sub>2</sub>Z<sub>1</sub><sup>3</sup><br>
 * H = U<sub>2</sub> - U<sub>1</sub><br>
 * r = S<sub>2</sub> - S<sub>1</sub><br>
 *
 * @param other
 *            point to add to this point
 * @return <tt>this + other</tt>
 */
public Point add(Point other) {
PointGFP result = new PointGFP(this);
result.addToThis(other);
return result;
}
/**
 * Adds to this point (in place) the point <tt>other</tt>. The formula is:<br>
 * X<sub>3</sub> = -H<sup>3</sup> - 2U<sub>1</sub>H<sup>2</sup> + r<sup>2</sup><br>
 * Y<sub>3</sub> = -S<sub>1</sub>H<sup>3</sup> + r*(U<sub>1</sub>H<sup>2</sup> -
 * X<sub>3</sub>)<br>
 * Z<sub>3</sub> = Z<sub>1</sub>Z<sub>2</sub>H<br>
 * with<br>
 * U<sub>1</sub> = X<sub>1</sub>Z<sub>2</sub><sup>2</sup><br>
 * U<sub>2</sub> = X<sub>2</sub>Z<sub>1</sub><sup>2</sup><br>
 * S<sub>1</sub> = Y<sub>1</sub>Z<sub>2</sub><sup>3</sup><br>
 * S<sub>2</sub> = Y<sub>2</sub>Z<sub>1</sub><sup>3</sup><br>
 * H = U<sub>2</sub> - U<sub>1</sub><br>
 * r = S<sub>2</sub> - S<sub>1</sub><br>
 * <p>
 * NOTE: the locals U1, U2, S2, z are deliberately recycled below to hold
 * intermediate products; the statement order is significant.
 *
 * @param other
 *            point to add to this point
 */
public void addToThis(Point other) {
if (!(other instanceof PointGFP)) {
throw new DifferentCurvesException();
}
PointGFP otherPoint = (PointGFP) other;
// 0 + other = other
if (isZero()) {
assign(otherPoint);
return;
}
// this + 0 = this
if (other.isZero()) {
return;
}
GFElement oX = otherPoint.mX;
GFElement oY = otherPoint.mY;
GFElement oZ = otherPoint.mZ;
GFElement oZ2 = otherPoint.mZ2;
GFElement oZ3 = otherPoint.mZ3;
GFElement U1 = null;
GFElement U2 = null;
GFElement S1 = null;
GFElement S2 = null;
if (oZ.isOne()) {
// other is affine: U1 = X1, S1 = Y1
U1 = mX;
S1 = mY;
} else {
// use the other point's cached z^2/z^3 if available
if (oZ2 == null || oZ3 == null) {
oZ2 = oZ.multiply(oZ);
oZ3 = oZ2.multiply(oZ);
}
// U1 = X1 * Z2^2
U1 = mX.multiply(oZ2);
// S1 = Y1 * Z2^3
S1 = mY.multiply(oZ3);
}
if (mZ.isOne()) {
// this point is affine: U2 = X2, S2 = Y2
U2 = oX;
S2 = oY;
} else {
// compute and cache this point's z^2/z^3 if necessary
if (mZ2 == null || mZ3 == null) {
mZ2 = (GFPElement) mZ.multiply(mZ);
mZ3 = (GFPElement) mZ2.multiply(mZ);
}
// U2 = X2 * Z1^2
U2 = oX.multiply(mZ2);
// S2 = Y2 * Z1^3
S2 = oY.multiply(mZ3);
}
// H = U2 - U1
GFElement H = U2.subtract(U1);
// r = S2 - S1
GFElement r = S2.subtract(S1);
if (H.isZero()) {
if (r.isZero()) {
// same point: addition degenerates to doubling
multiplyThisBy2();
return;
}
// this == -other: the sum is the point at infinity
assignZero();
return;
}
// U2 = H^2
U2 = H.multiply(H);
// S2 = H^3
S2 = U2.multiply(H);
// U2 = U1 * H^2
U2.multiplyThisBy(U1);
// x = r^2 - H^3 - 2*U1*H^2
GFElement x = r.multiply(r).subtract(S2).subtract(U2.add(U2));
// y = r*(U1*H^2 - x) - S1*H^3
GFElement z = S1.multiply(S2);
GFElement y = r.multiply(U2.subtract(x)).subtract(z);
// z = Z1 * Z2 * H (skipping factors that equal 1)
if (mZ.isOne()) {
if (!oZ.isOne()) {
z = oZ.multiply(H);
} else {
z = H;
}
} else if (!oZ.isOne()) {
U1 = mZ.multiply(oZ);
z = U1.multiply(H);
} else {
z = mZ.multiply(H);
}
assign((GFPElement) x, (GFPElement) y, (GFPElement) z);
}
/**
 * Adds in affine coordinates to this point the point <code>other</code>.
 *
 * @param other
 *            point to add to this point
 * @exception DifferentCurvesException
 *                when <code>other</code> is defined over another curve
 * @return <code>this</code> + <code>other</code> in affine coordinates
 */
public Point addAffine(Point other) {
    PointGFP p = (PointGFP) this.getAffin();
    PointGFP o = (PointGFP) other.getAffin();
    // 0 + other = other, this + 0 = this
    if (this.isZero()) {
        return new PointGFP(o);
    }
    if (other.isZero()) {
        return new PointGFP(p);
    }
    FlexiBigInt boX = o.mX.toFlexiBigInt();
    FlexiBigInt boY = o.mY.toFlexiBigInt();
    FlexiBigInt bpX = p.mX.toFlexiBigInt();
    FlexiBigInt bpY = p.mY.toFlexiBigInt();
    // Compare coordinate VALUES. The original code used reference equality
    // (pX == oX), so equal points held in distinct objects slipped through
    // to the slope computation and failed on modInverse(0).
    if (bpX.equals(boX)) {
        if (bpY.equals(boY)) {
            // P == other -> double(P)
            return p.multiplyBy2Affine();
        }
        // same x, different y: other == -P, so the sum is the point at
        // infinity (the slope is undefined here)
        return new PointGFP((EllipticCurveGFP) mE);
    }
    // lambda = (y2 - y1) / (x2 - x1) mod p
    FlexiBigInt lambda = (boX.subtract(bpX)).modInverse(mP);
    lambda = lambda.multiply(boY.subtract(bpY)).mod(mP);
    // x3 = lambda^2 - x1 - x2 mod p
    FlexiBigInt x = lambda.multiply(lambda).mod(mP);
    x = x.subtract(bpX).subtract(boX);
    x = x.mod(mP);
    // y3 = lambda*(x1 - x3) - y1 mod p
    FlexiBigInt y = bpX.subtract(x);
    y = y.multiply(lambda);
    y = y.subtract(bpY).mod(mP);
    GFPElement gfpx = new GFPElement(x, mP);
    GFPElement gfpy = new GFPElement(y, mP);
    try {
        return new PointGFP(gfpx, gfpy, (EllipticCurveGFP) mE);
    } catch (InvalidPointException IPExc) {
        throw new RuntimeException("InvalidPointException: "
                + IPExc.getMessage());
    }
}
/**
 * Subtracts point <tt>other</tt> from this point.
 *
 * @param other
 *            another Point
 * @return <tt>this</tt> - <tt>other</tt>
 */
public Point subtract(Point other) {
PointGFP result = new PointGFP(this);
result.subtractFromThis(other);
return result;
}
/**
 * Subtracts point <tt>other</tt> from this point (in place), computed as
 * <tt>this + (-other)</tt>.
 *
 * @param other
 *            another Point
 */
public void subtractFromThis(Point other) {
if (!(other instanceof PointGFP)) {
throw new DifferentCurvesException();
}
// minusOther is a fresh copy, so storing its coordinate references below
// is safe
PointGFP minusOther = (PointGFP) other.negate();
if (isZero()) {
// 0 - other = -other
assign(minusOther.mX, minusOther.mY, minusOther.mZ);
} else {
addToThis(minusOther);
}
}
/**
 * Returns the inverse of this point.
 *
 * @return -<tt>this</tt>
 */
public Point negate() {
PointGFP result = new PointGFP(this);
result.negateThis();
return result;
}
/**
 * Negates this point in place by replacing its y-coordinate with -y mod p.
 * The point at infinity is its own inverse and is left untouched.
 */
public void negateThis() {
if (!isZero()) {
// y = -mY mod mP
// NOTE(review): if mY == 0 this yields y == mP; presumably the
// GFPElement constructor reduces mod p — confirm.
FlexiBigInt y = mP.add(mY.toFlexiBigInt().negate());
mY = new GFPElement(y, mP);
}
}
/**
 * Returns 2*<tt>this</tt>. The formula is:<br>
 * X<sub>2</sub> = -2S + M<sup>2</sup><br>
 * Y<sub>2</sub> = M(S - X<sub>2</sub>) - 8Y<sup>4</sup><br>
 * Z<sub>2</sub> = 2YZ<br>
 * with<br>
 * S = 4XY<sup>2</sup><br>
 * M = 3X<sup>2</sup> + a(Z<sup>2</sup>)<sup>2</sup>
 *
 * @return 2*<tt>this</tt>
 */
public Point multiplyBy2() {
PointGFP result = new PointGFP(this);
result.multiplyThisBy2();
return result;
}
/**
 * Doubles this point in place. The formula is:<br>
 * X<sub>2</sub> = -2S + M<sup>2</sup><br>
 * Y<sub>2</sub> = M(S - X<sub>2</sub>) - T<br>
 * Z<sub>2</sub> = 2YZ<br>
 * with<br>
 * S = 4XY<sup>2</sup><br>
 * M = 3X<sup>2</sup> + aZ<sup>4</sup><br>
 * T = 8Y<sup>4</sup>
 * <p>
 * NOTE: the locals x, y, z, U are recycled to hold several intermediate
 * values; the statement order is significant.
 */
public void multiplyThisBy2() {
// 2 * 0 = 0
if (isZero()) {
assignZero();
return;
}
// a point with y == 0 is its own inverse, so doubling yields infinity
if (mY.isZero()) {
assignZero();
return;
}
// z = Y^2
GFElement z = mY.multiply(mY);
// S = 4XY^2
GFElement S = mX.multiply(z);
GFElement x = S.add(S);
S = x.add(x);
// M = 3X^2 + a(Z^2)^2, computing and caching a*z^4 if necessary
if (mAZ4 == null) {
if (mZ.isOne()) {
mAZ4 = (GFPElement) mA.clone();
} else {
if (mZ2 == null) {
mZ2 = (GFPElement) mZ.multiply(mZ);
}
x = mZ2.multiply(mZ2);
mAZ4 = (GFPElement) mA.multiply(x);
}
}
GFElement y = mX.multiply(mX);
GFElement M = y.add(y).add(y).add(mAZ4); // 3X^2+aZ^4
// x = X_2 = -2S + M^2
x = M.multiply(M).subtract(S.add(S));
// y = Y_2 = M(S - X_2) - 8Y^4
y = z.multiply(z);
GFElement U = y.add(y); // 2Y^4
z = U.add(U);
U = z.add(z); // 8Y^4
y = M.multiply(S.subtract(x)).subtract(U);
// z = Z_2 = 2YZ (Z factor skipped if it is 1)
if (!mZ.isOne()) {
z = mY.multiply(mZ);
} else {
z = mY;
}
z = z.add(z);
assign((GFPElement) x, (GFPElement) y, (GFPElement) z);
}
/**
 * Doubles this point in affine coordinates.
 *
 * @return 2*<code>this</code> in affine coordinates
 */
public Point multiplyBy2Affine() {
    // 2 * 0 = 0
    if (this.isZero()) {
        return new PointGFP((EllipticCurveGFP) this.mE);
    }
    // A point with y == 0 is its own inverse, so 2*(x, 0) is the point at
    // infinity. The original code compared the GFPElement mY against a
    // FlexiBigInt (mY.equals(FlexiBigInt.ZERO)), a type mismatch; use the
    // field element's own zero test, as multiplyThisBy2() does.
    if (this.mY.isZero()) {
        return new PointGFP((EllipticCurveGFP) mE);
    }
    PointGFP p = (PointGFP) this.getAffin();
    FlexiBigInt pX = p.mX.toFlexiBigInt();
    FlexiBigInt pY = p.mY.toFlexiBigInt();
    FlexiBigInt lambda, x, y, tmp;
    // lambda = (3*x^2 + a) / (2*y) mod p
    tmp = pY.add(pY).modInverse(mP);
    lambda = pX.multiply(pX).mod(mP);
    lambda = lambda.multiply(new FlexiBigInt("3")).mod(mP);
    lambda = lambda.add(mA.toFlexiBigInt());
    lambda = lambda.multiply(tmp).mod(mP);
    // x' = lambda^2 - 2*x mod p
    x = lambda.multiply(lambda).mod(mP);
    x = x.subtract(pX.add(pX)).mod(mP);
    // y' = lambda*(x - x') - y mod p
    y = pX.subtract(x);
    y = lambda.multiply(y);
    y = y.subtract(pY).mod(mP);
    GFPElement gfpx = new GFPElement(x, mP);
    GFPElement gfpy = new GFPElement(y, mP);
    return new PointGFP(gfpx, gfpy, (EllipticCurveGFP) p.mE);
}
// ////////////////////////////////////////////////////////////////////
// Output
// ////////////////////////////////////////////////////////////////////
/**
 * Returns this point in affine, uncompressed form as a byte array
 * (X9.63). The first byte is the value 4, indicating the uncompressed
 * format; the remainder is split into two fixed-width halves holding the
 * x-coordinate and the y-coordinate.
 *
 * @return <tt>this</tt> as byte array
 */
byte[] encodeUncompressed() {
    // the zero point is encoded as a single byte 0
    if (isZero()) {
        return new byte[1];
    }
    // bytes per coordinate: ceil(bitLength(p) / 8)
    final int byteLen = (mP.bitLength() + 7) >>> 3;
    final byte[] encoded = new byte[(byteLen << 1) + 1];
    encoded[0] = 4;
    final byte[] bX = FlexiBigIntUtils.toMinimalByteArray(getXAffin()
            .toFlexiBigInt());
    final byte[] bY = FlexiBigIntUtils.toMinimalByteArray(getYAffin()
            .toFlexiBigInt());
    // right-align both coordinates inside their fixed-width slots
    System.arraycopy(bX, 0, encoded, 1 + byteLen - bX.length, bX.length);
    System.arraycopy(bY, 0, encoded, 1 + (byteLen << 1) - bY.length, bY.length);
    return encoded;
}
/**
 * Returns this point in affine, compressed form as a byte array (X9.63).
 * The first byte is 2 or 3 (encoding the parity of the y-coordinate); the
 * remainder holds the x-coordinate.
 *
 * @return <tt>this</tt> as byte array
 */
byte[] encodeCompressed() {
    // the zero point is encoded as a single byte 0
    if (isZero()) {
        return new byte[1];
    }
    // bytes per coordinate: ceil(bitLength(p) / 8)
    final int byteLen = (mP.bitLength() + 7) >>> 3;
    final byte[] encoded = new byte[byteLen + 1];
    final byte[] bX = FlexiBigIntUtils.toMinimalByteArray(getXAffin()
            .toFlexiBigInt());
    // right-align x inside its fixed-width slot
    System.arraycopy(bX, 0, encoded, 1 + byteLen - bX.length, bX.length);
    // PC = 2 if y is even, PC = 3 if y is odd
    final boolean yOdd = getYAffin().toFlexiBigInt().testBit(0);
    encoded[0] = (byte) (yOdd ? 3 : 2);
    return encoded;
}
/**
 * Returns this point in affine, hybrid form as a byte array (X9.63). The
 * first byte is 6 or 7 (encoding the parity of the y-coordinate); the
 * remainder is split into two fixed-width halves holding the x-coordinate
 * and the y-coordinate.
 *
 * @return <tt>this</tt> as byte array
 */
byte[] encodeHybrid() {
    // the zero point is encoded as a single byte 0
    if (isZero()) {
        return new byte[1];
    }
    // bytes per coordinate: ceil(bitLength(p) / 8)
    final int byteLen = (mP.bitLength() + 7) >>> 3;
    final byte[] encoded = new byte[(byteLen << 1) + 1];
    final FlexiBigInt x = getXAffin().toFlexiBigInt();
    final FlexiBigInt y = getYAffin().toFlexiBigInt();
    final byte[] bX = FlexiBigIntUtils.toMinimalByteArray(x);
    final byte[] bY = FlexiBigIntUtils.toMinimalByteArray(y);
    // right-align both coordinates inside their fixed-width slots
    System.arraycopy(bX, 0, encoded, 1 + byteLen - bX.length, bX.length);
    System.arraycopy(bY, 0, encoded, 1 + (byteLen << 1) - bY.length, bY.length);
    // PC = 6 if y is even, PC = 7 if y is odd
    encoded[0] = (byte) (y.testBit(0) ? 7 : 6);
    return encoded;
}
// ////////////////////////////////////////////////////////////////////
// help functions
// ////////////////////////////////////////////////////////////////////
/**
 * Computes the y-coordinate from the given x-coordinate, the elliptic curve
 * mE and the least significant bit yMod2 of y. Let g = x<sup>3</sup> + ax +
 * b mod p and z = sqrt(g) mod p. Then, y = z if either<br>
 * y is even and yMod2 = 0 or<br>
 * y is odd and yMod2 = 1.<br>
 * Otherwise, y = p - z.
 *
 * @param yMod2
 *            the least significant bit of the desired y-coordinate
 * @param x
 *            the x-coordinate
 * @return the decompressed y-coordinate
 * @throws InvalidPointException
 *             if g is not a quadratic residue mod p (i.e., x does not
 *             belong to a point on the curve)
 */
private GFPElement decompress(boolean yMod2, GFElement x)
        throws InvalidPointException {
    // compute g = x^3 + ax + b mod p
    FlexiBigInt xVal = x.toFlexiBigInt();
    // x3 = x^3
    FlexiBigInt x3 = xVal.multiply(xVal).multiply(xVal);
    FlexiBigInt g = mA.toFlexiBigInt().multiply(xVal);
    g = g.add(x3);
    g = g.add(mB.toFlexiBigInt());
    g = g.mod(mP);
    FlexiBigInt z;
    try {
        // compute z = sqrt(g) mod p
        z = IntegerFunctions.ressol(g, mP);
    } catch (NoQuadraticResidueException NQRExc) {
        throw new InvalidPointException("NoQuadraticResidueException: "
                + NQRExc.getMessage());
    }
    // if the parity of z differs from the requested parity, take the other
    // square root p - z (simplified from the long-hand XOR in the original)
    if (z.testBit(0) != yMod2) {
        z = mP.subtract(z);
    }
    return new GFPElement(z, mP);
}
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying parameters of a newly created pipeline cache.
*
* <h5>Valid Usage</h5>
*
* <ul>
* <li>If {@code initialDataSize} is not 0, it <b>must</b> be equal to the size of {@code pInitialData}, as returned by {@link VK10#vkGetPipelineCacheData GetPipelineCacheData} when {@code pInitialData} was originally retrieved</li>
* <li>If {@code initialDataSize} is not 0, {@code pInitialData} <b>must</b> have been retrieved from a previous call to {@link VK10#vkGetPipelineCacheData GetPipelineCacheData}</li>
* </ul>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code sType} <b>must</b> be {@link VK10#VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO}</li>
* <li>{@code pNext} <b>must</b> be {@code NULL}</li>
* <li>{@code flags} <b>must</b> be 0</li>
* <li>If {@code initialDataSize} is not 0, {@code pInitialData} <b>must</b> be a pointer to an array of {@code initialDataSize} bytes</li>
* </ul>
*
* <h5>See Also</h5>
*
* <p>{@link VK10#vkCreatePipelineCache CreatePipelineCache}</p>
*
* <h3>Member documentation</h3>
*
* <ul>
* <li>{@code sType} – the type of this structure.</li>
* <li>{@code pNext} – {@code NULL} or a pointer to an extension-specific structure.</li>
* <li>{@code flags} – reserved for future use.</li>
* <li>{@code initialDataSize} – the number of bytes in {@code pInitialData}. If {@code initialDataSize} is zero, the pipeline cache will initially be empty.</li>
* <li>{@code pInitialData} – a pointer to previously retrieved pipeline cache data. If the pipeline cache data is incompatible (as defined below) with the device, the pipeline cache will be initially empty. If {@code initialDataSize} is zero, {@code pInitialData} is ignored.</li>
* </ul>
*
* <h3>Layout</h3>
*
* <pre><code>struct VkPipelineCacheCreateInfo {
VkStructureType sType;
const void * pNext;
VkPipelineCacheCreateFlags flags;
size_t initialDataSize;
const void * pInitialData;
}</code></pre>
*/
public class VkPipelineCacheCreateInfo extends Struct implements NativeResource {
	/** The struct size in bytes. */
	public static final int SIZEOF;
	/** The struct alignment in bytes. */
	public static final int ALIGNOF;
	/** The struct member offsets. */
	public static final int
		STYPE,
		PNEXT,
		FLAGS,
		INITIALDATASIZE,
		PINITIALDATA;
	// Compute the native layout once at class-load time: a 4-byte sType, a
	// pointer-sized pNext, 4-byte flags, a pointer-sized size_t and a pointer.
	// The platform-dependent offsets are cached in the constants above.
	static {
		Layout layout = __struct(
			__member(4),
			__member(POINTER_SIZE),
			__member(4),
			__member(POINTER_SIZE),
			__member(POINTER_SIZE)
		);
		SIZEOF = layout.getSize();
		ALIGNOF = layout.getAlignment();
		STYPE = layout.offsetof(0);
		PNEXT = layout.offsetof(1);
		FLAGS = layout.offsetof(2);
		INITIALDATASIZE = layout.offsetof(3);
		PINITIALDATA = layout.offsetof(4);
	}
	// Package-private: wraps an existing native allocation. The container (may be
	// null) is retained only to keep the backing buffer from being garbage-collected.
	VkPipelineCacheCreateInfo(long address, ByteBuffer container) {
		super(address, container);
	}
	/**
	 * Creates a {@link VkPipelineCacheCreateInfo} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
	 * visible to the struct instance and vice versa.
	 *
	 * <p>The created instance holds a strong reference to the container object.</p>
	 */
	public VkPipelineCacheCreateInfo(ByteBuffer container) {
		this(memAddress(container), checkContainer(container, SIZEOF));
	}
	@Override
	public int sizeof() { return SIZEOF; }
	/** Returns the value of the {@code sType} field. */
	public int sType() { return nsType(address()); }
	/** Returns the value of the {@code pNext} field. */
	public long pNext() { return npNext(address()); }
	/** Returns the value of the {@code flags} field. */
	public int flags() { return nflags(address()); }
	/** Returns the value of the {@code initialDataSize} field. */
	public long initialDataSize() { return ninitialDataSize(address()); }
	/** Returns a {@link ByteBuffer} view of the data pointed to by the {@code pInitialData} field. */
	public ByteBuffer pInitialData() { return npInitialData(address()); }
	/** Sets the specified value to the {@code sType} field. */
	public VkPipelineCacheCreateInfo sType(int value) { nsType(address(), value); return this; }
	/** Sets the specified value to the {@code pNext} field. */
	public VkPipelineCacheCreateInfo pNext(long value) { npNext(address(), value); return this; }
	/** Sets the specified value to the {@code flags} field. */
	public VkPipelineCacheCreateInfo flags(int value) { nflags(address(), value); return this; }
	/** Sets the address of the specified {@link ByteBuffer} to the {@code pInitialData} field. */
	// Note: there is no public initialDataSize setter; the size is derived from the
	// buffer's remaining() inside npInitialData, keeping pointer and size consistent.
	public VkPipelineCacheCreateInfo pInitialData(ByteBuffer value) { npInitialData(address(), value); return this; }
	/** Initializes this struct with the specified values. */
	public VkPipelineCacheCreateInfo set(
		int sType,
		long pNext,
		int flags,
		ByteBuffer pInitialData
	) {
		sType(sType);
		pNext(pNext);
		flags(flags);
		pInitialData(pInitialData);
		return this;
	}
	/** Unsafe version of {@link #set(VkPipelineCacheCreateInfo) set}. */
	// Raw memcpy of SIZEOF bytes from the source struct address into this struct.
	public VkPipelineCacheCreateInfo nset(long struct) {
		memCopy(struct, address(), SIZEOF);
		return this;
	}
	/**
	 * Copies the specified struct data to this struct.
	 *
	 * @param src the source struct
	 *
	 * @return this struct
	 */
	public VkPipelineCacheCreateInfo set(VkPipelineCacheCreateInfo src) {
		return nset(src.address());
	}
	// ----------------------------------- heap / buffer-backed allocation
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
	public static VkPipelineCacheCreateInfo malloc() {
		return create(nmemAlloc(SIZEOF));
	}
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
	public static VkPipelineCacheCreateInfo calloc() {
		return create(nmemCalloc(1, SIZEOF));
	}
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance allocated with {@link BufferUtils}. */
	// Backed by a garbage-collected NIO buffer; no explicit free required.
	public static VkPipelineCacheCreateInfo create() {
		return new VkPipelineCacheCreateInfo(BufferUtils.createByteBuffer(SIZEOF));
	}
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
	public static VkPipelineCacheCreateInfo create(long address) {
		return address == NULL ? null : new VkPipelineCacheCreateInfo(address, null);
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer malloc(int capacity) {
		return create(nmemAlloc(capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer calloc(int capacity) {
		return create(nmemCalloc(capacity, SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated with {@link BufferUtils}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(int capacity) {
		return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
	}
	/**
	 * Create a {@link VkPipelineCacheCreateInfo.Buffer} instance at the specified memory.
	 *
	 * @param address the memory address
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(long address, int capacity) {
		return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
	}
	// ----------------------------------- stack allocation
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance allocated on the thread-local {@link MemoryStack}. */
	public static VkPipelineCacheCreateInfo mallocStack() {
		return mallocStack(stackGet());
	}
	/** Returns a new {@link VkPipelineCacheCreateInfo} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
	public static VkPipelineCacheCreateInfo callocStack() {
		return callocStack(stackGet());
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static VkPipelineCacheCreateInfo mallocStack(MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, SIZEOF));
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static VkPipelineCacheCreateInfo callocStack(MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated on the thread-local {@link MemoryStack}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity) {
		return mallocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity) {
		return callocStack(capacity, stackGet());
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity, MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
	}
	/**
	 * Returns a new {@link VkPipelineCacheCreateInfo.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity, MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
	}
	// ----------------------------------- unsafe (raw-address) accessors
	/** Unsafe version of {@link #sType}. */
	public static int nsType(long struct) { return memGetInt(struct + VkPipelineCacheCreateInfo.STYPE); }
	/** Unsafe version of {@link #pNext}. */
	public static long npNext(long struct) { return memGetAddress(struct + VkPipelineCacheCreateInfo.PNEXT); }
	/** Unsafe version of {@link #flags}. */
	public static int nflags(long struct) { return memGetInt(struct + VkPipelineCacheCreateInfo.FLAGS); }
	/** Unsafe version of {@link #initialDataSize}. */
	// size_t is pointer-sized, hence memGetAddress rather than memGetInt/memGetLong.
	public static long ninitialDataSize(long struct) { return memGetAddress(struct + VkPipelineCacheCreateInfo.INITIALDATASIZE); }
	/** Unsafe version of {@link #pInitialData() pInitialData}. */
	// The view's length is taken from the struct's own initialDataSize field.
	public static ByteBuffer npInitialData(long struct) { return memByteBuffer(memGetAddress(struct + VkPipelineCacheCreateInfo.PINITIALDATA), (int)ninitialDataSize(struct)); }
	/** Unsafe version of {@link #sType(int) sType}. */
	public static void nsType(long struct, int value) { memPutInt(struct + VkPipelineCacheCreateInfo.STYPE, value); }
	/** Unsafe version of {@link #pNext(long) pNext}. */
	public static void npNext(long struct, long value) { memPutAddress(struct + VkPipelineCacheCreateInfo.PNEXT, value); }
	/** Unsafe version of {@link #flags(int) flags}. */
	public static void nflags(long struct, int value) { memPutInt(struct + VkPipelineCacheCreateInfo.FLAGS, value); }
	/** Sets the specified value to the {@code initialDataSize} field of the specified {@code struct}. */
	public static void ninitialDataSize(long struct, long value) { memPutAddress(struct + VkPipelineCacheCreateInfo.INITIALDATASIZE, value); }
	/** Unsafe version of {@link #pInitialData(ByteBuffer) pInitialData}. */
	// Writes the pointer, then keeps initialDataSize in sync (0 for a null buffer).
	public static void npInitialData(long struct, ByteBuffer value) { memPutAddress(struct + VkPipelineCacheCreateInfo.PINITIALDATA, memAddressSafe(value)); ninitialDataSize(struct, value == null ? 0 : value.remaining()); }
	/**
	 * Validates pointer members that should not be {@code NULL}.
	 *
	 * @param struct the struct to validate
	 */
	public static void validate(long struct) {
		// pInitialData may only be NULL when initialDataSize is zero.
		if ( ninitialDataSize(struct) != 0 )
			checkPointer(memGetAddress(struct + VkPipelineCacheCreateInfo.PINITIALDATA));
	}
	/**
	 * Calls {@link #validate(long)} for each struct contained in the specified struct array.
	 *
	 * @param array the struct array to validate
	 * @param count the number of structs in {@code array}
	 */
	public static void validate(long array, int count) {
		for ( int i = 0; i < count; i++ )
			validate(array + i * SIZEOF);
	}
	// -----------------------------------
	/** An array of {@link VkPipelineCacheCreateInfo} structs. */
	public static class Buffer extends StructBuffer<VkPipelineCacheCreateInfo, Buffer> implements NativeResource {
		/**
		 * Creates a new {@link VkPipelineCacheCreateInfo.Buffer} instance backed by the specified container.
		 *
		 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
		 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
		 * by {@link VkPipelineCacheCreateInfo#SIZEOF}, and its mark will be undefined.
		 *
		 * <p>The created buffer instance holds a strong reference to the container object.</p>
		 */
		public Buffer(ByteBuffer container) {
			super(container, container.remaining() / SIZEOF);
		}
		// Package-private: wraps existing memory with explicit mark/position/limit/capacity.
		Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			super(address, container, mark, pos, lim, cap);
		}
		@Override
		protected Buffer self() {
			return this;
		}
		@Override
		protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			return new Buffer(address, container, mark, pos, lim, cap);
		}
		@Override
		protected VkPipelineCacheCreateInfo newInstance(long address) {
			return new VkPipelineCacheCreateInfo(address, container);
		}
		@Override
		protected int sizeof() {
			return SIZEOF;
		}
		/** Returns the value of the {@code sType} field. */
		public int sType() { return VkPipelineCacheCreateInfo.nsType(address()); }
		/** Returns the value of the {@code pNext} field. */
		public long pNext() { return VkPipelineCacheCreateInfo.npNext(address()); }
		/** Returns the value of the {@code flags} field. */
		public int flags() { return VkPipelineCacheCreateInfo.nflags(address()); }
		/** Returns the value of the {@code initialDataSize} field. */
		public long initialDataSize() { return VkPipelineCacheCreateInfo.ninitialDataSize(address()); }
		/** Returns a {@link ByteBuffer} view of the data pointed to by the {@code pInitialData} field. */
		public ByteBuffer pInitialData() { return VkPipelineCacheCreateInfo.npInitialData(address()); }
		/** Sets the specified value to the {@code sType} field. */
		public VkPipelineCacheCreateInfo.Buffer sType(int value) { VkPipelineCacheCreateInfo.nsType(address(), value); return this; }
		/** Sets the specified value to the {@code pNext} field. */
		public VkPipelineCacheCreateInfo.Buffer pNext(long value) { VkPipelineCacheCreateInfo.npNext(address(), value); return this; }
		/** Sets the specified value to the {@code flags} field. */
		public VkPipelineCacheCreateInfo.Buffer flags(int value) { VkPipelineCacheCreateInfo.nflags(address(), value); return this; }
		/** Sets the address of the specified {@link ByteBuffer} to the {@code pInitialData} field. */
		public VkPipelineCacheCreateInfo.Buffer pInitialData(ByteBuffer value) { VkPipelineCacheCreateInfo.npInitialData(address(), value); return this; }
	}
}
| |
/*******************************************************************************
* Copyright (c) 2012, Institute for Pervasive Computing, ETH Zurich.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Institute nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* This file is part of the Californium (Cf) CoAP framework.
******************************************************************************/
package ch.ethz.inf.vs.californium.layers;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Exchanger;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.HttpStatus;
import org.apache.http.HttpVersion;
import org.apache.http.StatusLine;
import org.apache.http.client.protocol.RequestAcceptEncoding;
import org.apache.http.client.protocol.ResponseContentEncoding;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.DefaultConnectionReuseStrategy;
import org.apache.http.impl.EnglishReasonPhraseCatalog;
import org.apache.http.impl.nio.DefaultHttpServerIODispatch;
import org.apache.http.impl.nio.DefaultNHttpServerConnection;
import org.apache.http.impl.nio.DefaultNHttpServerConnectionFactory;
import org.apache.http.impl.nio.reactor.DefaultListeningIOReactor;
import org.apache.http.message.BasicStatusLine;
import org.apache.http.nio.NHttpConnectionFactory;
import org.apache.http.nio.protocol.BasicAsyncRequestConsumer;
import org.apache.http.nio.protocol.BasicAsyncRequestHandler;
import org.apache.http.nio.protocol.HttpAsyncExchange;
import org.apache.http.nio.protocol.HttpAsyncRequestConsumer;
import org.apache.http.nio.protocol.HttpAsyncRequestHandler;
import org.apache.http.nio.protocol.HttpAsyncRequestHandlerRegistry;
import org.apache.http.nio.protocol.HttpAsyncService;
import org.apache.http.nio.reactor.IOEventDispatch;
import org.apache.http.nio.reactor.ListeningIOReactor;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.params.SyncBasicHttpParams;
import org.apache.http.protocol.HttpContext;
import org.apache.http.protocol.HttpProcessor;
import org.apache.http.protocol.HttpRequestHandler;
import org.apache.http.protocol.ImmutableHttpProcessor;
import org.apache.http.protocol.ResponseConnControl;
import org.apache.http.protocol.ResponseContent;
import org.apache.http.protocol.ResponseDate;
import org.apache.http.protocol.ResponseServer;
import ch.ethz.inf.vs.californium.coap.Message;
import ch.ethz.inf.vs.californium.coap.Request;
import ch.ethz.inf.vs.californium.coap.Response;
import ch.ethz.inf.vs.californium.util.HttpTranslator;
import ch.ethz.inf.vs.californium.util.InvalidFieldException;
import ch.ethz.inf.vs.californium.util.InvalidMethodException;
import ch.ethz.inf.vs.californium.util.Properties;
import ch.ethz.inf.vs.californium.util.TranslationException;
/**
 * Class encapsulating the logic of an HTTP server. The class creates a receiver
 * thread that is always blocked on the listen primitive. For each connection
 * this thread creates a new thread that handles the client/server dialog.
*
* @author Francesco Corazza
*/
public class HttpStack extends UpperLayer {
private static final int SOCKET_TIMEOUT = Properties.std.getInt("HTTP_SERVER_SOCKET_TIMEOUT");
private static final int GATEWAY_TIMEOUT = SOCKET_TIMEOUT * 3 / 4;
private static final String SERVER_NAME = "Californium Http Proxy";
private static final int SOCKET_BUFFER_SIZE = Properties.std.getInt("HTTP_SERVER_SOCKET_BUFFER_SIZE");
/**
* Resource associated with the proxying behavior. If a client requests
* resource indicated by
* http://proxy-address/PROXY_RESOURCE_NAME/coap-server, the proxying
* handler will forward the request desired coap server.
*/
private static final String PROXY_RESOURCE_NAME = "proxy";
/**
* The resource associated with the local resources behavior. If a client
* requests resource indicated by
* http://proxy-address/LOCAL_RESOURCE_NAME/coap-resource, the proxying
* handler will forward the request to the local resource requested.
*/
public static final String LOCAL_RESOURCE_NAME = "local";
private final ConcurrentHashMap<Request, Exchanger<Response>> exchangeMap = new ConcurrentHashMap<Request, Exchanger<Response>>();
/**
* Instantiates a new http stack on the requested port. It creates an http
* listener thread on the port.
*
* @param httpPort
* the http port
* @throws IOException
* Signals that an I/O exception has occurred.
*/
public HttpStack(int httpPort) throws IOException {
new HttpServer(httpPort);
}
/**
* Checks if a thread is waiting for the arrive of a specific response.
*
* @param request
* the request
* @return true, if is waiting
*/
public boolean isWaitingRequest(Request request) {
// DEBUG
// System.out.println(request.hashCode());
// request.prettyPrint();
//
// System.out.println(responseMap.get(request) != null);
// System.out.println(semaphoreMap.get(request) != null);
//
// for (Request r : responseMap.keySet()) {
// System.out.println(r.hashCode());
// r.prettyPrint();
// }
//
// for (Request r : semaphoreMap.keySet()) {
// System.out.println(r.hashCode());
// r.prettyPrint();
// }
// check the presence of the key in both maps
// TODO check how much is this operation heavy
// return responseMap.containsKey(request) &&
// semaphoreMap.containsKey(request);
return exchangeMap.containsKey(request);
}
/**
* Send simple http response.
*
* @param httpExchange
* the http exchange
* @param httpCode
* the http code
*/
private void sendSimpleHttpResponse(HttpAsyncExchange httpExchange, int httpCode) {
// get the empty response from the exchange
HttpResponse httpResponse = httpExchange.getResponse();
// create and set the status line
StatusLine statusLine = new BasicStatusLine(HttpVersion.HTTP_1_1, httpCode, EnglishReasonPhraseCatalog.INSTANCE.getReason(httpCode, Locale.ENGLISH));
httpResponse.setStatusLine(statusLine);
// send the error response
httpExchange.submitResponse();
}
/*
* (non-Javadoc)
* @see
* ch.ethz.inf.vs.californium.layers.UpperLayer#doSendMessage(ch.ethz.inf
* .vs.californium.coap.Message)
*/
@Override
protected void doSendMessage(Message message) throws IOException {
// the http stack is intended to send back only coap responses
// check if the message is a response
if (message instanceof Response) {
// retrieve the request linked to the response
Response response = (Response) message;
Request request = response.getRequest();
LOG.info("Handling response for request: " + request);
// fill the exchanger with the incoming response
Exchanger<Response> exchanger = exchangeMap.get(request);
try {
exchanger.exchange(response);
} catch (InterruptedException e) {
LOG.warning("Exchange interrupted: " + e.getMessage());
// remove the entry from the map
exchangeMap.remove(request);
return;
}
LOG.info("Exchanged correctly");
}
}
/**
* The Class CoapResponseWorker. This thread waits a response from the lower
* layers. It is the consumer of the producer/consumer pattern.
*
* @author Francesco Corazza
*/
private final class CoapResponseWorker extends Thread {
private final HttpAsyncExchange httpExchange;
private final HttpRequest httpRequest;
private final Request coapRequest;
/**
* Instantiates a new coap response worker.
*
* @param name
* the name
* @param coapRequest
* the coap request
* @param httpExchange
* the http exchange
* @param httpRequest
* the http request
*/
public CoapResponseWorker(String name, Request coapRequest, HttpAsyncExchange httpExchange, HttpRequest httpRequest) {
super(name);
this.coapRequest = coapRequest;
this.httpExchange = httpExchange;
this.httpRequest = httpRequest;
}
/*
* (non-Javadoc)
* @see java.lang.Thread#run()
*/
@Override
public void run() {
// get the exchanger
Exchanger<Response> exchanger = exchangeMap.get(coapRequest);
// if the map does not contain the key, send an error response
if (exchanger == null) {
LOG.warning("exchanger == null");
sendSimpleHttpResponse(httpExchange, HttpStatus.SC_INTERNAL_SERVER_ERROR);
return;
}
// get the response
Response coapResponse = null;
try {
coapResponse = exchanger.exchange(Response.NULL, GATEWAY_TIMEOUT, TimeUnit.MILLISECONDS);
} catch (TimeoutException e) {
LOG.warning("Timeout occurred");
// send the timeout error message
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_TIMEOUT);
return;
} catch (InterruptedException e) {
// if the thread is interrupted, terminate
if (isInterrupted()) {
LOG.warning("Thread interrupted");
sendSimpleHttpResponse(httpExchange, HttpStatus.SC_INTERNAL_SERVER_ERROR);
return;
}
} finally {
// remove the entry from the map
exchangeMap.remove(coapRequest);
LOG.finer("Entry removed from map");
}
if (coapResponse == null) {
LOG.warning("No coap response");
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_NOT_FOUND);
return;
}
// get the sample http response
HttpResponse httpResponse = httpExchange.getResponse();
try {
// translate the coap response in an http response
HttpTranslator.getHttpResponse(httpRequest, coapResponse, httpResponse);
LOG.finer("Outgoing http response: " + httpResponse.getStatusLine());
} catch (TranslationException e) {
LOG.warning("Failed to translate coap response to http response: " + e.getMessage());
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_TRANSLATION_ERROR);
return;
}
// send the response
httpExchange.submitResponse();
}
}
private class HttpServer {
public HttpServer(int httpPort) {
// HTTP parameters for the server
HttpParams params = new SyncBasicHttpParams();
params.setIntParameter(CoreConnectionPNames.SO_TIMEOUT, SOCKET_TIMEOUT).setIntParameter(CoreConnectionPNames.SOCKET_BUFFER_SIZE, SOCKET_BUFFER_SIZE).setBooleanParameter(CoreConnectionPNames.TCP_NODELAY, true).setParameter(CoreProtocolPNames.ORIGIN_SERVER, SERVER_NAME);
// Create HTTP protocol processing chain
// Use standard server-side protocol interceptors
HttpRequestInterceptor[] requestInterceptors = new HttpRequestInterceptor[] { new RequestAcceptEncoding() };
HttpResponseInterceptor[] responseInterceptors = new HttpResponseInterceptor[] { new ResponseContentEncoding(), new ResponseDate(), new ResponseServer(), new ResponseContent(), new ResponseConnControl() };
HttpProcessor httpProcessor = new ImmutableHttpProcessor(requestInterceptors, responseInterceptors);
// Create request handler registry
HttpAsyncRequestHandlerRegistry registry = new HttpAsyncRequestHandlerRegistry();
// register the handler that will reply to the proxy requests
registry.register("/" + PROXY_RESOURCE_NAME + "/*", new ProxyAsyncRequestHandler(PROXY_RESOURCE_NAME, true));
// register the handler for the frontend
registry.register("/" + LOCAL_RESOURCE_NAME + "/*", new ProxyAsyncRequestHandler(LOCAL_RESOURCE_NAME, false));
// register the default handler for root URIs
// wrapping a common request handler with an async request handler
registry.register("*", new BasicAsyncRequestHandler(new BaseRequestHandler()));
// Create server-side HTTP protocol handler
HttpAsyncService protocolHandler = new HttpAsyncService(httpProcessor, new DefaultConnectionReuseStrategy(), registry, params);
// Create HTTP connection factory
NHttpConnectionFactory<DefaultNHttpServerConnection> connFactory = new DefaultNHttpServerConnectionFactory(params);
// Create server-side I/O event dispatch
final IOEventDispatch ioEventDispatch = new DefaultHttpServerIODispatch(protocolHandler, connFactory);
final ListeningIOReactor ioReactor;
try {
// Create server-side I/O reactor
ioReactor = new DefaultListeningIOReactor();
// Listen of the given port
ioReactor.listen(new InetSocketAddress(httpPort));
// create the listener thread
Thread listener = new Thread("HttpStack listener") {
@Override
public void run() {
// Starts the reactor and initiates the dispatch of I/O
// event notifications to the given IOEventDispatch.
try {
LOG.info("Submitted http listening to thread 'HttpStack listener'");
ioReactor.execute(ioEventDispatch);
} catch (IOException e) {
LOG.severe("Interrupted");
}
LOG.info("Shutdown HttpStack");
}
};
listener.setDaemon(false);
listener.start();
LOG.info("HttpStack started");
} catch (IOException e) {
LOG.severe("I/O error: " + e.getMessage());
}
}
/**
* The Class BaseRequestHandler handles simples requests that do not
* need the proxying.
*
* @author Francesco Corazza
*/
private class BaseRequestHandler implements HttpRequestHandler {
/*
* (non-Javadoc)
* @see
* org.apache.http.protocol.HttpRequestHandler#handle(org.apache
* .http .HttpRequest, org.apache.http.HttpResponse,
* org.apache.http.protocol.HttpContext)
*/
@Override
public void handle(HttpRequest httpRequest, HttpResponse httpResponse, HttpContext httpContext) throws HttpException, IOException {
httpResponse.setStatusCode(HttpStatus.SC_OK);
httpResponse.setEntity(new StringEntity("Californium Proxy server"));
LOG.finer("Root request handled");
}
}
/**
* Class associated with the http service to translate the http requests
* in coap requests and to produce the http responses. Even if the class
* accepts a string indicating the name of the proxy resource, it is
* still thread-safe because the local resource is set in the
* constructor and then only read by the methods.
*
* @author Francesco Corazza
*/
private class ProxyAsyncRequestHandler implements
HttpAsyncRequestHandler<HttpRequest> {
private final String localResource;
private final boolean proxyingEnabled;
/**
* Instantiates a new proxy request handler.
*
* @param localResource
* the local resource
* @param proxyingEnabled
*/
public ProxyAsyncRequestHandler(String localResource, boolean proxyingEnabled) {
super();
this.localResource = localResource;
this.proxyingEnabled = proxyingEnabled;
}
/*
* (non-Javadoc)
* @see
* org.apache.http.nio.protocol.HttpAsyncRequestHandler#handle(java.
* lang.Object, org.apache.http.nio.protocol.HttpAsyncExchange,
* org.apache.http.protocol.HttpContext)
*/
@Override
public void handle(HttpRequest httpRequest, HttpAsyncExchange httpExchange, HttpContext httpContext) throws HttpException, IOException {
LOG.finer("Incoming http request: " + httpRequest.getRequestLine());
try {
// translate the request in a valid coap request
Request coapRequest = HttpTranslator.getCoapRequest(httpRequest, localResource, proxyingEnabled);
// fill the maps
exchangeMap.put(coapRequest, new Exchanger<Response>());
LOG.finer("Fill exchange with: " + coapRequest);
// the new thread will wait for the completion of
// the coap request
Thread worker = new CoapResponseWorker("HttpStack Worker", coapRequest, httpExchange, httpRequest);
// starting the "consumer thread" that will sleep waiting
// for the producer
worker.start();
LOG.finer("Started thread 'httpStack worker' to wait the response");
// send the coap request to the upper layers
doReceiveMessage(coapRequest);
} catch (InvalidMethodException e) {
LOG.warning("Method not implemented" + e.getMessage());
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_WRONG_METHOD);
return;
} catch (InvalidFieldException e) {
LOG.warning("Request malformed" + e.getMessage());
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_URI_MALFORMED);
return;
} catch (TranslationException e) {
LOG.warning("Failed to translate the http request in a valid coap request: " + e.getMessage());
sendSimpleHttpResponse(httpExchange, HttpTranslator.STATUS_TRANSLATION_ERROR);
return;
}
}
/*
* (non-Javadoc)
* @see
* org.apache.http.nio.protocol.HttpAsyncRequestHandler#processRequest
* (org.apache.http.HttpRequest,
* org.apache.http.protocol.HttpContext)
*/
@Override
public HttpAsyncRequestConsumer<HttpRequest> processRequest(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException {
// Buffer request content in memory for simplicity
return new BasicAsyncRequestConsumer();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.wicket.ui.pages.entity;
import java.util.List;
import org.apache.wicket.Application;
import org.apache.wicket.RestartResponseException;
import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeInstantiation;
import org.apache.wicket.event.Broadcast;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.util.string.Strings;
import org.apache.isis.applib.Identifier;
import org.apache.isis.applib.NonRecoverableException;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.commons.authentication.AuthenticationSession;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.adapter.mgr.AdapterManager.ConcurrencyChecking;
import org.apache.isis.core.metamodel.adapter.version.ConcurrencyException;
import org.apache.isis.core.metamodel.consent.InteractionInitiatedBy;
import org.apache.isis.core.metamodel.consent.InteractionResult;
import org.apache.isis.core.metamodel.deployment.DeploymentCategory;
import org.apache.isis.core.metamodel.interactions.InteractionUtils;
import org.apache.isis.core.metamodel.interactions.ObjectVisibilityContext;
import org.apache.isis.core.metamodel.interactions.VisibilityContext;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.core.metamodel.spec.feature.Contributed;
import org.apache.isis.core.metamodel.spec.feature.ObjectAssociation;
import org.apache.isis.core.metamodel.spec.feature.ObjectMember;
import org.apache.isis.core.runtime.system.DeploymentType;
import org.apache.isis.core.runtime.system.context.IsisContext;
import org.apache.isis.viewer.wicket.model.common.PageParametersUtils;
import org.apache.isis.viewer.wicket.model.hints.IsisUiHintEvent;
import org.apache.isis.viewer.wicket.model.models.EntityModel;
import org.apache.isis.viewer.wicket.ui.ComponentType;
import org.apache.isis.viewer.wicket.ui.components.widgets.breadcrumbs.BreadcrumbModel;
import org.apache.isis.viewer.wicket.ui.components.widgets.breadcrumbs.BreadcrumbModelProvider;
import org.apache.isis.viewer.wicket.ui.pages.PageAbstract;
import org.apache.isis.viewer.wicket.ui.util.CssClassAppender;
/**
 * Web page representing an entity.
 */
@AuthorizeInstantiation("org.apache.isis.viewer.wicket.roles.USER")
public class EntityPage extends PageAbstract {

    private static final long serialVersionUID = 1L;

    /** Model backing the entity rendered by this page. */
    private final EntityModel model;

    /**
     * Called reflectively, in support of
     * {@link BookmarkablePageLink bookmarkable} links.
     */
    public EntityPage(final PageParameters pageParameters) {
        this(pageParameters, createEntityModel(pageParameters));
    }

    /**
     * Creates an EntityModel from the given page parameters.
     * Redirects to the application home page if there is no OID in the parameters.
     *
     * @param parameters The page parameters with the OID
     * @return An EntityModel for the requested OID
     */
    private static EntityModel createEntityModel(final PageParameters parameters) {
        String oid = EntityModel.oidStr(parameters);
        if (Strings.isEmpty(oid)) {
            // No OID supplied: bail out to the home page rather than render a broken page.
            throw new RestartResponseException(Application.get().getHomePage());
        }
        return new EntityModel(parameters);
    }

    private EntityPage(final PageParameters pageParameters, final EntityModel entityModel) {
        this(pageParameters, entityModel, null);
    }

    public EntityPage(final ObjectAdapter adapter) {
        this(adapter, null);
    }

    /**
     * Ensure that any {@link ConcurrencyException} that might have occurred already
     * (eg from an action invocation) is show.
     */
    public EntityPage(final ObjectAdapter adapter, final ConcurrencyException exIfAny) {
        this(PageParametersUtils.newPageParameters(), newEntityModel(adapter, exIfAny));
    }

    /**
     * Wraps the adapter in an {@link EntityModel}, carrying along any concurrency
     * exception so it can be displayed on the page.
     */
    private static EntityModel newEntityModel(
            final ObjectAdapter adapter,
            final ConcurrencyException exIfAny) {
        final EntityModel model = new EntityModel(adapter);
        model.setException(exIfAny);
        return model;
    }

    /**
     * All constructors delegate here: verifies that the entity exists and is visible
     * to the current user, then builds the page component tree.
     *
     * @param pageParameters page parameters (may carry the OID)
     * @param entityModel model of the entity to render
     * @param titleString explicit page title, or {@code null} to derive it from the adapter
     */
    private EntityPage(
            final PageParameters pageParameters,
            final EntityModel entityModel,
            final String titleString) {
        super(pageParameters, titleString, ComponentType.ENTITY);

        this.model = entityModel;

        final ObjectAdapter objectAdapter;
        try {
            // check object still exists
            objectAdapter = entityModel.getObject();
        } catch(final RuntimeException ex) {
            // Tidy up any stale bookmark/breadcrumb entries for the now-unavailable object.
            removeAnyBookmark(model);
            removeAnyBreadcrumb(model);
            // we throw an authorization exception here to avoid leaking out information as to whether the object exists or not.
            throw new ObjectMember.AuthorizationException(ex);
        }

        // check that the entity overall can be viewed.
        if(!isVisible(objectAdapter)) {
            throw new ObjectMember.AuthorizationException();
        }

        // belt-n-braces: check that at least one property of the entity can be viewed.
        // NOTE(review): 'session' appears unused below; candidate for removal.
        final AuthenticationSession session = getAuthenticationSession();
        final ObjectSpecification specification = objectAdapter.getSpecification();
        final List<ObjectAssociation> visibleAssociation = specification.getAssociations(Contributed.INCLUDED, ObjectAssociation.Filters.dynamicallyVisible(
                objectAdapter, InteractionInitiatedBy.USER, Where.NOWHERE));

        if(visibleAssociation.isEmpty()) {
            // Distinguish "no properties at all" (programming error) from
            // "properties exist but none is visible" (authorization failure).
            final List<ObjectAssociation> anyAssociations = specification.getAssociations(Contributed.INCLUDED);
            if(anyAssociations.isEmpty()) {
                throw new NonRecoverableException(String.format(
                        "No properties are defined for this entity type (%s); this is probably a programming error", specification.getFullIdentifier()));
            }
            throw new ObjectMember.AuthorizationException();
        }

        // the next bit is a work-around for JRebel integration...
        // ... even though the IsisJRebelPlugin calls invalidateCache, it seems that there is
        // some caching elsewhere in the Wicket viewer meaning that stale metadata is referenced.
        // doing an additional call here seems to be sufficient, though not exactly sure why... :-(
        if(!getDeploymentType().isProduction()) {
            getSpecificationLoader().invalidateCacheFor(objectAdapter.getObject());
        }

        if(titleString == null) {
            // No explicit title supplied: fall back to the object's own title string.
            final String titleStr = objectAdapter.titleString(null);
            setTitle(titleStr);
        }

        WebMarkupContainer entityPageContainer = new WebMarkupContainer("entityPageContainer");
        // CSS class derived from the domain class' simple name, allowing per-type styling.
        entityPageContainer.add(new CssClassAppender(new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                ObjectAdapter adapter = entityModel.getObject();
                return adapter.getObject().getClass().getSimpleName();
            }
        }));

        themeDiv.addOrReplace(entityPageContainer);

        addChildComponents(entityPageContainer, model);

        // bookmarks and breadcrumbs
        bookmarkPage(model);
        addBreadcrumb(entityModel);
        addBookmarkedPages(entityPageContainer);

        // TODO mgrigorov: Zero Clipboard has been moved to EntityIconAndTitlePanel where the entity model is available.
        // Is this still needed for something else ?!
        //
        // ensure the copy link holds this page.
        send(this, Broadcast.BREADTH, new IsisUiHintEvent(entityModel, null));
    }

    /** Returns whether the current user may view the object at all (object-level visibility). */
    private boolean isVisible(final ObjectAdapter input) {
        final InteractionResult visibleResult =
                InteractionUtils.isVisibleResult(input.getSpecification(), createVisibleInteractionContext(input
                ));
        return visibleResult.isNotVetoing();
    }

    /** Builds the visibility interaction context for an object shown on an object form. */
    private VisibilityContext<?> createVisibleInteractionContext(
            final ObjectAdapter objectAdapter) {
        final Identifier identifier = objectAdapter.getSpecification().getIdentifier();
        return new ObjectVisibilityContext(
                objectAdapter, identifier, InteractionInitiatedBy.USER,
                Where.OBJECT_FORMS);
    }

    /** Records the entity in the session-scoped breadcrumb trail. */
    private void addBreadcrumb(final EntityModel entityModel) {
        final BreadcrumbModelProvider session = (BreadcrumbModelProvider) getSession();
        final BreadcrumbModel breadcrumbModel = session.getBreadcrumbModel();
        breadcrumbModel.visited(entityModel);
    }

    /** Removes the entity from the breadcrumb trail (eg when it no longer exists). */
    private void removeAnyBreadcrumb(final EntityModel entityModel) {
        final BreadcrumbModelProvider session = (BreadcrumbModelProvider) getSession();
        final BreadcrumbModel breadcrumbModel = session.getBreadcrumbModel();
        breadcrumbModel.remove(entityModel);
    }

    /**
     * A rather crude way of intercepting the redirect-and-post strategy.
     *
     * <p>
     * Performs eager loading of corresponding {@link EntityModel}, with
     * {@link ConcurrencyChecking#NO_CHECK no} concurrency checking.
     */
    @Override
    protected void onBeforeRender() {
        this.model.load(ConcurrencyChecking.NO_CHECK);
        super.onBeforeRender();
    }

    private DeploymentType getDeploymentType() {
        return IsisContext.getDeploymentType();
    }

    protected DeploymentCategory getDeploymentCategory() {
        return getDeploymentType().getDeploymentCategory();
    }
}
| |
package org.arnolds.agileappproject.agileappmodule.ui.frags;
import android.app.ActionBar;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.app.Fragment;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import org.arnolds.agileappproject.agileappmodule.R;
import org.arnolds.agileappproject.agileappmodule.git.GitHubBroker;
import org.arnolds.agileappproject.agileappmodule.git.GitHubBrokerListener;
import org.arnolds.agileappproject.agileappmodule.ui.activities.DrawerLayoutFragmentActivity;
import org.arnolds.agileappproject.agileappmodule.utils.AgileAppModuleUtils;
import org.kohsuke.github.GHRepository;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Navigation drawer fragment: shows the app's section list plus a spinner for
 * selecting the active GitHub repository. Host activities must implement
 * {@link NavigationDrawerCallbacks}.
 */
public class NavigationDrawerFragment extends Fragment {

    // Poll period for refreshing the repo spinner contents.
    // NOTE(review): Long.MAX_VALUE seconds means the scheduled task effectively
    // runs only once — confirm whether periodic polling was intended.
    private static final long REPOS_POLL_RATE_SECONDS = Long.MAX_VALUE;
    // Last spinner position chosen; static so it survives fragment re-creation.
    private static int LAST_SELECTED_ITEM_INDEX = 0;

    private NavigationDrawerCallbacks mCallbacks;   // host activity callback sink
    private ActionBarDrawerToggle mDrawerToggle;    // ties drawer state to the action-bar icon
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;
    private int latestMenuItemSelected = -1;        // -1 = nothing selected yet
    private Spinner mRepoSelectionSpinner;
    private String latestSelectedRepoName = "";
    private final SelectionListener selectionListener = new SelectionListener();

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(Boolean.TRUE);
    }

    /** Forwards repo-selection results from the GitHub broker to the host activity. */
    private class SelectionListener extends GitHubBrokerListener {
        @Override
        public void onRepoSelected(boolean result) {
            try {
                mCallbacks.onNewRepoSelected(latestSelectedRepoName);
            }
            catch (NullPointerException ex) {
                // NOTE(review): silently ignores a detached fragment (mCallbacks == null);
                // an explicit null check would be clearer than catching NPE.
            }
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View ret = inflater.inflate(
                R.layout.fragment_navigation_drawer_layout, container, false);
        mDrawerListView = (ListView) ret.findViewById(R.id.navigation_drawer_list_view);
        mRepoSelectionSpinner = (Spinner) ret.findViewById(R.id.repo_selector_view);
        // When the user picks a repo in the spinner, tell the broker to switch repos.
        mRepoSelectionSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                LAST_SELECTED_ITEM_INDEX = position;
                String repoName = mRepoSelectionSpinner.getItemAtPosition(position).toString();
                // Ignore empty entries and re-selection of the current repo.
                if (!repoName.isEmpty() && !repoName.contentEquals(latestSelectedRepoName)) {
                    NavigationDrawerFragment.this.latestSelectedRepoName = repoName;
                    try {
                        GitHubBroker.getInstance().selectRepo(repoName, selectionListener);
                    }
                    catch (GitHubBroker.AlreadyNotConnectedException e) {
                        Log.wtf("debug", e.getClass().getName(), e);
                    }
                    catch (GitHubBroker.NullArgumentException e) {
                        Log.wtf("debug", e.getClass().getName(), e);
                    }
                    // Let the host show a loading indicator while the switch completes.
                    mCallbacks.onStartLoad();
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });
        mRepoSelectionSpinner.setBackgroundColor(getResources().getColor(R.color.theme_white));
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
                latestMenuItemSelected = position;
            }
        });
        mDrawerListView
                .setAdapter(new NavigationDrawerArrayAdapter());
        initializeAutoUpdaterRepoSelector(mRepoSelectionSpinner);
        return ret;
    }

    /**
     * Schedules a background task that fetches the user's repositories and (re)populates
     * the repo spinner on the UI thread.
     * NOTE(review): the executor is never shut down, so the thread may outlive the
     * fragment — consider cancelling it in onDestroyView().
     */
    private final void initializeAutoUpdaterRepoSelector(final Spinner selectionSpinner) {
        final ScheduledExecutorService reposFetchService = Executors
                .newScheduledThreadPool(1);
        reposFetchService.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                // Skip refreshes while the drawer is open to avoid visual churn.
                if (isDrawerOpen()) {
                    return;
                }
                try {
                    GitHubBroker.getInstance().getAllRepos(new GitHubBrokerListener() {
                        @Override
                        public void onAllReposRetrieved(boolean success,
                                                        Collection<GHRepository> repositories) {
                            if (success) {
                                final List<String> allRepositories = new ArrayList<String>();
                                for (GHRepository repository : repositories)
                                    allRepositories.add(repository.getName());
                                try {
                                    final ArrayAdapter<String> adapter =
                                            new ArrayAdapter<String>(
                                                    getActivity().getApplicationContext(),
                                                    R.layout.repo_selector_spinner_selected_item,
                                                    allRepositories);
                                    adapter.setDropDownViewResource(
                                            R.layout.repo_selector_dropdown_item);
                                    // Remember the current selection so it is not lost on refresh.
                                    final String newSelectedRepoName =
                                            selectionSpinner.getSelectedItem() == null ? "" :
                                                    selectionSpinner.getSelectedItem().toString();
                                    getActivity().runOnUiThread(new Runnable() {
                                        @Override
                                        public void run() {
                                            selectionSpinner.setAdapter(adapter);
                                            adapter.notifyDataSetChanged();
                                            if (newSelectedRepoName.isEmpty()) {
                                                selectionSpinner
                                                        .setSelection(LAST_SELECTED_ITEM_INDEX);
                                            }
                                        }
                                    });
                                }
                                catch (NullPointerException ex) {
                                    // NOTE(review): swallows NPE when the fragment is detached
                                    // (getActivity() == null); a null check would be clearer.
                                }
                            }
                        }
                    });
                }
                catch (GitHubBroker.AlreadyNotConnectedException e) {
                    Log.wtf("debug", e.getClass().getName(), e);
                }
            }
        }, 0, REPOS_POLL_RATE_SECONDS, TimeUnit.SECONDS);
    }

    /** @return true when the drawer layout exists and this fragment's drawer is open. */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;
        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener
        ActionBar actionBar = getActionBar();
        actionBar.setHomeButtonEnabled(Boolean.TRUE);
        actionBar.setDisplayHomeAsUpEnabled(Boolean.TRUE);
        // ActionBarDrawerToggle ties together the the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }
                restoreActionBar();
                getActivity().invalidateOptionsMenu();
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                showGlobalContextActionBar();
            }
        };
        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });
        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /** Restores the per-section action-bar title after the drawer closes. */
    private void restoreActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setDisplayShowTitleEnabled(Boolean.TRUE);
        // Section titles are resolved by resource-name convention: "title_section<N>".
        actionBar.setTitle(
                AgileAppModuleUtils.getString(getActivity(), "title_section" + (
                                DrawerLayoutFragmentActivity.getLastSelectedFragmentIndex() + 1),
                        "Home"
                )
        );
    }

    /** Checks the list item, closes the drawer, and notifies the host activity. */
    private void selectItem(int position) {
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        }
        catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // While the drawer is open, show the global (app-level) action bar.
        if (mDrawerLayout != null) {
            if (isDrawerOpen()) {
                showGlobalContextActionBar();
            }
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Give the drawer toggle first chance to consume the event (home/up icon).
        return mDrawerToggle.onOptionsItemSelected(item) || super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the actionbar app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(Boolean.TRUE);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return getActivity().getActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public static interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);

        /** Called after a different repository has been selected in the spinner. */
        void onNewRepoSelected(String repoName);

        /** Called when a repo switch starts, so the host can show a loading state. */
        void onStartLoad();
    }

    /** Adapter for the drawer's section list; items come from string/drawable resources. */
    private class NavigationDrawerArrayAdapter extends BaseAdapter {
        private final int mResource = R.layout.list_item_navigation_drawer_list;

        @Override
        public int getCount() {
            // Count sections by probing "title_section1", "title_section2", ... until missing.
            Context context = getActivity().getApplicationContext();
            for (int i = 1; ; i++) {
                if (AgileAppModuleUtils.getString(context, "title_section" + i, null) == null) {
                    return i - 1;
                }
            }
        }

        @Override
        public Object getItem(int i) {
            // Resource names are 1-based while adapter positions are 0-based.
            int temp = i + 1;
            return AgileAppModuleUtils
                    .getString(getActivity().getApplicationContext(), "title_section" + temp, "");
        }

        @Override
        public long getItemId(int i) {
            return i;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            LayoutInflater inflater =
                    (LayoutInflater) getActivity().getApplicationContext().getSystemService(
                            Context.LAYOUT_INFLATER_SERVICE);
            convertView = inflater.inflate(mResource, parent, false);
            int temp = position + 1;
            TextView textView =
                    (TextView) convertView.findViewById(R.id.navigation_drawer_item_title);
            ImageView imageView =
                    (ImageView) convertView.findViewById(R.id.navigation_drawer_item_icon);
            // Highlight the currently active section.
            if (latestMenuItemSelected != -1)
                if (position==(DrawerLayoutFragmentActivity.getLastSelectedFragmentIndex())){
                    convertView.setBackgroundColor(Color.GRAY);
                    textView.setTypeface(null, Typeface.BOLD);
                }
            // NOTE(review): "+ +temp" contains a stray unary plus; harmless but likely a typo.
            textView.setText(AgileAppModuleUtils
                    .getString(getActivity().getApplicationContext(), "title_section" + +temp, ""));
            imageView.setImageResource(
                    AgileAppModuleUtils.getDrawableAsId("icon_section" + temp, -1));
            return convertView;
        }
    }
}
| |
/*
Copyright 1996-2013 Ariba, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$Id: //ariba/platform/util/core/ariba/util/core/Chrono.java#10 $
*/
package ariba.util.core;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.List;
import ariba.util.log.Log;
import ariba.util.log.Logger;
/**
Simple timing class, used for timing one or more trials of an event.
The trials must be timed consecutively, so {@link #start} or
{@link #stop} cannot be called twice in a row.
@aribaapi ariba
*/
/**
    Simple timing class, used for timing one or more trials of an event.
    The trials must be timed consecutively, so {@link #start} or
    {@link #stop} cannot be called twice in a row.

    @aribaapi ariba
*/
public class Chrono
{
    /**
        Average per-trial overhead (in ms) of one start()/stop() pair, measured
        once at class-load time.
        NOTE(review): currently unused by this class itself.
    */
    private static final int overhead = calibrate();
    /*
        Compile-time constant (final with a constant initializer), so it is safely
        inlined even though it is declared after its use in 'overhead' above.
    */
    private static final int calibrateReps = 100;

    /** Measures the average cost of an empty start()/stop() cycle. */
    private static int calibrate ()
    {
        Chrono c = new Chrono ("calibrate");
        for (int i = 0; i < calibrateReps; i++) {
            c.start();
            c.stop();
        }
        return c.totalTime / calibrateReps;
    }

    /** When false, {@link #start} and {@link #stop} are no-ops. */
    protected boolean enabled;

    private long start;           // wall-clock ms at start(); 0 means "not started"
    private long stop;            // wall-clock ms at stop()
    private int totalTime;        // accumulated elapsed ms over all trials
    private int trials;           // number of completed trials
    private String name;          // event name used in log and toString output
    private Logger log;           // optional logger for per-trial messages
    private int startInfoId = -1; // optional INFO message resource id used by start()
    private int stopInfoId = -1;  // optional INFO message resource id used by stop()

    // Thread CPU/user times (ns), captured only when debug logging is enabled.
    private long cpuStart;
    private long cpuStop;
    private long userStart;
    private long userStop;

    // Per-thread stack of the names of currently-running Chronos, used to report
    // which timer (if any) encloses this one.
    private static ThreadLocal<List<String>> _stack = new ThreadLocal<List<String>>()
    {
        @Override
        protected List<String> initialValue ()
        {
            return ListUtil.list();
        }
    };

    /** Name of the Chrono already running on this thread when this one started. */
    private String parent = null;

    /**
        Creates a new instance of a <code>Chrono</code> timer.

        @param name the name of the event. Used by {@link #toString}
        to print out the timing summary.
    */
    public Chrono (String name)
    {
        this(name, null);
    }

    /**
        Creates a new instance of a <code>Chrono</code> timer.

        @param name the name of the event. Used by {@link #toString}
        to print out the timing summary.
        @param logger the Logger to use to print debug statements
    */
    public Chrono (String name, Logger logger)
    {
        this.name = name;
        enabled = true;
        start = 0;
        stop = 0;
        cpuStart = 0;
        cpuStop = 0;
        userStart = 0;
        userStop = 0;
        totalTime = 0;
        trials = 0;
        log = logger;
    }

    /**
        Creates a new instance of a <code>Chrono</code> timer.

        @param name the name of the event. Used by {@link #toString}
        to print out the timing summary.
        @param logger the Logger to use to print debug statements
        @param startInfoId id of a resource for logging an INFO start message to logger.
        @param stopInfoId id of a resource for logging an INFO stop message to logger.
    */
    public Chrono (String name, Logger logger, int startInfoId, int stopInfoId)
    {
        this(name, logger);
        this.startInfoId = startInfoId;
        this.stopInfoId = stopInfoId;
    }

    /**
        Resets the counters to start timing from scratch.
        Also clears this thread's nesting stack.
    */
    public void reset ()
    {
        start = 0;
        stop = 0;
        cpuStart = 0;
        cpuStop = 0;
        userStart = 0;
        userStop = 0;
        totalTime = 0;
        trials = 0;
        _stack.get().clear();
    }

    /**
        Starts a trial. Must always be called before {@link #stop}.
        Calling start() twice in a row logs a warning.
    */
    public void start ()
    {
        if (start != 0) {
            // Chrono {0} started twice
            Log.util.warning(2792, name);
        }
        if (enabled) {
            start = System.currentTimeMillis();
            if (log != null && log.isDebugEnabled()) {
                // CPU/user times are only sampled when they will actually be reported.
                ThreadMXBean bean = ManagementFactory.getThreadMXBean();
                cpuStart = bean.getCurrentThreadCpuTime();
                userStart = bean.getCurrentThreadUserTime();
            }
            List<String> stack = null;
            if (_stack != null) {
                stack = _stack.get();
            }
            if (!ListUtil.nullOrEmptyList(stack)) {
                // The innermost running Chrono on this thread is our parent.
                parent = stack.get(stack.size() - 1);
            }
            if (stack != null) {
                stack.add(name);
            }
            if (log != null) {
                String fullName = Fmt.S("%s (%s) (Thread %s)",
                                        name, parent, Thread.currentThread().getName());
                if (startInfoId > 0) {
                    log.info(startInfoId, fullName);
                }
                else {
                    log.debug("Phase '%s' started", fullName);
                }
            }
        }
    }

    /**
        Stops timing the current trial.
        Logs a warning (and records a zero-length trial) if called before {@link #start}.
    */
    public void stop ()
    {
        if (enabled) {
            if (start == 0) {
                // Chrono {0}: stop() called before start()
                Log.util.warning(4317, name);
                stop = 0;
            }
            else {
                stop = System.currentTimeMillis();
                if (log != null && log.isDebugEnabled()) {
                    ThreadMXBean bean = ManagementFactory.getThreadMXBean();
                    cpuStop = bean.getCurrentThreadCpuTime();
                    userStop = bean.getCurrentThreadUserTime();
                }
            }
            int trialTime = (int)(stop - start);
            totalTime += trialTime;
            trials++;
            if (log != null) {
                String fullName = Fmt.S("%s (%s) (Thread %s)",
                                        name, parent, Thread.currentThread().getName());
                if (stopInfoId > 0 && log.isInfoEnabled()) {
                    log.info(stopInfoId,
                             fullName,
                             Integer.toString(trials),
                             // FIX: divide by 1000.0 — integer division (trialTime / 1000)
                             // truncated fractional seconds (eg 1500ms logged as "1.0").
                             Double.toString(trialTime / 1000.0),
                             getExtraInfo(true));
                }
                else if (log.isDebugEnabled()) {
                    log.debug("Phase '%s' (trial #%s) finished: duration=%ss%s",
                              fullName,
                              Integer.toString(trials),
                              // FIX: same integer-division truncation as above.
                              Double.toString(trialTime / 1000.0),
                              getExtraInfo(true));
                }
            }
            // Prepare for the next trial and pop this Chrono off the nesting stack.
            start = 0;
            stop = 0;
            List<String> stack = null;
            if (_stack != null) {
                stack = _stack.get();
                if (!ListUtil.nullOrEmptyList(stack)) {
                    stack.remove(stack.size() - 1);
                }
            }
        }
    }

    /**
        Enables the timer, so that calls to {@link #start} and
        {@link #stop} gather more timing information.
    */
    public void enable ()
    {
        enabled = true;
    }

    /**
        Disables the timer. Subsequent calls to {@link #start} and
        {@link #stop} will have no effect on timing information.
    */
    public void disable ()
    {
        enabled = false;
    }

    /**
        @return the total number of seconds for all trials
    */
    public double getTotalSeconds ()
    {
        return ((double)totalTime) / 1000.0;
    }

    /**
        @return the average number of seconds for all trials.
    */
    public double getAverageSeconds ()
    {
        if (trials == 0) {
            return 0.0;
        }
        return getTotalSeconds() / (double)trials;
    }

    /**
        @return the number of trials that were timed.
    */
    public int getTrialCount ()
    {
        return this.trials;
    }

    /**
        @return a description of the event, number of trials and
        timing information
    */
    public String toString ()
    {
        if (trials == 0) {
            return Fmt.S("Timing for %s: no trials", name);
        }
        if (trials == 1) {
            return Fmt.S("Timing for %s (%s) (Thread %s): %ss%s",
                         name,
                         parent,
                         Thread.currentThread().getName(),
                         Double.toString(getTotalSeconds()),
                         getExtraInfo(false));
        }
        return Fmt.S("Timing for %s (%s) (Thread %s) (%s trials): total = %ss, avg = %ss%s",
                     new Object[] {name,
                                   parent,
                                   Thread.currentThread().getName(),
                                   Integer.toString(trials),
                                   Double.toString(getTotalSeconds()),
                                   Double.toString(getAverageSeconds()),
                                   getExtraInfo(false)});
    }

    /**
        Hook to get more information from subclasses

        @param latestTrial indicates whether the extra info is only for
        the latest trial or for all the trials that occurred
        @return more information to display
        @aribaapi ariba
    */
    protected String getExtraInfo (boolean latestTrial)
    {
        if (log != null && log.isDebugEnabled()) {
            // ns -> ms (long division; sub-millisecond precision is discarded).
            return Fmt.S(" (User: %sms, Total: %sms)",
                         (userStop - userStart)/1000000, (cpuStop - cpuStart)/1000000);
        }
        return "";
    }
}
| |
package com.digitalpetri.enip.cpf;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

import io.netty.buffer.ByteBuf;
/**
 * EtherNet/IP CPF "CIP Identity" item (type id 0x0C): an immutable value object
 * describing a device's identity, plus static {@link #encode} / {@link #decode}
 * helpers for the wire format.
 */
public final class CipIdentityItem extends CpfItem {

    public static final int TYPE_ID = 0x0C;

    /** Product-name strings are length-prefixed ASCII on the wire. */
    private static final Charset ASCII = StandardCharsets.US_ASCII;

    private final int protocolVersion;
    private final SockAddr socketAddress;
    private final int vendorId;
    private final int deviceType;
    private final int productCode;
    private final short revisionMajor;
    private final short revisionMinor;
    private final short status;
    private final long serialNumber;
    private final String productName;
    private final short state;

    /**
     * @param protocolVersion encapsulation protocol version supported (also returned with
     *                        {@link com.digitalpetri.enip.commands.RegisterSession} reply).
     * @param socketAddress   {@link SockAddr} structure.
     * @param vendorId        device manufacturers vendor ID.
     * @param deviceType      device type of product.
     * @param productCode     product code assigned with respect to device type.
     * @param revisionMajor   device major revision.
     * @param revisionMinor   device minor revision.
     * @param status          current status of device.
     * @param serialNumber    serial number of device.
     * @param productName     human readable description of device.
     * @param state           current state of device.
     */
    public CipIdentityItem(int protocolVersion,
                           SockAddr socketAddress,
                           int vendorId,
                           int deviceType,
                           int productCode,
                           short revisionMajor,
                           short revisionMinor,
                           short status,
                           long serialNumber,
                           String productName,
                           short state) {

        super(TYPE_ID);

        this.protocolVersion = protocolVersion;
        this.socketAddress = socketAddress;
        this.vendorId = vendorId;
        this.deviceType = deviceType;
        this.productCode = productCode;
        this.revisionMajor = revisionMajor;
        this.revisionMinor = revisionMinor;
        this.status = status;
        this.serialNumber = serialNumber;
        this.productName = productName;
        this.state = state;
    }

    public int getProtocolVersion() {
        return protocolVersion;
    }

    public SockAddr getSocketAddress() {
        return socketAddress;
    }

    public int getVendorId() {
        return vendorId;
    }

    public int getDeviceType() {
        return deviceType;
    }

    public int getProductCode() {
        return productCode;
    }

    public short getRevisionMajor() {
        return revisionMajor;
    }

    public short getRevisionMinor() {
        return revisionMinor;
    }

    public short getStatus() {
        return status;
    }

    public long getSerialNumber() {
        return serialNumber;
    }

    public String getProductName() {
        return productName;
    }

    public short getState() {
        return state;
    }

    /** Value equality over every identity field. */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        CipIdentityItem that = (CipIdentityItem) o;

        return deviceType == that.deviceType &&
            productCode == that.productCode &&
            protocolVersion == that.protocolVersion &&
            revisionMajor == that.revisionMajor &&
            revisionMinor == that.revisionMinor &&
            serialNumber == that.serialNumber &&
            state == that.state &&
            status == that.status &&
            vendorId == that.vendorId &&
            productName.equals(that.productName) &&
            socketAddress.equals(that.socketAddress);
    }

    @Override
    public int hashCode() {
        int result = protocolVersion;
        result = 31 * result + socketAddress.hashCode();
        result = 31 * result + vendorId;
        result = 31 * result + deviceType;
        result = 31 * result + productCode;
        result = 31 * result + (int) revisionMajor;
        result = 31 * result + (int) revisionMinor;
        result = 31 * result + (int) status;
        result = 31 * result + (int) (serialNumber ^ (serialNumber >>> 32));
        result = 31 * result + productName.hashCode();
        result = 31 * result + (int) state;
        return result;
    }

    /**
     * Encodes {@code item} into {@code buffer}: type id, 16-bit length, then the
     * identity payload. The length field is back-patched once the payload size is known.
     *
     * @return the same {@code buffer}, for chaining.
     */
    public static ByteBuf encode(CipIdentityItem item, ByteBuf buffer) {
        buffer.writeShort(TYPE_ID);

        // Length placeholder...
        int lengthStartIndex = buffer.writerIndex();
        buffer.writeShort(0);

        // Encode the item...
        int itemStartIndex = buffer.writerIndex();

        buffer.writeShort(item.getProtocolVersion());
        SockAddr.encode(item.getSocketAddress(), buffer);
        buffer.writeShort(item.getVendorId());
        buffer.writeShort(item.getDeviceType());
        buffer.writeShort(item.getProductCode());
        buffer.writeByte(item.getRevisionMajor());
        buffer.writeByte(item.getRevisionMinor());
        buffer.writeShort(item.getStatus());
        buffer.writeInt((int) item.getSerialNumber());
        writeString(item.getProductName(), buffer);
        buffer.writeByte(item.getState());

        // Go back and update the length.
        int bytesWritten = buffer.writerIndex() - itemStartIndex;
        buffer.markWriterIndex();
        buffer.writerIndex(lengthStartIndex);
        buffer.writeShort(bytesWritten);
        buffer.resetWriterIndex();

        return buffer;
    }

    /**
     * Decodes a CipIdentityItem from {@code buffer}. The caller is expected to have
     * positioned the buffer at the item's type-id field.
     */
    public static CipIdentityItem decode(ByteBuf buffer) {
        int typeId = buffer.readUnsignedShort();
        buffer.skipBytes(2); // length is implicit in the fixed-layout fields below

        assert (typeId == TYPE_ID);

        return new CipIdentityItem(
            buffer.readUnsignedShort(),
            SockAddr.decode(buffer),
            buffer.readUnsignedShort(),
            buffer.readUnsignedShort(),
            buffer.readUnsignedShort(),
            buffer.readUnsignedByte(),
            buffer.readUnsignedByte(),
            buffer.readShort(),
            buffer.readUnsignedInt(),
            readString(buffer),
            buffer.readUnsignedByte()
        );
    }

    /** Reads a length-prefixed ASCII string (one unsigned length byte, then the bytes). */
    private static String readString(ByteBuf buffer) {
        // readUnsignedByte() is already bounded to 0..255; no extra clamp needed.
        int length = buffer.readUnsignedByte();
        byte[] bs = new byte[length];
        buffer.readBytes(bs);
        return new String(bs, ASCII);
    }

    /**
     * Writes a length-prefixed ASCII string, truncating to 255 since only one
     * length byte is available on the wire.
     */
    private static void writeString(String s, ByteBuf buffer) {
        int length = Math.min(s.length(), 255);
        buffer.writeByte(length);
        // For US-ASCII, encoded byte count equals char count, so 'length' is valid here.
        buffer.writeBytes(s.getBytes(ASCII), 0, length);
    }

}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cp.internal.session;
import com.hazelcast.config.Config;
import com.hazelcast.config.cp.CPSubsystemConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.cp.internal.HazelcastRaftTestSupport;
import com.hazelcast.cp.internal.RaftGroupId;
import com.hazelcast.cp.internal.RaftInvocationManager;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import com.hazelcast.test.ChangeLoggingRule;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static com.hazelcast.cp.internal.session.AbstractProxySessionManager.NO_SESSION_ID;
import static com.hazelcast.internal.util.ConcurrencyUtil.CALLER_RUNS;
import static com.hazelcast.test.Accessors.getNodeEngineImpl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public abstract class AbstractProxySessionManagerTest extends HazelcastRaftTestSupport {

    @ClassRule
    public static ChangeLoggingRule changeLoggingRule = new ChangeLoggingRule("log4j2-debug-cp.xml");

    /** Server-side session time-to-live in seconds; kept short so expiry tests complete quickly. */
    private static final int SESSION_TTL_SECONDS = 10;

    /** Three-member cluster started before each test. */
    HazelcastInstance[] members;

    /** The Raft group that all sessions in these tests are bound to. */
    protected RaftGroupId groupId;

    @Before
    public void setup() throws ExecutionException, InterruptedException {
        members = newInstances(3);
        RaftInvocationManager invocationManager = getRaftInvocationManager(members[0]);
        groupId = invocationManager.createRaftGroup("group").get();
    }

    @Test
    public void getSession_returnsNoSessionId_whenNoSessionCreated() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        assertEquals(NO_SESSION_ID, sessionManager.getSession(groupId));
    }

    @Test
    public void acquireSession_createsNewSession_whenSessionNotExists() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        assertNotEquals(NO_SESSION_ID, sessionId);
        assertEquals(sessionId, sessionManager.getSession(groupId));
        assertEquals(1, sessionManager.getSessionAcquireCount(groupId, sessionId));
        SessionAccessor sessionAccessor = getSessionAccessor();
        // The session is created via an async Raft commit, so activation is eventual.
        assertTrueEventually(() -> assertTrue(sessionAccessor.isActive(groupId, sessionId)));
    }

    @Test
    public void acquireSession_returnsExistingSession_whenSessionExists() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long newSessionId = sessionManager.acquireSession(groupId);
        long sessionId = sessionManager.acquireSession(groupId);
        assertEquals(newSessionId, sessionId);
        assertEquals(sessionId, sessionManager.getSession(groupId));
        assertEquals(2, sessionManager.getSessionAcquireCount(groupId, sessionId));
    }

    @Test
    @SuppressWarnings("unchecked") // generic array creation for Future<Long>[]
    public void acquireSession_returnsTheSameSessionId_whenExecutedConcurrently() throws Exception {
        AbstractProxySessionManager sessionManager = getSessionManager();
        Callable<Long> acquireSessionCall = () -> sessionManager.acquireSession(groupId);
        Future<Long>[] futures = new Future[5];
        for (int i = 0; i < futures.length; i++) {
            futures[i] = spawn(acquireSessionCall);
        }
        long[] sessions = new long[futures.length];
        for (int i = 0; i < futures.length; i++) {
            sessions[i] = futures[i].get();
        }
        // All concurrent acquisitions must converge on a single session,
        // and each acquisition must be counted exactly once.
        long expectedSessionId = sessionManager.getSession(groupId);
        for (long sessionId : sessions) {
            assertEquals(expectedSessionId, sessionId);
        }
        assertEquals(sessions.length, sessionManager.getSessionAcquireCount(groupId, expectedSessionId));
    }

    @Test
    public void releaseSession_hasNoEffect_whenSessionNotExists() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        // Releasing an unknown session id must be a silent no-op.
        sessionManager.releaseSession(groupId, 1);
    }

    @Test
    public void releaseSession_whenSessionExists() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        sessionManager.releaseSession(groupId, sessionId);
        assertEquals(0, sessionManager.getSessionAcquireCount(groupId, sessionId));
    }

    @Test
    public void sessionHeartbeatsAreNotSent_whenSessionNotExists() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = 1;
        // No session was acquired, so no heartbeat should ever be attempted.
        assertTrueAllTheTime(() -> verify(sessionManager, never()).heartbeat(groupId, sessionId), 5);
    }

    @Test
    public void sessionHeartbeatsAreSent_whenSessionInUse() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        SessionAccessor sessionAccessor = getSessionAccessor();
        int heartbeatCount = 5;
        // Heartbeats must keep arriving while the session is held, and each one
        // must keep the server-side session alive.
        for (int i = 0; i < heartbeatCount; i++) {
            int times = i + 1;
            assertTrueEventually(() -> verify(sessionManager, atLeast(times)).heartbeat(groupId, sessionId));
            assertTrue(sessionAccessor.isActive(groupId, sessionId));
        }
    }

    @Test
    public void sessionHeartbeatsAreNotSent_whenSessionReleased() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        assertTrueEventually(() -> verify(sessionManager, atLeastOnce()).heartbeat(groupId, sessionId));
        sessionManager.releaseSession(groupId, sessionId);
        SessionAccessor sessionAccessor = getSessionAccessor();
        // Once released, heartbeats stop and the server-side session eventually expires.
        assertTrueEventually(() -> assertFalse(sessionAccessor.isActive(groupId, sessionId)));
    }

    @Test
    public void acquireSession_returnsTheExistingSession_whenSessionInUse() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        // Stub heartbeat into a no-op so the server-side session expires while the
        // proxy still considers the session in use.
        when(sessionManager.heartbeat(groupId, sessionId)).thenReturn(completedFuture());
        SessionAccessor sessionAccessor = getSessionAccessor();
        assertTrueEventually(() -> assertFalse(sessionAccessor.isActive(groupId, sessionId)));
        // While the acquire count is non-zero, the manager keeps handing out the same id.
        assertTrueAllTheTime(() -> assertEquals(sessionId, sessionManager.acquireSession(groupId)), 3);
    }

    @Test
    public void acquireSession_returnsNewSession_whenSessionExpiredAndNotInUse() {
        AbstractProxySessionManager sessionManager = getSessionManager();
        long sessionId = sessionManager.acquireSession(groupId);
        // Stub heartbeat into a no-op so the server-side session expires.
        when(sessionManager.heartbeat(groupId, sessionId)).thenReturn(completedFuture());
        SessionAccessor sessionAccessor = getSessionAccessor();
        assertTrueEventually(() -> assertFalse(sessionAccessor.isActive(groupId, sessionId)));
        sessionManager.releaseSession(groupId, sessionId);
        // After the expired session is fully released, a fresh acquire must create a new session.
        assertTrueEventually(() -> {
            long newSessionId = sessionManager.acquireSession(groupId);
            sessionManager.releaseSession(groupId, newSessionId);
            assertNotEquals(sessionId, newSessionId);
        });
    }

    /**
     * Returns the proxy session manager under test.
     * NOTE(review): implementations are expected to return a Mockito spy, since the
     * tests above stub ({@code when}) and verify ({@code verify}) {@code heartbeat} —
     * confirm in concrete subclasses.
     */
    protected abstract AbstractProxySessionManager getSessionManager();

    /** An already-completed future used to neutralize heartbeat stubs. */
    private InternalCompletableFuture<Object> completedFuture() {
        return InternalCompletableFuture.newCompletedFuture(null, CALLER_RUNS);
    }

    /** Fetches the session service from the current Raft group leader. */
    private SessionAccessor getSessionAccessor() {
        HazelcastInstance leaderInstance = getLeaderInstance(members, groupId);
        return getNodeEngineImpl(leaderInstance).getService(RaftSessionService.SERVICE_NAME);
    }

    @Override
    protected Config createConfig(int cpNodeCount, int groupSize) {
        Config config = super.createConfig(cpNodeCount, groupSize);
        // Aggressive heartbeat interval and short TTL keep the expiry tests fast.
        CPSubsystemConfig cpSubsystemConfig = config.getCPSubsystemConfig();
        cpSubsystemConfig.setSessionHeartbeatIntervalSeconds(1);
        cpSubsystemConfig.setSessionTimeToLiveSeconds(SESSION_TTL_SECONDS);
        return config;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.openwire.commands;
import java.util.ArrayList;
import java.util.List;
/**
* @openwire:marshaller code="5"
*/
public class ConsumerInfo extends BaseCommand {

    public static final byte DATA_STRUCTURE_TYPE = CommandTypes.CONSUMER_INFO;

    // Consumer priority bands used by the broker's dispatch ordering.
    public static final byte HIGH_PRIORITY = 10;
    public static final byte NORMAL_PRIORITY = 0;
    public static final byte NETWORK_CONSUMER_PRIORITY = -5;
    public static final byte LOW_PRIORITY = -10;

    protected ConsumerId consumerId;
    protected OpenWireDestination destination;
    protected int prefetchSize;
    protected int maximumPendingMessageLimit;
    protected boolean browser;
    protected boolean dispatchAsync;
    protected String selector;
    protected String clientId;
    protected String subscriptionName;
    protected boolean noLocal;
    protected boolean exclusive;
    protected boolean retroactive;
    protected byte priority;
    protected BrokerId[] brokerPath;
    protected boolean optimizedAcknowledge;
    protected boolean noRangeAcks;

    // Network connector values should not be serialized.
    protected transient boolean networkSubscription;
    protected transient List<ConsumerId> networkConsumerIds;

    protected Object additionalPredicate;

    public ConsumerInfo() {
    }

    public ConsumerInfo(ConsumerId consumerId) {
        this.consumerId = consumerId;
    }

    public ConsumerInfo(SessionInfo sessionInfo, long consumerId) {
        this.consumerId = new ConsumerId(sessionInfo.getSessionId(), consumerId);
    }

    /** Returns a shallow copy of this command. */
    public ConsumerInfo copy() {
        ConsumerInfo info = new ConsumerInfo();
        copy(info);
        return info;
    }

    /**
     * Copies this command's state into {@code info} (shallow: array and object
     * references are shared, not cloned).
     * NOTE(review): optimizedAcknowledge, noRangeAcks, additionalPredicate and
     * networkConsumerIds are NOT copied here — confirm that is intentional.
     */
    public void copy(ConsumerInfo info) {
        super.copy(info);
        info.consumerId = consumerId;
        info.destination = destination;
        info.prefetchSize = prefetchSize;
        info.maximumPendingMessageLimit = maximumPendingMessageLimit;
        info.browser = browser;
        info.dispatchAsync = dispatchAsync;
        info.selector = selector;
        info.clientId = clientId;
        info.subscriptionName = subscriptionName;
        info.noLocal = noLocal;
        info.exclusive = exclusive;
        info.retroactive = retroactive;
        info.priority = priority;
        info.brokerPath = brokerPath;
        info.networkSubscription = networkSubscription;
    }

    /** A consumer is durable exactly when it carries a subscription name. */
    public boolean isDurable() {
        return subscriptionName != null;
    }

    @Override
    public byte getDataStructureType() {
        return DATA_STRUCTURE_TYPE;
    }

    /**
     * Is used to uniquely identify the consumer to the broker.
     *
     * @openwire:property version=1 cache=true
     */
    public ConsumerId getConsumerId() {
        return consumerId;
    }

    public void setConsumerId(ConsumerId consumerId) {
        this.consumerId = consumerId;
    }

    /**
     * Is this consumer a queue browser?
     *
     * @openwire:property version=1
     */
    public boolean isBrowser() {
        return browser;
    }

    public void setBrowser(boolean browser) {
        this.browser = browser;
    }

    /**
     * The destination that the consumer is interested in receiving messages
     * from. This destination could be a composite destination.
     *
     * @openwire:property version=1 cache=true
     */
    public OpenWireDestination getDestination() {
        return destination;
    }

    public void setDestination(OpenWireDestination destination) {
        this.destination = destination;
    }

    /**
     * How many messages a broker will send to the client without receiving an
     * ack before he stops dispatching messages to the client.
     *
     * @openwire:property version=1
     */
    public int getPrefetchSize() {
        return prefetchSize;
    }

    public void setPrefetchSize(int prefetchSize) {
        this.prefetchSize = prefetchSize;
    }

    /**
     * How many messages a broker will keep around, above the prefetch limit,
     * for non-durable topics before starting to discard older messages.
     *
     * @openwire:property version=1
     */
    public int getMaximumPendingMessageLimit() {
        return maximumPendingMessageLimit;
    }

    public void setMaximumPendingMessageLimit(int maximumPendingMessageLimit) {
        this.maximumPendingMessageLimit = maximumPendingMessageLimit;
    }

    /**
     * Should the broker dispatch a message to the consumer async? If he does it
     * async, then he uses a more SEDA style of processing while if it is not
     * done async, then he broker use a STP style of processing. STP is more
     * appropriate in high bandwidth situations or when being used by and in vm
     * transport.
     *
     * @openwire:property version=1
     */
    public boolean isDispatchAsync() {
        return dispatchAsync;
    }

    public void setDispatchAsync(boolean dispatchAsync) {
        this.dispatchAsync = dispatchAsync;
    }

    /**
     * The JMS selector used to filter out messages that this consumer is
     * interested in.
     *
     * @openwire:property version=1
     */
    public String getSelector() {
        return selector;
    }

    public void setSelector(String selector) {
        this.selector = selector;
    }

    /**
     * Used to identify the id of a client connection.
     *
     * @openwire:property version=10
     */
    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    /**
     * Used to identify the name of a durable subscription.
     *
     * @openwire:property version=1
     */
    public String getSubscriptionName() {
        return subscriptionName;
    }

    public void setSubscriptionName(String durableSubscriptionId) {
        this.subscriptionName = durableSubscriptionId;
    }

    /**
     * Set noLocal to true to avoid receiving messages that were published
     * locally on the same connection.
     *
     * @openwire:property version=1
     */
    public boolean isNoLocal() {
        return noLocal;
    }

    public void setNoLocal(boolean noLocal) {
        this.noLocal = noLocal;
    }

    /**
     * An exclusive consumer locks out other consumers from being able to
     * receive messages from the destination. If there are multiple exclusive
     * consumers for a destination, the first one created will be the exclusive
     * consumer of the destination.
     *
     * @openwire:property version=1
     */
    public boolean isExclusive() {
        return exclusive;
    }

    public void setExclusive(boolean exclusive) {
        this.exclusive = exclusive;
    }

    /**
     * A retroactive consumer only has meaning for Topics. It allows a consumer
     * to retroactively see messages sent prior to the consumer being created.
     * If the consumer is not durable, it will be delivered the last message
     * published to the topic. If the consumer is durable then it will receive
     * all persistent messages that are still stored in persistent storage for
     * that topic.
     *
     * @openwire:property version=1
     */
    public boolean isRetroactive() {
        return retroactive;
    }

    public void setRetroactive(boolean retroactive) {
        this.retroactive = retroactive;
    }

    /** Builds the RemoveInfo command that tears down this consumer. */
    public RemoveInfo createRemoveCommand() {
        RemoveInfo command = new RemoveInfo(getConsumerId());
        command.setResponseRequired(isResponseRequired());
        return command;
    }

    /**
     * The broker will avoid dispatching to a lower priority consumer if there
     * are other higher priority consumers available to dispatch to. This allows
     * letting the broker to have an affinity to higher priority consumers.
     * Default priority is 0.
     *
     * @openwire:property version=1
     */
    public byte getPriority() {
        return priority;
    }

    public void setPriority(byte priority) {
        this.priority = priority;
    }

    /**
     * The route of brokers the command has moved through.
     *
     * @openwire:property version=1 cache=true
     */
    public BrokerId[] getBrokerPath() {
        return brokerPath;
    }

    public void setBrokerPath(BrokerId[] brokerPath) {
        this.brokerPath = brokerPath;
    }

    /**
     * A transient additional predicate that can be used to inject additional
     * predicates into the selector on the fly. Handy if, say, a Security
     * Broker interceptor wants to filter out messages based on security level
     * of the consumer.
     *
     * @openwire:property version=1
     */
    public Object getAdditionalPredicate() {
        return additionalPredicate;
    }

    public void setAdditionalPredicate(Object additionalPredicate) {
        this.additionalPredicate = additionalPredicate;
    }

    @Override
    public Response visit(CommandVisitor visitor) throws Exception {
        return visitor.processAddConsumer(this);
    }

    /**
     * @openwire:property version=1
     * @return Returns the networkSubscription.
     */
    public boolean isNetworkSubscription() {
        return networkSubscription;
    }

    /**
     * @param networkSubscription The networkSubscription to set.
     */
    public void setNetworkSubscription(boolean networkSubscription) {
        this.networkSubscription = networkSubscription;
    }

    /**
     * @openwire:property version=1
     * @return Returns the optimizedAcknowledge.
     */
    public boolean isOptimizedAcknowledge() {
        return optimizedAcknowledge;
    }

    /**
     * @param optimizedAcknowledge The optimizedAcknowledge to set.
     */
    public void setOptimizedAcknowledge(boolean optimizedAcknowledge) {
        this.optimizedAcknowledge = optimizedAcknowledge;
    }

    /**
     * The broker may be able to optimize it's processing or provides better QOS
     * if it knows the consumer will not be sending ranged acks.
     *
     * @return true if the consumer will not send range acks.
     * @openwire:property version=1
     */
    public boolean isNoRangeAcks() {
        return noRangeAcks;
    }

    public void setNoRangeAcks(boolean noRangeAcks) {
        this.noRangeAcks = noRangeAcks;
    }

    /**
     * Tracks the original subscription id that causes a subscription to
     * percolate through a network when networkTTL > 1. Tracking the original
     * subscription allows duplicate suppression.
     *
     * @return array of the current subscription path
     * @openwire:property version=4
     */
    public synchronized ConsumerId[] getNetworkConsumerPath() {
        // synchronized for consistency with addNetworkConsumerId /
        // removeNetworkConsumerId, which mutate networkConsumerIds under the
        // same monitor.
        ConsumerId[] result = null;
        if (networkConsumerIds != null) {
            result = networkConsumerIds.toArray(new ConsumerId[0]);
        }
        return result;
    }

    public void setNetworkConsumerPath(ConsumerId[] consumerPath) {
        if (consumerPath != null) {
            for (int i = 0; i < consumerPath.length; i++) {
                addNetworkConsumerId(consumerPath[i]);
            }
        }
    }

    @Override
    public int hashCode() {
        return (consumerId == null) ? 0 : consumerId.hashCode();
    }

    /**
     * Equality is based solely on {@link #consumerId}, consistent with
     * {@link #hashCode()}.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ConsumerInfo other = (ConsumerInfo) obj;
        // BUG FIX: the previous version threw NullPointerException when both
        // consumerId fields were null (it fell through to consumerId.equals(...)).
        if (consumerId == null) {
            return other.consumerId == null;
        }
        return consumerId.equals(other.consumerId);
    }

    /** Appends a consumer id to the network path, creating the list lazily. */
    public synchronized void addNetworkConsumerId(ConsumerId networkConsumerId) {
        if (networkConsumerIds == null) {
            networkConsumerIds = new ArrayList<ConsumerId>();
        }
        networkConsumerIds.add(networkConsumerId);
    }

    /** Removes a consumer id from the network path, dropping the list when empty. */
    public synchronized void removeNetworkConsumerId(ConsumerId networkConsumerId) {
        if (networkConsumerIds != null) {
            networkConsumerIds.remove(networkConsumerId);
            if (networkConsumerIds.isEmpty()) {
                networkConsumerIds = null;
            }
        }
    }

    @Override
    public boolean isConsumerInfo() {
        return true;
    }
}
| |
/**
* Copyright 2013 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package org.fusesource.camel.component.sap.model.rfc.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.fusesource.camel.component.sap.model.SAPEditPlugin;
import org.fusesource.camel.component.sap.model.rfc.RfcFactory;
import org.fusesource.camel.component.sap.model.rfc.RfcPackage;
import org.fusesource.camel.component.sap.model.rfc.ServerData;
/**
* This is the item provider adapter for a {@link org.fusesource.camel.component.sap.model.rfc.ServerData} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class ServerDataItemProvider
extends ItemProviderAdapter
implements
IEditingDomainItemProvider,
IStructuredItemContentProvider,
ITreeItemContentProvider,
IItemLabelProvider,
IItemPropertySource {
/**
* This constructs an instance from a factory and a notifier.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public ServerDataItemProvider(AdapterFactory adapterFactory) {
super(adapterFactory);
}
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
if (itemPropertyDescriptors == null) {
super.getPropertyDescriptors(object);
addGwhostPropertyDescriptor(object);
addGwservPropertyDescriptor(object);
addProgidPropertyDescriptor(object);
addConnectionCountPropertyDescriptor(object);
addSaprouterPropertyDescriptor(object);
addMaxStartUpDelayPropertyDescriptor(object);
addRepositoryDestinationPropertyDescriptor(object);
addRepositoryMapPropertyDescriptor(object);
addTracePropertyDescriptor(object);
addWorkerThreadCountPropertyDescriptor(object);
addWorkerThreadMinCountPropertyDescriptor(object);
addSncModePropertyDescriptor(object);
addSncQopPropertyDescriptor(object);
addSncMynamePropertyDescriptor(object);
addSncLibPropertyDescriptor(object);
}
return itemPropertyDescriptors;
}
/**
* This adds a property descriptor for the Gwhost feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addGwhostPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_gwhost_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_gwhost_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__GWHOST,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Gwserv feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addGwservPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_gwserv_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_gwserv_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__GWSERV,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Progid feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addProgidPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_progid_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_progid_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__PROGID,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Connection Count feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addConnectionCountPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_connectionCount_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_connectionCount_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__CONNECTION_COUNT,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Saprouter feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSaprouterPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_saprouter_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_saprouter_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__SAPROUTER,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Max Start Up Delay feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addMaxStartUpDelayPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_maxStartUpDelay_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_maxStartUpDelay_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__MAX_START_UP_DELAY,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Repository Destination feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addRepositoryDestinationPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_repositoryDestination_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_repositoryDestination_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__REPOSITORY_DESTINATION,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Repository Map feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addRepositoryMapPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_repositoryMap_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_repositoryMap_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__REPOSITORY_MAP,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Trace feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addTracePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_trace_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_trace_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__TRACE,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Worker Thread Count feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addWorkerThreadCountPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_workerThreadCount_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_workerThreadCount_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__WORKER_THREAD_COUNT,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Worker Thread Min Count feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addWorkerThreadMinCountPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_workerThreadMinCount_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_workerThreadMinCount_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__WORKER_THREAD_MIN_COUNT,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Snc Mode feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSncModePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_sncMode_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_sncMode_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__SNC_MODE,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Snc Qop feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSncQopPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_sncQop_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_sncQop_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__SNC_QOP,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Snc Myname feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSncMynamePropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_sncMyname_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_sncMyname_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__SNC_MYNAME,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This adds a property descriptor for the Snc Lib feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addSncLibPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ServerData_sncLib_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ServerData_sncLib_feature", "_UI_ServerData_type"),
RfcPackage.Literals.SERVER_DATA__SNC_LIB,
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(RfcPackage.Literals.SERVER_DATA__ENTRIES);
}
return childrenFeatures;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EStructuralFeature getChildFeature(Object object, Object child) {
// Check the type of the specified child object and return the proper feature to use for
// adding (see {@link AddCommand}) it as a child.
return super.getChildFeature(object, child);
}
/**
* This returns ServerData.gif.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object getImage(Object object) {
return overlayImage(object, getResourceLocator().getImage("full/obj16/ServerData"));
}
/**
* This returns the label text for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String getText(Object object) {
String label = ((ServerData)object).getSncMyname();
return label == null || label.length() == 0 ?
getString("_UI_ServerData_type") :
getString("_UI_ServerData_type") + " " + label;
}
/**
 * This handles model notifications by calling {@link #updateChildren} to update any cached
 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
 * <!-- begin-user-doc -->
 * Attribute changes trigger a label refresh only; changes to the ENTRIES
 * containment trigger a content (children) refresh.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void notifyChanged(Notification notification) {
    updateChildren(notification);
    switch (notification.getFeatureID(ServerData.class)) {
        // All simple attributes fall through to a label-only refresh
        // (contentRefresh=false, labelUpdate=true).
        case RfcPackage.SERVER_DATA__GWHOST:
        case RfcPackage.SERVER_DATA__GWSERV:
        case RfcPackage.SERVER_DATA__PROGID:
        case RfcPackage.SERVER_DATA__CONNECTION_COUNT:
        case RfcPackage.SERVER_DATA__SAPROUTER:
        case RfcPackage.SERVER_DATA__MAX_START_UP_DELAY:
        case RfcPackage.SERVER_DATA__REPOSITORY_DESTINATION:
        case RfcPackage.SERVER_DATA__REPOSITORY_MAP:
        case RfcPackage.SERVER_DATA__TRACE:
        case RfcPackage.SERVER_DATA__WORKER_THREAD_COUNT:
        case RfcPackage.SERVER_DATA__WORKER_THREAD_MIN_COUNT:
        case RfcPackage.SERVER_DATA__SNC_MODE:
        case RfcPackage.SERVER_DATA__SNC_QOP:
        case RfcPackage.SERVER_DATA__SNC_MYNAME:
        case RfcPackage.SERVER_DATA__SNC_LIB:
            fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
            return;
        // The containment feature requires the children to be recomputed
        // (contentRefresh=true, labelUpdate=false).
        case RfcPackage.SERVER_DATA__ENTRIES:
            fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
            return;
    }
    super.notifyChanged(notification);
}
/**
 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
 * that can be created under this object.
 * <!-- begin-user-doc -->
 * Offers a single child kind: a new SERVER_DATA_ENTRY created under the
 * ENTRIES containment feature.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
    super.collectNewChildDescriptors(newChildDescriptors, object);
    newChildDescriptors.add
        (createChildParameter
            (RfcPackage.Literals.SERVER_DATA__ENTRIES,
             RfcFactory.eINSTANCE.create(RfcPackage.Literals.SERVER_DATA_ENTRY)));
}
/**
 * Return the resource locator for this item provider's resources.
 * <!-- begin-user-doc -->
 * Resources (icons, localized strings) are served by the shared edit
 * plug-in singleton.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public ResourceLocator getResourceLocator() {
    return SAPEditPlugin.INSTANCE;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.recyclebin.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result object for the Recycle Bin {@code GetRule} operation, carrying the
 * attributes of a single retention rule. Code-generated; do not hand-edit logic.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rbin-2021-06-15/GetRule" target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetRuleResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The unique ID of the retention rule.
     * </p>
     */
    private String identifier;
    /**
     * <p>
     * The retention rule description.
     * </p>
     */
    private String description;
    /**
     * <p>
     * The resource type retained by the retention rule.
     * </p>
     */
    private String resourceType;
    /**
     * <p>
     * Information about the retention period for which the retention rule is to retain resources.
     * </p>
     */
    private RetentionPeriod retentionPeriod;
    /**
     * <p>
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     * </p>
     */
    private java.util.List<ResourceTag> resourceTags;
    /**
     * <p>
     * The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     * resources.
     * </p>
     */
    private String status;

    /**
     * <p>
     * The unique ID of the retention rule.
     * </p>
     *
     * @param identifier
     *        The unique ID of the retention rule.
     */
    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    /**
     * <p>
     * The unique ID of the retention rule.
     * </p>
     *
     * @return The unique ID of the retention rule.
     */
    public String getIdentifier() {
        return this.identifier;
    }

    /**
     * <p>
     * The unique ID of the retention rule.
     * </p>
     *
     * @param identifier
     *        The unique ID of the retention rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRuleResult withIdentifier(String identifier) {
        setIdentifier(identifier);
        return this;
    }

    /**
     * <p>
     * The retention rule description.
     * </p>
     *
     * @param description
     *        The retention rule description.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * <p>
     * The retention rule description.
     * </p>
     *
     * @return The retention rule description.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * <p>
     * The retention rule description.
     * </p>
     *
     * @param description
     *        The retention rule description.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRuleResult withDescription(String description) {
        setDescription(description);
        return this;
    }

    /**
     * <p>
     * The resource type retained by the retention rule.
     * </p>
     *
     * @param resourceType
     *        The resource type retained by the retention rule.
     * @see ResourceType
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    /**
     * <p>
     * The resource type retained by the retention rule.
     * </p>
     *
     * @return The resource type retained by the retention rule.
     * @see ResourceType
     */
    public String getResourceType() {
        return this.resourceType;
    }

    /**
     * <p>
     * The resource type retained by the retention rule.
     * </p>
     *
     * @param resourceType
     *        The resource type retained by the retention rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceType
     */
    public GetRuleResult withResourceType(String resourceType) {
        setResourceType(resourceType);
        return this;
    }

    /**
     * <p>
     * The resource type retained by the retention rule.
     * </p>
     *
     * @param resourceType
     *        The resource type retained by the retention rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceType
     */
    public GetRuleResult withResourceType(ResourceType resourceType) {
        // Enum overload stores the string form so the field stays a plain String.
        this.resourceType = resourceType.toString();
        return this;
    }

    /**
     * <p>
     * Information about the retention period for which the retention rule is to retain resources.
     * </p>
     *
     * @param retentionPeriod
     *        Information about the retention period for which the retention rule is to retain resources.
     */
    public void setRetentionPeriod(RetentionPeriod retentionPeriod) {
        this.retentionPeriod = retentionPeriod;
    }

    /**
     * <p>
     * Information about the retention period for which the retention rule is to retain resources.
     * </p>
     *
     * @return Information about the retention period for which the retention rule is to retain resources.
     */
    public RetentionPeriod getRetentionPeriod() {
        return this.retentionPeriod;
    }

    /**
     * <p>
     * Information about the retention period for which the retention rule is to retain resources.
     * </p>
     *
     * @param retentionPeriod
     *        Information about the retention period for which the retention rule is to retain resources.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRuleResult withRetentionPeriod(RetentionPeriod retentionPeriod) {
        setRetentionPeriod(retentionPeriod);
        return this;
    }

    /**
     * <p>
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     * </p>
     *
     * @return Information about the resource tags used to identify resources that are retained by the retention rule.
     */
    public java.util.List<ResourceTag> getResourceTags() {
        return resourceTags;
    }

    /**
     * <p>
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     * </p>
     *
     * @param resourceTags
     *        Information about the resource tags used to identify resources that are retained by the retention rule.
     */
    public void setResourceTags(java.util.Collection<ResourceTag> resourceTags) {
        if (resourceTags == null) {
            this.resourceTags = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection cannot
        // affect this result object.
        this.resourceTags = new java.util.ArrayList<ResourceTag>(resourceTags);
    }

    /**
     * <p>
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setResourceTags(java.util.Collection)} or {@link #withResourceTags(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param resourceTags
     *        Information about the resource tags used to identify resources that are retained by the retention rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRuleResult withResourceTags(ResourceTag... resourceTags) {
        if (this.resourceTags == null) {
            setResourceTags(new java.util.ArrayList<ResourceTag>(resourceTags.length));
        }
        for (ResourceTag ele : resourceTags) {
            this.resourceTags.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Information about the resource tags used to identify resources that are retained by the retention rule.
     * </p>
     *
     * @param resourceTags
     *        Information about the resource tags used to identify resources that are retained by the retention rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetRuleResult withResourceTags(java.util.Collection<ResourceTag> resourceTags) {
        setResourceTags(resourceTags);
        return this;
    }

    /**
     * <p>
     * The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     * resources.
     * </p>
     *
     * @param status
     *        The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     *        resources.
     * @see RuleStatus
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * <p>
     * The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     * resources.
     * </p>
     *
     * @return The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     *         resources.
     * @see RuleStatus
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * <p>
     * The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     * resources.
     * </p>
     *
     * @param status
     *        The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     *        resources.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RuleStatus
     */
    public GetRuleResult withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * <p>
     * The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     * resources.
     * </p>
     *
     * @param status
     *        The state of the retention rule. Only retention rules that are in the <code>available</code> state retain
     *        resources.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RuleStatus
     */
    public GetRuleResult withStatus(RuleStatus status) {
        // Enum overload stores the string form so the field stays a plain String.
        this.status = status.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getIdentifier() != null)
            sb.append("Identifier: ").append(getIdentifier()).append(",");
        if (getDescription() != null)
            sb.append("Description: ").append(getDescription()).append(",");
        if (getResourceType() != null)
            sb.append("ResourceType: ").append(getResourceType()).append(",");
        if (getRetentionPeriod() != null)
            sb.append("RetentionPeriod: ").append(getRetentionPeriod()).append(",");
        if (getResourceTags() != null)
            sb.append("ResourceTags: ").append(getResourceTags()).append(",");
        if (getStatus() != null)
            sb.append("Status: ").append(getStatus());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof GetRuleResult == false)
            return false;
        GetRuleResult other = (GetRuleResult) obj;
        // Per field: the XOR check rejects "one null, one non-null"; the second
        // check compares values when both are non-null.
        if (other.getIdentifier() == null ^ this.getIdentifier() == null)
            return false;
        if (other.getIdentifier() != null && other.getIdentifier().equals(this.getIdentifier()) == false)
            return false;
        if (other.getDescription() == null ^ this.getDescription() == null)
            return false;
        if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
            return false;
        if (other.getResourceType() == null ^ this.getResourceType() == null)
            return false;
        if (other.getResourceType() != null && other.getResourceType().equals(this.getResourceType()) == false)
            return false;
        if (other.getRetentionPeriod() == null ^ this.getRetentionPeriod() == null)
            return false;
        if (other.getRetentionPeriod() != null && other.getRetentionPeriod().equals(this.getRetentionPeriod()) == false)
            return false;
        if (other.getResourceTags() == null ^ this.getResourceTags() == null)
            return false;
        if (other.getResourceTags() != null && other.getResourceTags().equals(this.getResourceTags()) == false)
            return false;
        if (other.getStatus() == null ^ this.getStatus() == null)
            return false;
        if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getIdentifier() == null) ? 0 : getIdentifier().hashCode());
        hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
        hashCode = prime * hashCode + ((getResourceType() == null) ? 0 : getResourceType().hashCode());
        hashCode = prime * hashCode + ((getRetentionPeriod() == null) ? 0 : getRetentionPeriod().hashCode());
        hashCode = prime * hashCode + ((getResourceTags() == null) ? 0 : getResourceTags().hashCode());
        hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode());
        return hashCode;
    }

    @Override
    public GetRuleResult clone() {
        try {
            return (GetRuleResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable, so surface it as a
            // programming error rather than a checked exception.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package com.eusecom.attendance;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.flowables.ConnectableFlowable;
import rx.Observable;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
import rx.subscriptions.CompositeSubscription;
import rx.subscriptions.Subscriptions;
import com.eusecom.attendance.models.Company;
import com.eusecom.attendance.models.Employee;
import com.eusecom.attendance.mvvmmodel.Language;
import com.eusecom.attendance.rxbus.RxBus;
import android.support.design.widget.CoordinatorLayout;
import android.widget.Toast;
import static android.support.design.R.styleable.CoordinatorLayout;
//github https://github.com/florina-muntenescu/DroidconMVVM
//by https://medium.com/upday-devs/android-architecture-patterns-part-3-model-view-viewmodel-e7eeee76b73b
//1. getViewModel
//2. subscriptions for values emited
//3. in MainViewModel interaction with user mViewModel.emitlanguageSelected(languageSelected);
/**
 * MVVM-style activity that lists employees, reacts to RxBus events (FAB taps,
 * employee long-clicks) and binds to {@link EmployeeMvvmViewModel} observables
 * while resumed.
 *
 * <p>Threading: view-model streams are subscribed on the computation scheduler
 * and observed on the Android main thread; subscriptions live from
 * {@code onResume} to {@code onPause} (cleared, not unsubscribed, so the
 * CompositeSubscription can be reused).</p>
 */
public class EmployeeMvvmActivity extends AppCompatActivity {

    @NonNull
    private CompositeSubscription mSubscription;
    @NonNull
    private EmployeeMvvmViewModel mViewModel;
    @Nullable
    private RecyclerView mRecycler;
    private LinearLayoutManager mManager;
    private EmployeesRxAdapter mAdapter;
    private RxBus _rxBus;
    private CompositeDisposable _disposables;
    @Nullable
    private TextView mGreetingView, mMessageView;
    @Nullable
    private Spinner mLanguagesSpinner;
    @Nullable
    private LanguageMvvmSpinnerAdapter mLanguageSpinnerAdapter;
    Toolbar mActionBarToolbar;
    private CoordinatorLayout coordinatorLayout;
    // Currently shown dialog, if any; dismissed in onDestroy() to avoid a
    // leaked-window error.
    AlertDialog dialog = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_mvvm_employees);
        coordinatorLayout = (CoordinatorLayout) findViewById(R.id
                .coordinatorLayout);
        mActionBarToolbar = (Toolbar) findViewById(R.id.tool_bar);
        setSupportActionBar(mActionBarToolbar);
        getSupportActionBar().setTitle(getString(R.string.action_myemployee));
        mViewModel = getEmployeeMvvmViewModel();
        _rxBus = ((AttendanceApplication) getApplication()).getRxBusSingleton();
        _disposables = new CompositeDisposable();
        // Multicast the bus so several subscribers share one upstream connection.
        ConnectableFlowable<Object> tapEventEmitter = _rxBus.asFlowable().publish();
        _disposables
                .add(tapEventEmitter.subscribe(event -> {
                    //Log.d("rxBus ", "tapEventEmitter");
                    if (event instanceof EmployeeMvvmActivity.FobTapEvent) {
                        Log.d("EmpoloyeeActivity ", " fobClick ");
                        //attention - activity leaked
                        //mSubscription.add(getNewEmployeeDialog(getString(R.string.newcompany), getString(R.string.fullfirma))
                        //        .subscribeOn(rx.android.schedulers.AndroidSchedulers.mainThread())
                        //        .observeOn(Schedulers.computation())
                        //        .subscribe(this::setBoolean)
                        //);;
                    }
                    if (event instanceof Employee) {
                        // Long-click on a list row posts the Employee itself.
                        String keys = ((Employee) event).getUsatw();
                        //Log.d("In FRGM longClick", keys);
                        Employee model = (Employee) event;
                        //Toast.makeText(this, "Longclick " + keys,Toast.LENGTH_SHORT).show();
                        getEditEmployeeDialog(model);
                    }
                }));
        _disposables
                .add(tapEventEmitter.publish(stream ->
                        stream.buffer(stream.debounce(1, TimeUnit.SECONDS)))
                        .observeOn(io.reactivex.android.schedulers.AndroidSchedulers.mainThread()).subscribe(taps -> {
                            ///_showTapCount(taps.size()); OK
                        }));
        _disposables.add(tapEventEmitter.connect());
        setupViews();
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                Toast.makeText(EmployeeMvvmActivity.this, R.string.createemployee, Toast.LENGTH_LONG).show();
            }
        });
    }

    /** Wires up the recycler, text views and the language spinner. */
    private void setupViews() {
        mRecycler = (RecyclerView) findViewById(R.id.employees_list);
        mRecycler.setHasFixedSize(true);
        mManager = new LinearLayoutManager(this);
        mManager.setReverseLayout(true);
        mManager.setStackFromEnd(true);
        mRecycler.setLayoutManager(mManager);
        mAdapter = new EmployeesRxAdapter(Collections.<Employee>emptyList(), _rxBus);
        mRecycler.setAdapter(mAdapter);
        mMessageView = (TextView) findViewById(R.id.message);
        mGreetingView = (TextView) findViewById(R.id.greeting);
        mLanguagesSpinner = (Spinner) findViewById(R.id.languages);
        assert mLanguagesSpinner != null;
        mLanguagesSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(final AdapterView<?> parent, final View view,
                                       final int position, final long id) {
                itemSelected(position);
            }

            @Override
            public void onNothingSelected(final AdapterView<?> parent) {
                //nothing to do here
            }
        });
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        _disposables.dispose();
        // Dismiss any open dialog so the window is not leaked on destroy.
        try {
            if (dialog != null && dialog.isShowing()) {
                dialog.dismiss();
                dialog = null;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        bind();
    }

    @Override
    protected void onPause() {
        super.onPause();
        unBind();
    }

    /** Subscribes this activity to all view-model streams. */
    private void bind() {
        mSubscription = new CompositeSubscription();
        mSubscription.add(mViewModel.getObservableGreeting()
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(this::setGreeting));
        mSubscription.add(mViewModel.getObservableKeyEditedEmployee()
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(this::setMessage));
        mSubscription.add(mViewModel.getObservableFob()
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(this::setMessage));
        mSubscription.add(mViewModel.getObservableSupportedLanguages()
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(this::setLanguages));
        mSubscription.add(mViewModel.getObservableFBusersEmployeeSpinner()
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(this::setEmployees));
    }

    /** Clears (not unsubscribes) the subscriptions and empties the list. */
    private void unBind() {
        mAdapter.setData(Collections.<Employee>emptyList());
        //is better to use mSubscription.clear(); by https://medium.com/@scanarch/how-to-leak-memory-with-subscriptions-in-rxjava-ae0ef01ad361
        //mSubscription.unsubscribe();
        mSubscription.clear();
    }

    /**
     * Builds an Observable that shows a "new employee" dialog and emits
     * {@code true} on save / {@code false} on cancel.
     * NOTE(review): subscribing this from a long-lived stream leaks the
     * activity (see the commented-out caller in onCreate) — confirm before use.
     */
    Observable<Boolean> getNewEmployeeDialog(String title, String message) {
        //attention - activity leaked by use observab;e getNewEmployeeDialog
        return Observable.create((Subscriber<? super Boolean> subscriber) -> {
            LayoutInflater inflater = this.getLayoutInflater();
            View textenter = inflater.inflate(R.layout.companies_new_dialog, null);
            final EditText namex = (EditText) textenter.findViewById(R.id.namex);
            namex.setText("name");
            final EditText icox = (EditText) textenter.findViewById(R.id.icox);
            icox.setText("12345678");
            final EditText cityx = (EditText) textenter.findViewById(R.id.cityx);
            cityx.setText("city");
            dialog = new AlertDialog.Builder(this)
                    .setView(textenter)
                    .setTitle(title)
                    //.setMessage(message)
                    .setPositiveButton(getString(R.string.save), (dialog, which) -> {
                        String namexx = namex.getText().toString();
                        String icoxx = icox.getText().toString();
                        String cityxx = cityx.getText().toString();
                        Company newCompany = new Company(icoxx, namexx, " ", "0", cityxx);
                        //mViewModel.saveNewCompany(newCompany);
                        try {
                            subscriber.onNext(true);
                            subscriber.onCompleted();
                        } catch (Exception e) {
                            subscriber.onError(e);
                            e.printStackTrace();
                        }
                    })
                    .setNegativeButton(getString(R.string.cancel), (dialog, which) -> {
                        try {
                            subscriber.onNext(false);
                            subscriber.onCompleted();
                        } catch (Exception e) {
                            subscriber.onError(e);
                            e.printStackTrace();
                        }
                    })
                    .create();
            // cleaning up: dismiss the dialog when the subscriber unsubscribes
            subscriber.add(Subscriptions.create(dialog::dismiss));
            //textenter = null;
            dialog.show();
        });
    }

    /**
     * Shows an edit dialog pre-filled with the given employee's data and
     * forwards the edited values to the view model on save.
     */
    private void getEditEmployeeDialog(@NonNull final Employee employee) {
        String keys = employee.getUsatw();
        //Log.d("In editDialog ", keys);
        LayoutInflater inflater = LayoutInflater.from(this);
        final View textenter = inflater.inflate(R.layout.employee_edit_dialog, null);
        final EditText namex = (EditText) textenter.findViewById(R.id.namex);
        namex.setText(employee.username);
        final EditText oscx = (EditText) textenter.findViewById(R.id.oscx);
        oscx.setText(employee.usosc);
        final EditText icox = (EditText) textenter.findViewById(R.id.icox);
        icox.setText(employee.usico);
        final EditText typx = (EditText) textenter.findViewById(R.id.typx);
        typx.setText(employee.ustype);
        final EditText uswx = (EditText) textenter.findViewById(R.id.uswx);
        uswx.setText("0");
        final AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setView(textenter).setTitle(employee.email);
        builder.setPositiveButton(getString(R.string.save), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int id) {
                String namexx = namex.getText().toString();
                String oscxx = oscx.getText().toString();
                String icoxx = icox.getText().toString();
                String typxx = typx.getText().toString();
                String uswxx = uswx.getText().toString();
                mViewModel.saveEditEmloyee(employee, namexx, oscxx, icoxx, typxx, uswxx);
            }
        })
                .setNegativeButton(getString(R.string.cancel), new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        dialog.cancel();
                    }
                });
        // Bug fix: the old code did `AlertDialog dialog = builder.create();
        // builder.show();` — builder.show() builds and shows a SECOND dialog,
        // while the created one (shadowing the 'dialog' field) was never shown,
        // so onDestroy() could not dismiss the visible dialog (window leak).
        // Create once, keep it in the field, and show that instance.
        dialog = builder.create();
        dialog.show();
    }

    private void setBoolean(@NonNull final Boolean booleanx) {
        Log.i("setBoolean ", valueOf(booleanx));
    }

    /** Null-safe toString helper. */
    public static String valueOf(Object obj) {
        return (obj == null) ? "null" : obj.toString();
    }

    /** Replaces the adapter's data with the freshly loaded employee list. */
    private void setEmployees(@NonNull final List<Employee> employees) {
        assert mRecycler != null;
        mAdapter.setData(employees);
    }

    /** Shows a "saved <message>" snackbar anchored to the coordinator layout. */
    private void setMessage(@NonNull final String message) {
        //Log.i("setMessage ", "method ");
        final String messagex = getString(R.string.saved) + " " + message;
        //field_title.setText(message);
        Snackbar snackbar = Snackbar
                .make(coordinatorLayout, messagex, Snackbar.LENGTH_LONG);
        snackbar.show();
    }

    //spinner methods

    /**
     * Displays the greeting and kicks off the employee query keyed on the
     * greeting plus the configured company ICO.
     */
    private void setGreeting(@NonNull final String greeting) {
        assert mGreetingView != null;
        mGreetingView.setText(greeting);
        String usicox = SettingsActivity.getUsIco(EmployeeMvvmActivity.this);
        String[] conditionsx = {
                greeting,
                usicox
        };
        mViewModel.getBySpinnerEmloyee(conditionsx);
    }

    /** Populates the language spinner with the supported languages. */
    private void setLanguages(@NonNull final List<Language> languages) {
        assert mLanguagesSpinner != null;
        mLanguageSpinnerAdapter = new LanguageMvvmSpinnerAdapter(this,
                R.layout.employee_mvvm_spinner_item,
                languages);
        mLanguagesSpinner.setAdapter(mLanguageSpinnerAdapter);
    }

    /** Forwards the spinner selection to the view model. */
    private void itemSelected(final int position) {
        assert mLanguageSpinnerAdapter != null;
        Language languageSelected = mLanguageSpinnerAdapter.getItem(position);
        mViewModel.emitlanguageSelected(languageSelected);
    }

    /** Marker event posted on the RxBus when the FAB is tapped. */
    public static class FobTapEvent {}

    @NonNull
    private EmployeeMvvmViewModel getEmployeeMvvmViewModel() {
        return ((AttendanceApplication) getApplication()).getEmployeeMvvmViewModel();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.InvalidResourceBlacklistRequestException;
import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MockRMWithAMS;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MyContainerManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.Assert;
import org.junit.Test;
public class TestSchedulerUtils {
private static final Log LOG = LogFactory.getLog(TestSchedulerUtils.class);
@Test (timeout = 30000)
public void testNormalizeRequest() {
    // Verifies that SchedulerUtils.normalizeRequest clamps memory asks into
    // [minMemory, maxMemory] and rounds them up to a multiple of minMemory.
    // The same 'ask' object is mutated and re-normalized for each case, so
    // the case order matters.
    ResourceCalculator resourceCalculator = new DefaultResourceCalculator();

    final int minMemory = 1024;
    final int maxMemory = 8192;
    Resource minResource = Resources.createResource(minMemory, 0);
    Resource maxResource = Resources.createResource(maxMemory, 0);

    ResourceRequest ask = new ResourceRequestPBImpl();

    // case negative memory
    ask.setCapability(Resources.createResource(-1024));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(minMemory, ask.getCapability().getMemory());

    // case zero memory
    ask.setCapability(Resources.createResource(0));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(minMemory, ask.getCapability().getMemory());

    // case memory is a multiple of minMemory
    ask.setCapability(Resources.createResource(2 * minMemory));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(2 * minMemory, ask.getCapability().getMemory());

    // case memory is not a multiple of minMemory
    ask.setCapability(Resources.createResource(minMemory + 10));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(2 * minMemory, ask.getCapability().getMemory());

    // case memory is equal to max allowed
    ask.setCapability(Resources.createResource(maxMemory));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(maxMemory, ask.getCapability().getMemory());

    // case memory is just less than max
    ask.setCapability(Resources.createResource(maxMemory - 10));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(maxMemory, ask.getCapability().getMemory());

    // max is not a multiple of min
    maxResource = Resources.createResource(maxMemory - 10, 0);
    ask.setCapability(Resources.createResource(maxMemory - 100));
    // multiple of minMemory > maxMemory, then reduce to maxMemory
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(maxResource.getMemory(), ask.getCapability().getMemory());

    // ask is more than max
    maxResource = Resources.createResource(maxMemory, 0);
    ask.setCapability(Resources.createResource(maxMemory + 100));
    SchedulerUtils.normalizeRequest(ask, resourceCalculator, null, minResource,
        maxResource);
    assertEquals(maxResource.getMemory(), ask.getCapability().getMemory());
}
@Test (timeout = 30000)
public void testNormalizeRequestWithDominantResourceCalculator() {
  ResourceCalculator calculator = new DominantResourceCalculator();
  Resource min = Resources.createResource(1024, 1);
  Resource max = Resources.createResource(10240, 10);
  Resource cluster = Resources.createResource(10 * 1024, 10);
  ResourceRequest request = new ResourceRequestPBImpl();

  // Negative memory/vcores must normalize up to the minimum allocation.
  request.setCapability(Resources.createResource(-1024, -1));
  SchedulerUtils.normalizeRequest(
      request, calculator, cluster, min, max);
  assertEquals(min, request.getCapability());

  // Zero memory/vcores likewise normalize to the minimum allocation.
  request.setCapability(Resources.createResource(0, 0));
  SchedulerUtils.normalizeRequest(
      request, calculator, cluster, min, max);
  assertEquals(min, request.getCapability());
  assertEquals(1, request.getCapability().getVirtualCores());
  assertEquals(1024, request.getCapability().getMemory());

  // Non-zero memory with zero vcores: memory rounds up to the next multiple
  // of the minimum (1536 -> 2048) and vcores rise to the minimum of one.
  request.setCapability(Resources.createResource(1536, 0));
  SchedulerUtils.normalizeRequest(
      request, calculator, cluster, min, max);
  assertEquals(Resources.createResource(2048, 1), request.getCapability());
  assertEquals(1, request.getCapability().getVirtualCores());
  assertEquals(2048, request.getCapability().getMemory());
}
@Test (timeout = 30000)
public void testValidateResourceRequest() {
  // Boundary checks for SchedulerUtils.validateResourceRequest: values in
  // [0, max] are accepted, negative or above-max values must be rejected
  // with InvalidResourceRequestException.
  Resource maxResource = Resources.createResource(
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);

  // zero memory
  try {
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Zero memory should be accepted");
  }

  // zero vcores
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        0);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Zero vcores should be accepted");
  }

  // max memory
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Max memory should be accepted");
  }

  // max vcores
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
  } catch (InvalidResourceRequestException e) {
    // Fixed message: this is the accepted-boundary case, parallel to the
    // "max memory" case above; the original message said "should not be
    // accepted", contradicting the test's intent.
    fail("Max vcores should be accepted");
  }

  // negative memory
  try {
    Resource resource = Resources.createResource(
        -1,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
    fail("Negative memory should not be accepted");
  } catch (InvalidResourceRequestException e) {
    // expected
  }

  // negative vcores
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        -1);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
    fail("Negative vcores should not be accepted");
  } catch (InvalidResourceRequestException e) {
    // expected
  }

  // more than max memory
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + 1,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
    fail("More than max memory should not be accepted");
  } catch (InvalidResourceRequestException e) {
    // expected
  }

  // more than max vcores
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES
            + 1);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    SchedulerUtils.validateResourceRequest(resReq, maxResource);
    fail("More than max vcores should not be accepted");
  } catch (InvalidResourceRequestException e) {
    // expected
  }
}
/**
 * Verifies that an AM allocate call that blacklists
 * {@code ResourceRequest.ANY} is rejected by the RM with an
 * {@link InvalidResourceBlacklistRequestException}.
 */
@Test
public void testValidateResourceBlacklistRequest() throws Exception {
  MyContainerManager containerManager = new MyContainerManager();
  final MockRMWithAMS rm =
      new MockRMWithAMS(new YarnConfiguration(), containerManager);
  rm.start();

  MockNM nm1 = rm.registerNode("localhost:1234", 5120);

  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(2);
  acls.put(ApplicationAccessType.VIEW_APP, "*");
  RMApp app = rm.submitApp(1024, "appname", "appuser", acls);

  nm1.nodeHeartbeat(true);

  RMAppAttempt attempt = app.getCurrentAppAttempt();
  ApplicationAttemptId applicationAttemptId = attempt.getAppAttemptId();
  waitForLaunchedState(attempt);

  // Create a client to the RM.
  final Configuration conf = rm.getConfig();
  final YarnRPC rpc = YarnRPC.create(conf);

  // Impersonate the attempt and attach its AMRM token so the allocate call
  // is authenticated like a real AM.
  UserGroupInformation currentUser =
      UserGroupInformation.createRemoteUser(applicationAttemptId.toString());
  Credentials credentials = containerManager.getContainerCredentials();
  final InetSocketAddress rmBindAddress =
      rm.getApplicationMasterService().getBindAddress();
  Token<? extends TokenIdentifier> amRMToken =
      MockRMWithAMS.setupAndReturnAMRMToken(rmBindAddress,
        credentials.getAllTokens());
  currentUser.addToken(amRMToken);
  ApplicationMasterProtocol client =
      currentUser.doAs(new PrivilegedAction<ApplicationMasterProtocol>() {
        @Override
        public ApplicationMasterProtocol run() {
          return (ApplicationMasterProtocol) rpc.getProxy(
            ApplicationMasterProtocol.class, rmBindAddress, conf);
        }
      });

  RegisterApplicationMasterRequest request = Records
      .newRecord(RegisterApplicationMasterRequest.class);
  client.registerApplicationMaster(request);

  // Blacklisting ANY is invalid and must be refused.
  ResourceBlacklistRequest blacklistRequest =
      ResourceBlacklistRequest.newInstance(
          Collections.singletonList(ResourceRequest.ANY), null);

  AllocateRequest allocateRequest =
      AllocateRequest.newInstance(0, 0.0f, null, null, blacklistRequest);
  boolean error = false;
  try {
    client.allocate(allocateRequest);
  } catch (InvalidResourceBlacklistRequestException e) {
    error = true;
  }

  rm.stop();

  // Fixed message: the original read "Didn't not catch ...", a double
  // negative that contradicted the intent of the assertion.
  Assert.assertTrue(
      "Did not catch InvalidResourceBlacklistRequestException", error);
}
/**
 * Polls the attempt state once per second, for at most 20 seconds, until it
 * reaches LAUNCHED, then asserts that the state was actually reached.
 *
 * @param attempt the app attempt to wait on
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitForLaunchedState(RMAppAttempt attempt)
    throws InterruptedException {
  int waitCount = 0;
  while (attempt.getAppAttemptState() != RMAppAttemptState.LAUNCHED
      && waitCount++ < 20) {
    LOG.info("Waiting for AppAttempt to reach LAUNCHED state. "
        + "Current state is " + attempt.getAppAttemptState());
    Thread.sleep(1000);
  }
  // JUnit's contract is assertEquals(expected, actual); the original call
  // had the arguments reversed, producing a misleading failure message.
  Assert.assertEquals(RMAppAttemptState.LAUNCHED,
      attempt.getAppAttemptState());
}
@Test
public void testComparePriorities(){
  // In YARN, a numerically smaller priority value is the more urgent one,
  // so the value-1 priority must compare greater than the value-2 priority.
  Priority urgent = Priority.newInstance(1);
  Priority relaxed = Priority.newInstance(2);
  assertTrue(urgent.compareTo(relaxed) > 0);
}
@Test
public void testCreateAbnormalContainerStatus() {
  // Build a fresh container id for the synthetic status.
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  ApplicationAttemptId attemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newInstance(attemptId, 1);

  // An "abnormal" status must carry the ABORTED exit code.
  ContainerStatus status =
      SchedulerUtils.createAbnormalContainerStatus(containerId, "x");
  Assert.assertEquals(ContainerExitStatus.ABORTED, status.getExitStatus());
}
@Test
public void testCreatePreemptedContainerStatus() {
  // Build a fresh container id for the synthetic status.
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  ApplicationAttemptId attemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newInstance(attemptId, 1);

  // A preempted status must carry the PREEMPTED exit code.
  ContainerStatus status =
      SchedulerUtils.createPreemptedContainerStatus(containerId, "x");
  Assert.assertEquals(ContainerExitStatus.PREEMPTED, status.getExitStatus());
}
/**
 * Fires an app-added event at the scheduler, checks the application was
 * registered for the given queue/user, then fires an app-removed event and
 * checks the application was purged.
 *
 * @param applications the scheduler's live application map
 * @param handler the scheduler's event handler
 * @param queueName the queue to submit to
 * @return the SchedulerApplication that was registered (already removed
 *         from the map when this method returns)
 */
public static <T> SchedulerApplication verifyAppAddedAndRemovedFromScheduler(
    final Map<ApplicationId, SchedulerApplication> applications,
    EventHandler<SchedulerEvent> handler, String queueName) throws Exception {
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  handler.handle(new AppAddedSchedulerEvent(appId, queueName, "user"));

  // The scheduler must have registered the application under the new id.
  SchedulerApplication app = applications.get(appId);
  Assert.assertNotNull(app);
  Assert.assertEquals("user", app.getUser());

  // Removal must purge the application from the scheduler's map.
  handler.handle(new AppRemovedSchedulerEvent(appId, RMAppState.FINISHED));
  Assert.assertNull(applications.get(appId));
  return app;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.IOException;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.cql3.CFDefinition;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.utils.Allocator;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.HeapAllocator;
/**
* Column is immutable, which prevents all kinds of confusion in a multithreaded environment.
* (TODO: look at making SuperColumn immutable too. This is trickier but is probably doable
* with something like PCollections -- http://code.google.com
*/
public class Column implements IColumn
{
private static final ColumnSerializer serializer = new ColumnSerializer();
private static final OnDiskAtom.Serializer onDiskSerializer = new OnDiskAtom.Serializer(serializer);
public static ColumnSerializer serializer()
{
return serializer;
}
public static OnDiskAtom.Serializer onDiskSerializer()
{
return onDiskSerializer;
}
protected final ByteBuffer name;
protected final ByteBuffer value;
protected final long timestamp;
Column(ByteBuffer name)
{
this(name, ByteBufferUtil.EMPTY_BYTE_BUFFER);
}
public Column(ByteBuffer name, ByteBuffer value)
{
this(name, value, 0);
}
public Column(ByteBuffer name, ByteBuffer value, long timestamp)
{
assert name != null;
assert value != null;
assert name.remaining() <= IColumn.MAX_NAME_LENGTH;
this.name = name;
this.value = value;
this.timestamp = timestamp;
}
public ByteBuffer name()
{
return name;
}
public Column getSubColumn(ByteBuffer columnName)
{
throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
}
public ByteBuffer value()
{
return value;
}
public Collection<IColumn> getSubColumns()
{
throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
}
public long timestamp()
{
return timestamp;
}
public long minTimestamp()
{
return timestamp;
}
public long maxTimestamp()
{
return timestamp;
}
public boolean isMarkedForDelete()
{
return (int) (System.currentTimeMillis() / 1000) >= getLocalDeletionTime();
}
public long getMarkedForDeleteAt()
{
throw new IllegalStateException("column is not marked for delete");
}
public long mostRecentLiveChangeAt()
{
return timestamp;
}
public long mostRecentNonGCableChangeAt(int gcbefore)
{
return timestamp;
}
public int dataSize()
{
return name().remaining() + value.remaining() + TypeSizes.NATIVE.sizeof(timestamp);
}
public int serializedSize(TypeSizes typeSizes)
{
/*
* Size of a column is =
* size of a name (short + length of the string)
* + 1 byte to indicate if the column has been deleted
* + 8 bytes for timestamp
* + 4 bytes which basically indicates the size of the byte array
* + entire byte array.
*/
int nameSize = name.remaining();
int valueSize = value.remaining();
return typeSizes.sizeof((short) nameSize) + nameSize + 1 + typeSizes.sizeof(timestamp) + typeSizes.sizeof(valueSize) + valueSize;
}
public long serializedSizeForSSTable()
{
return serializedSize(TypeSizes.NATIVE);
}
public int serializationFlags()
{
return 0;
}
public void addColumn(IColumn column)
{
addColumn(null, null);
}
public void addColumn(IColumn column, Allocator allocator)
{
throw new UnsupportedOperationException("This operation is not supported for simple columns.");
}
public IColumn diff(IColumn column)
{
if (timestamp() < column.timestamp())
{
return column;
}
return null;
}
public void updateDigest(MessageDigest digest)
{
digest.update(name.duplicate());
digest.update(value.duplicate());
DataOutputBuffer buffer = new DataOutputBuffer();
try
{
buffer.writeLong(timestamp);
buffer.writeByte(serializationFlags());
}
catch (IOException e)
{
throw new RuntimeException(e);
}
digest.update(buffer.getData(), 0, buffer.getLength());
}
public int getLocalDeletionTime()
{
return Integer.MAX_VALUE;
}
public IColumn reconcile(IColumn column)
{
return reconcile(column, HeapAllocator.instance);
}
public IColumn reconcile(IColumn column, Allocator allocator)
{
// tombstones take precedence. (if both are tombstones, then it doesn't matter which one we use.)
if (isMarkedForDelete())
return timestamp() < column.timestamp() ? column : this;
if (column.isMarkedForDelete())
return timestamp() > column.timestamp() ? this : column;
// break ties by comparing values.
if (timestamp() == column.timestamp())
return value().compareTo(column.value()) < 0 ? column : this;
// neither is tombstoned and timestamps are different
return timestamp() < column.timestamp() ? column : this;
}
@Override
public boolean equals(Object o)
{
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Column column = (Column)o;
if (timestamp != column.timestamp)
return false;
if (!name.equals(column.name))
return false;
return value.equals(column.value);
}
@Override
public int hashCode()
{
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (value != null ? value.hashCode() : 0);
result = 31 * result + (int)(timestamp ^ (timestamp >>> 32));
return result;
}
public IColumn localCopy(ColumnFamilyStore cfs)
{
return localCopy(cfs, HeapAllocator.instance);
}
public IColumn localCopy(ColumnFamilyStore cfs, Allocator allocator)
{
return new Column(cfs.internOrCopy(name, allocator), allocator.clone(value), timestamp);
}
public String getString(AbstractType<?> comparator)
{
StringBuilder sb = new StringBuilder();
sb.append(comparator.getString(name));
sb.append(":");
sb.append(isMarkedForDelete());
sb.append(":");
sb.append(value.remaining());
sb.append("@");
sb.append(timestamp());
return sb.toString();
}
public boolean isLive()
{
return !isMarkedForDelete();
}
protected void validateName(CFMetaData metadata) throws MarshalException
{
AbstractType<?> nameValidator = metadata.cfType == ColumnFamilyType.Super ? metadata.subcolumnComparator : metadata.comparator;
nameValidator.validate(name());
}
public void validateFields(CFMetaData metadata) throws MarshalException
{
validateName(metadata);
CFDefinition cfdef = metadata.getCfDef();
// If this is a CQL table, we need to pull out the CQL column name to look up the correct column type.
// (Note that COMPACT composites are handled by validateName, above.)
ByteBuffer internalName;
internalName = (cfdef.isComposite && !cfdef.isCompact)
? ((CompositeType) metadata.comparator).extractLastComponent(name)
: name;
AbstractType<?> valueValidator = metadata.getValueValidator(internalName);
if (valueValidator != null)
valueValidator.validate(value());
}
public boolean hasIrrelevantData(int gcBefore)
{
return getLocalDeletionTime() < gcBefore;
}
public static Column create(String value, long timestamp, String... names)
{
return new Column(decomposeName(names), UTF8Type.instance.decompose(value), timestamp);
}
public static Column create(int value, long timestamp, String... names)
{
return new Column(decomposeName(names), Int32Type.instance.decompose(value), timestamp);
}
public static Column create(boolean value, long timestamp, String... names)
{
return new Column(decomposeName(names), BooleanType.instance.decompose(value), timestamp);
}
public static Column create(double value, long timestamp, String... names)
{
return new Column(decomposeName(names), DoubleType.instance.decompose(value), timestamp);
}
public static Column create(ByteBuffer value, long timestamp, String... names)
{
return new Column(decomposeName(names), value, timestamp);
}
public static Column create(InetAddress value, long timestamp, String... names)
{
return new Column(decomposeName(names), InetAddressType.instance.decompose(value), timestamp);
}
public static Column create(long value, long timestamp, String... names)
{
return new Column(decomposeName(names), LongType.instance.decompose(value), timestamp);
}
public static ByteBuffer decomposeName(String... names)
{
assert names.length > 0;
if (names.length == 1)
return UTF8Type.instance.decompose(names[0]);
// not super performant. at this time, only infrequently called schema code uses this.
List<AbstractType<?>> types = new ArrayList<AbstractType<?>>(names.length);
for (int i = 0; i < names.length; i++)
types.add(UTF8Type.instance);
CompositeType.Builder builder = new CompositeType.Builder(CompositeType.getInstance(types));
for (String name : names)
builder.add(UTF8Type.instance.decompose(name));
return builder.build();
}
}
| |
/**
* Copyright 2017 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.dawg.glue;
import java.util.List;
import com.comcast.dawg.DawgTestException;
import com.comcast.dawg.constants.DawgHouseConstants;
import com.comcast.dawg.constants.TestConstants;
import com.comcast.dawg.helper.DawgHouseRestHelper;
import com.comcast.video.dawg.common.MetaStb;
import com.comcast.zucchini.TestContext;
import com.jayway.restassured.response.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import cucumber.api.DataTable;
import cucumber.api.java.en.And;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.When;
/**
* This class contains implementation of dawg house REST services specific to STB Model
* @author Jeeson
*
*/
public class DawgHouseSTBModelRestGlue {

    /**
     * Logger
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(DawgHouseSTBModelRestGlue.class);

    /**
     * Step definition to 'add an STB model' to dawg house via POST request.
     * The created model is stashed in the test context for later steps.
     * @param data
     *            DataTable with STB model details; row 1 is assumed to hold,
     *            in order, the STB id, model name, capabilities and family
     *            name -- TODO confirm against the feature file
     * @throws DawgTestException
     */
    @Given("^I send POST request to 'add STB model' with following properties$")
    public void sendPostReqToAddSTBModel(DataTable data) throws DawgTestException {
        LOGGER.info("Going to send POST request to 'add STB model' with details {}", data.raw().get(1));
        List<String> stbParam = data.raw().get(1);
        MetaStb modelSTB = DawgHouseRestHelper.getInstance().createTestStb(stbParam.get(0), stbParam.get(1),
            stbParam.get(2), stbParam.get(3), null);
        // Share the model with subsequent verification steps via the context.
        TestContext.getCurrent().set(DawgHouseConstants.CONTEXT_STB_MODEL, modelSTB);
        boolean result = DawgHouseRestHelper.getInstance().addSTBModelToDawg(stbParam.get(1), stbParam.get(2),
            stbParam.get(3));
        Assert.assertTrue(result, "Failed to add STB model to dawg house");
        LOGGER.info("Has successfully sent POST request to 'add STB model' with details {}", data.raw().get(1));
    }

    /**
     * Step definition to verify that the STB model added by a previous step
     * is available in dawg house (looked up by model name).
     * @throws DawgTestException
     */
    @And("^I should verify that added STB model is available in the dawg house$")
    public void verifySTBModelAdditionToDawgHouse() throws DawgTestException {
        LOGGER.info("Going to verify added STB model");
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        boolean result = DawgHouseRestHelper.getInstance().stbModelExistsInDawg(modelSTB.getModel().name());
        Assert.assertTrue(result, "Failed to verify presence of STB model in dawg house");
        LOGGER.info("Successfully verified added STB model");
    }

    /**
     * Step definition to 'get STB model' from dawg house via GET request,
     * using the model name stored in the test context.
     * @throws DawgTestException
     */
    @When("^I send GET request to 'get STB model' with valid model id$")
    public void sendGetReqForSTBModel() throws DawgTestException {
        LOGGER.info("Going to send GET request to 'get STB model' with valid model id");
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        boolean result = DawgHouseRestHelper.getInstance().sendGetReqForSTBModel(modelSTB.getModel().name());
        Assert.assertTrue(result, "Failed to get STB model from dawg house");
        LOGGER.info("Has successfully sent GET request to 'get STB model' with valid model id");
    }

    /**
     * Step definition to verify the 'get STB model' response: the raw
     * response body must contain both the model name and the family name.
     */
    @And("^I should verify that the response contains expected model and family name$")
    public void verifyGetSTBModelResponse() {
        LOGGER.info("Going to verify 'get STB model' response");
        Response response = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_REST_RESPONSE);
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        Assert.assertTrue(response.asString().contains(modelSTB.getModel().name()),
            "Failed to verify the STB model name");
        Assert.assertTrue(response.asString().contains(modelSTB.getFamily().name()),
            "Failed to verify the STB model family name");
        LOGGER.info("Successfully verified 'get STB model' response");
    }

    /**
     * Step definition to 'delete STB model' from dawg house via DELETE
     * request, using the model id stored in the test context.
     * @throws DawgTestException
     */
    @When("^I send DELETE request to 'delete STB model' with valid STB model id as path param$")
    public void sendDeleteReqToRemoveSTBModel() throws DawgTestException {
        LOGGER.info("Going to send DELETE request to 'delete STB model' with valid model id");
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        boolean result = DawgHouseRestHelper.getInstance().removeSTBModelFromDawg(modelSTB.getId());
        Assert.assertTrue(result, "Failed to remove STB model from dawg house");
        LOGGER.info("Has successfully sent DELETE request to 'delete STB model' with valid model id");
    }

    /**
     * Step definition to verify that the STB model is no longer present in
     * dawg house after the delete step.
     * @throws DawgTestException
     */
    @And("^I should verify that the STB is removed from dawg house$")
    public void verifySTBModelRemovalFromDawg() throws DawgTestException {
        LOGGER.info("Going to verify that the STB is removed from dawg house");
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        boolean result = DawgHouseRestHelper.getInstance().stbModelExistsInDawg(modelSTB.getId());
        Assert.assertFalse(result, "Failed to remove STB model from dawg house");
        LOGGER.info("Successfully verified that the STB is removed from dawg house");
    }

    /**
     * Step definition to update an existing STB model's details via POST
     * request, reusing the id of the model stored in the test context.
     * @param data
     *            DataTable with STB model details; row 1 is assumed to hold,
     *            in order, the model name, capabilities and family name
     * @throws DawgTestException
     */
    @When("^I send POST request to update same STB model with following properties$")
    public void sendPostReqToUpdateSTBModel(DataTable data) throws DawgTestException {
        LOGGER.info("Going to send POST request to update same STB model with details {}", data.raw().get(1));
        List<String> stbParam = data.raw().get(1);
        MetaStb existingModelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        // Same id as the existing model, new field values.
        MetaStb newModelSTB = DawgHouseRestHelper.getInstance().createTestStb(existingModelSTB.getId(), stbParam.get(0),
            stbParam.get(1), stbParam.get(2), null);
        TestContext.getCurrent().set(DawgHouseConstants.CONTEXT_STB_MODEL, newModelSTB);
        boolean result = DawgHouseRestHelper.getInstance().addSTBModelToDawg(stbParam.get(0), stbParam.get(1),
            stbParam.get(2));
        Assert.assertTrue(result, "Failed to add STB model to dawg house");
        LOGGER.info("Has successfully sent POST request to update same STB model with details {}", data);
    }

    /**
     * Step definition to verify that the updated STB model fields (name,
     * family and capabilities) are reflected in dawg house.
     * @throws DawgTestException
     */
    @And("^I should verify that the STB model fields are updated in the dawg house$")
    public void verifyUpdatedSTBModel() throws DawgTestException {
        LOGGER.info("Going to verify updated STB model");
        MetaStb modelSTB = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_STB_MODEL);
        boolean result = DawgHouseRestHelper.getInstance().stbModelExistsInDawg(modelSTB.getId());
        Assert.assertTrue(result, "Failed to verify updates of STB model in dawg house");
        Response response = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_REST_RESPONSE);
        Assert.assertTrue(response.asString().contains(modelSTB.getModel().name()),
            "Failed to verify the STB model name");
        Assert.assertTrue(response.asString().contains(modelSTB.getFamily().name()),
            "Failed to verify the STB model family name");
        // Capabilities are compared with the surrounding [ ] brackets stripped.
        Assert.assertTrue(response.asString().contains(modelSTB.getCapabilities().toString().replaceAll("\\[|\\]", "")),
            "Failed to verify the STB model capabilities");
        LOGGER.info("Successfully verified the presence of fields in updated STB model");
    }

    /**
     * Step definition to 'add an STB' to dawg house via PUT request.
     * @param id
     *            STB device id
     * @param mac
     *            STB MAC address
     * @param model
     *            STB model name
     * @param capability
     *            STB model capabilities
     * @param family
     *            STB model family name
     * @throws DawgTestException
     */
    @When("^I send PUT request to 'add an STB' with STB device id (.*), mac address (.*), STB model name (.*), model capabilities (.*) and model family (.*)$")
    public void sendPutReqToAddStbToDawg(String id, String mac, String model, String capability, String family) throws DawgTestException {
        LOGGER.info("Going to send PUT request to add STB to dawg house");
        boolean result = DawgHouseRestHelper.getInstance().addStbToDawg(id, mac, model, capability, family, null,
            TestConstants.RestReqType.DETAILED_STB_DEVICE_PARAM);
        Assert.assertTrue(result, "Failed to add STB to dawg house");
        LOGGER.info("Has successfully sent PUT request to 'add an STB' to dawg house");
    }

    /**
     * Step definition to assign STB models in dawg house via the
     * 'assign models' GET service.
     * @throws DawgTestException
     */
    @When("^I send GET request to 'assign models' service$")
    public void sendGetReqToAssignModels() throws DawgTestException {
        LOGGER.info("Going to send GET request to assign STB models to dawg house");
        boolean result = DawgHouseRestHelper.getInstance().assignModelDawg();
        Assert.assertTrue(result, "Failed to assign STB models to dawg house");
        LOGGER.info("Has successfully sent GET request to assign STB models to dawg house");
    }

    /**
     * Step definition to verify the assign models service in dawg house:
     * the device must exist and the response must contain the expected
     * model, capabilities and family.
     * @param id
     *            STB device id
     * @param mac
     *            STB MAC address (not used in the lookup; kept to match the
     *            step's capture groups)
     * @param expectedModel
     *            expected STB model name
     * @param expectedCaps
     *            expected STB model capabilities
     * @param expectedFamily
     *            expected STB model family name
     * @throws DawgTestException
     */
    @And("^I should verify that added STB device contains STB device id (.*), mac address (.*), STB model name (.*), expected model capabilities (.*) and model family (.*)$")
    public void verifyAssignModels(String id, String mac, String expectedModel, String expectedCaps, String expectedFamily) throws DawgTestException {
        LOGGER.info("Going to verify assign models service in dawg house");
        boolean result = DawgHouseRestHelper.getInstance().stbDeviceExistsInDawg(id);
        Assert.assertTrue(result, "Failed to assign STB models to dawg house");
        Response getModelRes = TestContext.getCurrent().get(DawgHouseConstants.CONTEXT_REST_RESPONSE);
        Assert.assertTrue(getModelRes.asString().contains(expectedModel), "Failed to verify the STB model name");
        Assert.assertTrue(getModelRes.asString().contains(expectedCaps), "Failed to verify the STB model capabilities");
        Assert.assertTrue(getModelRes.asString().contains(expectedFamily),
            "Failed to verify the STB model family name");
        LOGGER.info("Successfully verified assign models service in dawg house");
    }

    /**
     * Step definition to 'add an STB model' to dawg house via POST request
     * with the given params, using default capabilities and family from
     * TestConstants.
     * @param id
     *            STB device id
     * @param model
     *            STB model
     * @throws DawgTestException
     */
    @Given("^I send POST request to 'add STB model' with STB device id (.*), STB model name (.*)$")
    public void sendPostReqToAddSTBModelWithGivenParams(String id, String model) throws DawgTestException {
        LOGGER.info("Going to send POST request to 'add STB model' with STB id {}, STB model {}", id, model);
        MetaStb modelSTB = DawgHouseRestHelper.getInstance().createTestStb(id, model, TestConstants.INIT_MODEL_CAPS,
            TestConstants.INIT_MODEL_FAMILY, null);
        TestContext.getCurrent().set(DawgHouseConstants.CONTEXT_STB_MODEL, modelSTB);
        boolean result = DawgHouseRestHelper.getInstance().addSTBModelToDawg(model, TestConstants.INIT_MODEL_CAPS,
            TestConstants.INIT_MODEL_FAMILY);
        Assert.assertTrue(result, "Failed to add STB model to dawg house");
        LOGGER.info("Has successfully sent POST request to 'add STB model' with STB id {}, STB model {}", id, model);
    }
}
| |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.rest.api.service;
import static java.util.Arrays.asList;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.io.Resources;
import io.gravitee.common.http.HttpMethod;
import io.gravitee.definition.model.Endpoint;
import io.gravitee.definition.model.Rule;
import io.gravitee.definition.model.VirtualHost;
import io.gravitee.policy.api.swagger.Policy;
import io.gravitee.rest.api.model.*;
import io.gravitee.rest.api.model.api.SwaggerApiEntity;
import io.gravitee.rest.api.model.api.UpdateApiEntity;
import io.gravitee.rest.api.service.impl.SwaggerServiceImpl;
import io.gravitee.rest.api.service.impl.swagger.policy.PolicyOperationVisitor;
import io.gravitee.rest.api.service.impl.swagger.policy.PolicyOperationVisitorManager;
import io.gravitee.rest.api.service.impl.swagger.policy.impl.OAIPolicyOperationVisitor;
import io.gravitee.rest.api.service.impl.swagger.visitor.v3.OAIOperationVisitor;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
import java.util.stream.Collectors;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatchers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
/**
 * Tests of {@link SwaggerServiceImpl#createAPI(ImportSwaggerDescriptorEntity)} covering Swagger v1,
 * Swagger v2 and OpenAPI v3 descriptors, imported either inline or by URL, including
 * vendor-extension handling (virtual hosts, categories, groups, labels, tags, properties,
 * metadata, visibility, picture).
 *
 * @author David BRASSELY (david.brassely at graviteesource.com)
 * @author GraviteeSource Team
 */
@RunWith(MockitoJUnitRunner.class)
public class SwaggerService_CreateAPITest {

    @Mock
    private PolicyOperationVisitorManager policyOperationVisitorManager;

    @Mock
    private GroupService groupService;

    @Mock
    private TagService tagService;

    @InjectMocks
    protected SwaggerServiceImpl swaggerService;

    /**
     * Stubs the collaborators: every "mock" policy lookup resolves to a visitor producing a
     * {@link Policy}, group names "group1"/"group2" resolve to matching groups, and the tag
     * lookup returns the two tags referenced by the extension fixtures.
     */
    @Before
    public void setup() {
        PolicyOperationVisitor swaggerPolicyOperationVisitor = mock(PolicyOperationVisitor.class);
        when(swaggerPolicyOperationVisitor.getId()).thenReturn("mock");
        PolicyOperationVisitor oaiPolicyOperationVisitor = mock(PolicyOperationVisitor.class);
        when(oaiPolicyOperationVisitor.getId()).thenReturn("mock");
        when(policyOperationVisitorManager.getPolicyVisitors())
            .thenReturn(asList(swaggerPolicyOperationVisitor, oaiPolicyOperationVisitor));
        // Any OAI visitor lookup yields a visitor that attaches a policy to each visited operation.
        OAIOperationVisitor op = mock(OAIPolicyOperationVisitor.class);
        when(op.visit(any(), any())).thenReturn(Optional.of(new Policy()));
        when(policyOperationVisitorManager.getOAIOperationVisitor(anyString())).thenReturn(op);
        GroupEntity grp1 = new GroupEntity();
        grp1.setId("group1");
        GroupEntity grp2 = new GroupEntity();
        grp2.setId("group2");
        when(groupService.findByName("group1")).thenReturn(Arrays.asList(grp1));
        when(groupService.findByName("group2")).thenReturn(Arrays.asList(grp2));
        TagEntity tag1 = new TagEntity();
        tag1.setId("tagId1");
        tag1.setName("tag1");
        TagEntity tag2 = new TagEntity();
        tag2.setId("tagId2");
        tag2.setName("tag2");
        when(tagService.findByReference(any(), any())).thenReturn(Arrays.asList(tag1, tag2));
    }

    // Swagger v1
    @Test
    public void shouldPrepareAPIFromSwaggerV1_URL_json() throws IOException {
        validate(prepareUrl("io/gravitee/rest/api/management/service/swagger-v1.json"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV1_Inline_json() throws IOException {
        validate(prepareInline("io/gravitee/rest/api/management/service/swagger-v1.json", true));
    }

    // Swagger v2
    @Test
    public void shouldPrepareAPIFromSwaggerV2_URL_json() throws IOException {
        validate(prepareUrl("io/gravitee/rest/api/management/service/swagger-v2.json"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV2_Inline_json() throws IOException {
        validate(prepareInline("io/gravitee/rest/api/management/service/swagger-v2.json", true));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV2_URL_json_extensions() throws IOException {
        final SwaggerApiEntity swaggerApiEntity = prepareUrl("io/gravitee/rest/api/management/service/swagger-withExtensions-v2.json");
        validate(swaggerApiEntity);
        validateExtensions(swaggerApiEntity);
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV2_URL_yaml() throws IOException {
        validate(prepareUrl("io/gravitee/rest/api/management/service/swagger-v2.yaml"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV2_Inline_yaml() throws IOException {
        validate(prepareInline("io/gravitee/rest/api/management/service/swagger-v2.yaml", true));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV2_URL_yaml_extensions() throws IOException {
        final SwaggerApiEntity swaggerApiEntity = prepareUrl("io/gravitee/rest/api/management/service/swagger-withExtensions-v2.yaml");
        validate(swaggerApiEntity);
        validateExtensions(swaggerApiEntity);
    }

    // OpenAPI
    @Test
    public void shouldPrepareAPIFromSwaggerV3_URL_json() throws IOException {
        validate(prepareUrl("io/gravitee/rest/api/management/service/openapi.json"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3_Inline_json() throws IOException {
        validate(prepareInline("io/gravitee/rest/api/management/service/openapi.json", true));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3_URL_json_extensions() throws IOException {
        final SwaggerApiEntity swaggerApiEntity = prepareUrl("io/gravitee/rest/api/management/service/openapi-withExtensions.json");
        validate(swaggerApiEntity);
        validateExtensions(swaggerApiEntity);
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3_URL_yaml() throws IOException {
        validate(prepareUrl("io/gravitee/rest/api/management/service/openapi.yaml"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3_Inline_yaml() throws IOException {
        validate(prepareInline("io/gravitee/rest/api/management/service/openapi.yaml", true));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3_URL_yaml_extensions() throws IOException {
        final SwaggerApiEntity swaggerApiEntity = prepareUrl("io/gravitee/rest/api/management/service/openapi-withExtensions.yaml");
        validate(swaggerApiEntity);
        validateExtensions(swaggerApiEntity);
    }

    /**
     * Asserts that the gravitee vendor extensions of the *-withExtensions fixtures were mapped
     * onto the generated API: virtual host, categories, groups, labels, tags, properties,
     * metadata, visibility and picture.
     */
    private void validateExtensions(UpdateApiEntity updateApiEntity) {
        final List<VirtualHost> virtualHosts = updateApiEntity.getProxy().getVirtualHosts();
        assertEquals(1, virtualHosts.size());
        VirtualHost vHost = virtualHosts.get(0);
        assertEquals("myHost", vHost.getHost());
        assertEquals("myPath", vHost.getPath());
        assertEquals(false, vHost.isOverrideEntrypoint());
        final Set<String> categories = updateApiEntity.getCategories();
        assertEquals(2, categories.size());
        assertTrue(categories.containsAll(asList("cat1", "cat2")));
        final Set<String> groups = updateApiEntity.getGroups();
        assertEquals(2, groups.size());
        assertTrue(groups.containsAll(asList("group1", "group2")));
        final List<String> labels = updateApiEntity.getLabels();
        assertEquals(2, labels.size());
        assertTrue(labels.containsAll(asList("label1", "label2")));
        final Set<String> tags = updateApiEntity.getTags();
        assertEquals(2, tags.size());
        assertTrue(tags.containsAll(asList("tagId1", "tagId2")));
        final Map<String, String> properties = updateApiEntity.getProperties().getValues();
        assertEquals(2, properties.size());
        assertTrue(properties.keySet().containsAll(asList("prop1", "prop2")));
        assertTrue(properties.values().containsAll(asList("propValue1", "propValue2")));
        final Map<String, String> metadata = updateApiEntity
            .getMetadata()
            .stream()
            .collect(Collectors.toMap(ApiMetadataEntity::getName, ApiMetadataEntity::getValue));
        assertEquals(2, metadata.size());
        assertTrue(metadata.keySet().containsAll(asList("meta1", "meta2")));
        assertTrue(metadata.values().containsAll(asList("1234", "metaValue2")));
        assertEquals(Visibility.PRIVATE, updateApiEntity.getVisibility());
        assertEquals("data:image/png;base64,XXXXXXX", updateApiEntity.getPicture());
    }

    /** Shared assertions for the plain (non-extension) swagger fixtures. */
    protected void validate(SwaggerApiEntity api) {
        assertEquals("1.2.3", api.getVersion());
        assertEquals("Gravitee.io Swagger API", api.getName());
        assertEquals(
            "https://demo.gravitee.io/gateway/echo",
            api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget()
        );
        validatePolicies(api, 2, 0, asList("/pets", "/pets/:petId"));
    }

    /**
     * Asserts the number of generated policy paths, the expected path keys, and the total count
     * of HTTP methods across paths (methods are deduplicated per path, matching the original
     * per-path {@code Set} semantics).
     */
    protected void validatePolicies(SwaggerApiEntity api, int expectedPathSize, int expectedOperationSize, List<String> expectedPaths) {
        assertEquals(expectedPathSize, api.getPaths().size());
        assertTrue(api.getPaths().keySet().containsAll(expectedPaths));
        // For each path: distinct set of HTTP methods over all its rules, then flattened to a list.
        // (Replaces two verbose anonymous com.google.common.base.Function classes and a raw-typed
        // `new ArrayList(...)` with equivalent stream lambdas.)
        List<HttpMethod> operations = api
            .getPaths()
            .values()
            .stream()
            .map(rules -> rules.stream().flatMap(rule -> rule.getMethods().stream()).collect(Collectors.toSet()))
            .flatMap(Collection::stream)
            .collect(Collectors.toList());
        assertEquals(expectedOperationSize, operations.size());
    }

    /** Imports {@code file} inline without policy paths or policies. */
    private SwaggerApiEntity prepareInline(String file) throws IOException {
        return prepareInline(file, false, false);
    }

    /** Imports {@code file} inline, optionally generating policy paths. */
    private SwaggerApiEntity prepareInline(String file, boolean withPolicyPaths) throws IOException {
        return prepareInline(file, withPolicyPaths, false);
    }

    /**
     * Reads the classpath resource {@code file} and imports it as an INLINE swagger descriptor.
     *
     * @param withPolicyPaths whether policy paths should be generated from the descriptor
     * @param withPolicies    whether the "mock" policy should be applied to operations
     */
    protected SwaggerApiEntity prepareInline(String file, boolean withPolicyPaths, boolean withPolicies) throws IOException {
        URL url = Resources.getResource(file);
        String descriptor = Resources.toString(url, Charsets.UTF_8);
        ImportSwaggerDescriptorEntity swaggerDescriptor = new ImportSwaggerDescriptorEntity();
        swaggerDescriptor.setType(ImportSwaggerDescriptorEntity.Type.INLINE);
        swaggerDescriptor.setPayload(descriptor);
        swaggerDescriptor.setWithPolicyPaths(withPolicyPaths);
        if (withPolicies) {
            swaggerDescriptor.setWithPolicies(asList("mock"));
        }
        return this.createAPI(swaggerDescriptor);
    }

    /**
     * Imports the classpath resource {@code file} as a URL-type swagger descriptor with policy
     * paths and path mapping enabled; the payload is the resource's filesystem path.
     */
    private SwaggerApiEntity prepareUrl(String file) {
        URL url = Resources.getResource(file);
        ImportSwaggerDescriptorEntity swaggerDescriptor = new ImportSwaggerDescriptorEntity();
        swaggerDescriptor.setType(ImportSwaggerDescriptorEntity.Type.URL);
        swaggerDescriptor.setWithPolicyPaths(true);
        swaggerDescriptor.setWithPathMapping(true);
        // swaggerDescriptor.setWithPolicies(asList("mock"));
        try {
            swaggerDescriptor.setPayload(url.toURI().getPath());
        } catch (URISyntaxException e) {
            fail(e.getMessage());
        }
        return this.createAPI(swaggerDescriptor);
    }

    /** Single entry point to the service under test; overridable by subclasses. */
    protected SwaggerApiEntity createAPI(ImportSwaggerDescriptorEntity swaggerDescriptor) {
        return swaggerService.createAPI(swaggerDescriptor);
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithExamples() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/api-with-examples.yaml", true, true);
        assertEquals("2.0.0", api.getVersion());
        assertEquals("Simple API overview", api.getName());
        assertEquals("simpleapioverview", api.getProxy().getVirtualHosts().get(0).getPath());
        assertEquals("/", api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget());
        validatePolicies(api, 2, 2, asList("/", "/v2"));
        validateRules(api, "/", 2, asList(HttpMethod.GET), "List API versions");
    }

    /**
     * Asserts the rule count for {@code path} and that the first rule carries the expected HTTP
     * methods and description.
     */
    protected void validateRules(
        SwaggerApiEntity api,
        String path,
        int expectedRuleSize,
        List<HttpMethod> firstRuleMethods,
        String firstRuleDescription
    ) {
        List<Rule> rules = api.getPaths().get(path);
        assertEquals(expectedRuleSize, rules.size());
        Rule rule = rules.get(0);
        assertTrue(rule.getMethods().containsAll(firstRuleMethods));
        assertEquals(firstRuleDescription, rule.getDescription());
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithSimpleTypedExamples() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/callback-example.yaml", true, true);
        assertEquals("1.0.0", api.getVersion());
        assertEquals("Callback Example", api.getName());
        assertEquals("callbackexample", api.getProxy().getVirtualHosts().get(0).getPath());
        assertEquals("/", api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget());
        validatePolicies(api, 1, 1, asList("/streams"));
        validateRules(api, "/streams", 2, asList(HttpMethod.POST), "subscribes a client to receive out-of-band data");
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithLinks() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/link-example.yaml", true, true);
        assertEquals("1.0.0", api.getVersion());
        assertEquals("Link Example", api.getName());
        assertEquals("linkexample", api.getProxy().getVirtualHosts().get(0).getPath());
        assertEquals("/", api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget());
        validatePolicies(
            api,
            6,
            6,
            asList(
                "/2.0/users/:username",
                "/2.0/repositories/:username/:slug",
                "/2.0/repositories/:username/:slug/pullrequests",
                "/2.0/repositories/:username/:slug/pullrequests/:pid",
                "/2.0/repositories/:username/:slug/pullrequests/:pid/merge"
            )
        );
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithPetstore() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/petstore.yaml", true, true);
        assertEquals("1.0.0", api.getVersion());
        assertEquals("/v1", api.getProxy().getVirtualHosts().get(0).getPath());
        assertEquals("Swagger Petstore", api.getName());
        assertEquals(
            "http://petstore.swagger.io/v1",
            api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget()
        );
        validatePolicies(api, 2, 3, asList("/pets", "/pets/:petId"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithPetstoreExpanded() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/petstore-expanded.yaml", true, true);
        assertEquals("1.0.0", api.getVersion());
        assertEquals("Swagger Petstore", api.getName());
        assertEquals("/api", api.getProxy().getVirtualHosts().get(0).getPath());
        assertEquals(
            "http://petstore.swagger.io/api",
            api.getProxy().getGroups().iterator().next().getEndpoints().iterator().next().getTarget()
        );
        validatePolicies(api, 2, 4, asList("/pets", "/pets/:id"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithExample() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/uspto.yaml", true, true);
        assertEquals("1.0.0", api.getVersion());
        assertEquals("USPTO Data Set API", api.getName());
        assertEquals("/ds-api", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        // BUG FIX: was assertEquals(2, 4, endpoints.size()) — the 3-int-arg call resolves to the
        // assertEquals(double, double, delta) overload and could never fail. The fixture declares
        // exactly the two (http/https) servers asserted below.
        assertEquals(2, endpoints.size());
        assertTrue(endpoints.contains("http://developer.uspto.gov/ds-api"));
        assertTrue(endpoints.contains("https://developer.uspto.gov/ds-api"));
        validatePolicies(api, 3, 3, asList("/", "/:dataset/:version/fields", "/:dataset/:version/records"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithEnumExample() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/enum-example.yml", true, true);
        assertEquals("v1", api.getVersion());
        assertEquals("Gravitee Import Mock Example", api.getName());
        assertEquals("graviteeimportmockexample", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        assertEquals(1, endpoints.size());
        assertTrue(endpoints.contains("/"));
        validatePolicies(api, 1, 1, asList("/"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithMonoServer() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/openapi-monoserver.yaml", true);
        assertEquals("/v1", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        // BUG FIX: was assertEquals(1, 2, endpoints.size()) — the 3-int-arg call resolves to the
        // assertEquals(double, double, delta) overload and could never fail. A mono-server
        // descriptor yields exactly one endpoint.
        assertEquals(1, endpoints.size());
        assertTrue(endpoints.contains("https://development.gigantic-server.com/v1"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithMultiServer() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/openapi-multiserver.yaml", true);
        assertEquals("/v1", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        assertEquals(3, endpoints.size());
        assertTrue(endpoints.contains("https://development.gigantic-server.com/v1"));
        assertTrue(endpoints.contains("https://staging.gigantic-server.com/v1"));
        assertTrue(endpoints.contains("https://api.gigantic-server.com/v1"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithNoServer() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/openapi-noserver.yaml", true);
        assertEquals("noserver", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        assertEquals(1, endpoints.size());
        assertTrue(endpoints.contains("/"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithVariablesInServer() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/openapi-variables-in-server.yaml", true);
        assertEquals("/v2", api.getProxy().getVirtualHosts().get(0).getPath());
        final List<String> endpoints = api
            .getProxy()
            .getGroups()
            .iterator()
            .next()
            .getEndpoints()
            .stream()
            .map(Endpoint::getTarget)
            .collect(Collectors.toList());
        assertEquals(2, endpoints.size());
        assertTrue(endpoints.contains("https://demo.gigantic-server.com:443/v2"));
        assertTrue(endpoints.contains("https://demo.gigantic-server.com:8443/v2"));
    }

    @Test
    public void shouldPrepareAPIFromSwaggerV3WithComplexReferences() throws IOException {
        final SwaggerApiEntity api = prepareInline("io/gravitee/rest/api/management/service/mock/json-api.yml", true, true);
        validatePolicies(api, 2, 5, asList("/drives"));
    }
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.web.position;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertTrue;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.springframework.core.io.FileSystemResourceLoader;
import org.springframework.mock.web.MockServletContext;
import org.testng.annotations.BeforeMethod;
import org.threeten.bp.LocalDate;
import org.threeten.bp.LocalTime;
import org.threeten.bp.OffsetTime;
import org.threeten.bp.ZoneOffset;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.position.Counterparty;
import com.opengamma.engine.InMemorySecuritySource;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.ExternalScheme;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.master.config.impl.InMemoryConfigMaster;
import com.opengamma.master.config.impl.MasterConfigSource;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.master.historicaltimeseries.impl.DefaultHistoricalTimeSeriesResolver;
import com.opengamma.master.historicaltimeseries.impl.DefaultHistoricalTimeSeriesSelector;
import com.opengamma.master.historicaltimeseries.impl.InMemoryHistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.impl.MasterHistoricalTimeSeriesSource;
import com.opengamma.master.position.ManageablePosition;
import com.opengamma.master.position.ManageableTrade;
import com.opengamma.master.position.PositionDocument;
import com.opengamma.master.position.PositionMaster;
import com.opengamma.master.position.PositionSearchRequest;
import com.opengamma.master.position.PositionSearchResult;
import com.opengamma.master.position.impl.InMemoryPositionMaster;
import com.opengamma.master.security.ManageableSecurityLink;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityLoader;
import com.opengamma.master.security.SecurityLoaderRequest;
import com.opengamma.master.security.SecurityLoaderResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.impl.AbstractSecurityLoader;
import com.opengamma.master.security.impl.InMemorySecurityMaster;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;
import com.opengamma.web.FreemarkerOutputter;
import com.opengamma.web.MockUriInfo;
import com.opengamma.web.WebResourceTestUtils;
import freemarker.template.Configuration;
/**
 * Base class for {@code WebPositionResource} tests. Wires in-memory masters and sources, a mock
 * security loader, and a Freemarker-backed {@link WebPositionsResource}, and provides trade
 * fixtures plus shared assertions for subclasses.
 */
public abstract class AbstractWebPositionResourceTestCase {
// Counterparty attached to every fixture trade.
protected static final ExternalId COUNTER_PARTY = ExternalId.of(Counterparty.DEFAULT_SCHEME, "BACS");
// Fixed +01:00 offset used for all fixture trade times.
protected static final ZoneOffset ZONE_OFFSET = ZoneOffset.of("+0100");
// The single equity security populated into the security master/source.
protected static final EquitySecurity EQUITY_SECURITY = WebResourceTestUtils.getEquitySecurity();
// Bloomberg-ticker id of EQUITY_SECURITY; every fixture trade/position references it.
protected static final ExternalId SEC_ID = EQUITY_SECURITY.getExternalIdBundle().getExternalId(ExternalSchemes.BLOOMBERG_TICKER);
protected static final ManageableSecurityLink SECURITY_LINK = new ManageableSecurityLink(EQUITY_SECURITY.getExternalIdBundle());
// JSON payload representing a position with an empty trade list.
protected static final String EMPTY_TRADES = "{\"trades\" : []}";
protected static final Long QUANTITY = Long.valueOf(100);
protected SecurityMaster _secMaster;
protected SecurityLoader _secLoader;
protected HistoricalTimeSeriesSource _htsSource;
protected WebPositionsResource _webPositionsResource;
protected InMemorySecuritySource _securitySource;
protected PositionMaster _positionMaster;
protected List<ManageableTrade> _trades;
protected UriInfo _uriInfo;
protected Map<ExternalScheme, String> _externalSchemes;
/**
 * Builds a fresh in-memory environment before each test: security/position masters, an HTS
 * source, a loader that delegates bulk loads to a no-op, and the resource under test wired to
 * a Freemarker-configured mock servlet context.
 */
@BeforeMethod(groups = TestGroup.UNIT)
public void setUp() throws Exception {
_uriInfo = new MockUriInfo(true);
_trades = getTrades();
_secMaster = new InMemorySecurityMaster(new ObjectIdSupplier("Mock"));
_positionMaster = new InMemoryPositionMaster();
final MasterConfigSource configSource = new MasterConfigSource(new InMemoryConfigMaster());
final InMemoryHistoricalTimeSeriesMaster htsMaster = new InMemoryHistoricalTimeSeriesMaster();
final HistoricalTimeSeriesResolver htsResolver = new DefaultHistoricalTimeSeriesResolver(new DefaultHistoricalTimeSeriesSelector(configSource), htsMaster);
_htsSource = new MasterHistoricalTimeSeriesSource(htsMaster, htsResolver);
_securitySource = new InMemorySecuritySource();
// Loader stub: bulk load returns null, so loadSecurity yields null for every bundle.
_secLoader = new AbstractSecurityLoader() {
@Override
protected SecurityLoaderResult doBulkLoad(final SecurityLoaderRequest request) {
// No external loading in tests.
return null;
}
@Override
public UniqueId loadSecurity(final ExternalIdBundle externalIdBundle) {
final SecurityLoaderRequest request = SecurityLoaderRequest.create(externalIdBundle);
final SecurityLoaderResult result = loadSecurities(request);
if (result == null || result.getResultMap().size() == 0) {
return null;
}
return Iterables.getOnlyElement(result.getResultMap().values());
}
};
populateSecMaster();
_externalSchemes = new HashMap<>();
_externalSchemes.put(ExternalSchemes.OG_SYNTHETIC_TICKER, ExternalSchemes.OG_SYNTHETIC_TICKER.getName());
_webPositionsResource = new WebPositionsResource(_positionMaster, _secLoader, _securitySource, _htsSource, _externalSchemes);
// Freemarker must be initialised on the servlet context before the resource can render pages.
final MockServletContext sc = new MockServletContext("/web-engine", new FileSystemResourceLoader());
final Configuration cfg = FreemarkerOutputter.createConfiguration();
cfg.setServletContextForTemplateLoading(sc, "WEB-INF/pages");
FreemarkerOutputter.init(sc, cfg);
_webPositionsResource.setServletContext(sc);
_webPositionsResource.setUriInfo(_uriInfo);
}
/**
 * Builds the three fixture trades (quantities 50/60/70 on consecutive December 2011 dates),
 * each with premium amount, currency, date and time set.
 */
protected List<ManageableTrade> getTrades() {
final List<ManageableTrade> trades = Lists.newArrayList();
final ManageableTrade trade1 = new ManageableTrade(BigDecimal.valueOf(50), SEC_ID, LocalDate.parse("2011-12-07"), OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET), COUNTER_PARTY);
trade1.setPremium(10.0);
trade1.setPremiumCurrency(Currency.USD);
trade1.setPremiumDate(LocalDate.parse("2011-12-08"));
trade1.setPremiumTime(OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET));
trades.add(trade1);
final ManageableTrade trade2 = new ManageableTrade(BigDecimal.valueOf(60), SEC_ID, LocalDate.parse("2011-12-08"), OffsetTime.of(LocalTime.of(16, 4), ZONE_OFFSET), COUNTER_PARTY);
trade2.setPremium(20.0);
trade2.setPremiumCurrency(Currency.USD);
trade2.setPremiumDate(LocalDate.parse("2011-12-09"));
trade2.setPremiumTime(OffsetTime.of(LocalTime.of(16, 4), ZONE_OFFSET));
trades.add(trade2);
final ManageableTrade trade3 = new ManageableTrade(BigDecimal.valueOf(70), SEC_ID, LocalDate.parse("2011-12-09"), OffsetTime.of(LocalTime.of(17, 4), ZONE_OFFSET), COUNTER_PARTY);
trade3.setPremium(30.0);
trade3.setPremiumCurrency(Currency.USD);
trade3.setPremiumDate(LocalDate.parse("2011-12-10"));
trade3.setPremiumTime(OffsetTime.of(LocalTime.of(17, 4), ZONE_OFFSET));
trades.add(trade3);
return trades;
}
/** Adds the equity security to the master and mirrors the stored version into the source. */
protected void populateSecMaster() {
final SecurityDocument added = _secMaster.add(new SecurityDocument(EQUITY_SECURITY));
_securitySource.addSecurity(added.getSecurity());
}
/** Stores one position per fixture trade (quantity taken from the trade) in the position master. */
protected void populatePositionMaster() {
for (final ManageableTrade trade : _trades) {
final ManageablePosition manageablePosition = new ManageablePosition(trade.getQuantity(), SEC_ID);
manageablePosition.addTrade(trade);
final PositionDocument positionDocument = new PositionDocument(manageablePosition);
_positionMaster.add(positionDocument);
}
}
/** Loads the expected trades JSON fixture from the classpath. */
protected String getTradesJson() throws Exception {
return WebResourceTestUtils.loadJson("com/opengamma/web/position/tradesJson.txt").toString();
}
/**
 * Asserts the position master holds exactly one position of quantity TEN, linked to the fixture
 * security, with no trades.
 */
protected void assertPositionWithNoTrades() {
final PositionSearchRequest request = new PositionSearchRequest();
final PositionSearchResult searchResult = _positionMaster.search(request);
assertNotNull(searchResult);
final List<PositionDocument> docs = searchResult.getDocuments();
assertNotNull(docs);
assertEquals(1, docs.size());
final ManageablePosition position = docs.get(0).getPosition();
assertEquals(BigDecimal.TEN, position.getQuantity());
assertEquals(SECURITY_LINK, position.getSecurityLink());
assertTrue(position.getTrades().isEmpty());
}
/**
 * Asserts the position master holds exactly one position of quantity TEN whose three trades
 * match the fixture trades (after normalising ids assigned by the master).
 */
protected void assertPositionAndTrades() {
final PositionSearchRequest request = new PositionSearchRequest();
final PositionSearchResult searchResult = _positionMaster.search(request);
assertNotNull(searchResult);
final List<PositionDocument> docs = searchResult.getDocuments();
assertNotNull(docs);
assertEquals(1, docs.size());
final ManageablePosition position = docs.get(0).getPosition();
assertEquals(BigDecimal.TEN, position.getQuantity());
assertEquals(SECURITY_LINK, position.getSecurityLink());
final List<ManageableTrade> trades = position.getTrades();
assertEquals(3, trades.size());
for (final ManageableTrade trade : trades) {
assertEquals(SECURITY_LINK, trade.getSecurityLink());
// Clear master-assigned state so equals() can compare against the raw fixtures.
trade.setUniqueId(null);
trade.setSecurityLink(new ManageableSecurityLink(SEC_ID));
trade.setParentPositionId(null);
assertTrue(_trades.contains(trade));
}
}
/** Adds a single-trade position (quantity 50) to the master and returns its unique id. */
protected UniqueId addPosition() {
final ManageableTrade origTrade = new ManageableTrade(BigDecimal.valueOf(50), SEC_ID, LocalDate.parse("2011-12-07"), OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET), COUNTER_PARTY);
origTrade.setPremium(10.0);
origTrade.setPremiumCurrency(Currency.USD);
origTrade.setPremiumDate(LocalDate.parse("2011-12-08"));
origTrade.setPremiumTime(OffsetTime.of(LocalTime.of(15, 4), ZONE_OFFSET));
final ManageablePosition manageablePosition = new ManageablePosition(origTrade.getQuantity(), EQUITY_SECURITY.getExternalIdBundle());
manageablePosition.addTrade(origTrade);
final PositionDocument addedPos = _positionMaster.add(new PositionDocument(manageablePosition));
final UniqueId uid = addedPos.getUniqueId();
return uid;
}
/** Extracts the Location header from a JAX-RS response as a string. */
protected String getActualURL(final Response response) {
return response.getMetadata().getFirst("Location").toString();
}
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.oauth2.client.web;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.oauth2.client.ClientAuthorizationException;
import org.springframework.security.oauth2.client.OAuth2AuthorizationContext;
import org.springframework.security.oauth2.client.OAuth2AuthorizationFailureHandler;
import org.springframework.security.oauth2.client.OAuth2AuthorizationSuccessHandler;
import org.springframework.security.oauth2.client.OAuth2AuthorizeRequest;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClientProvider;
import org.springframework.security.oauth2.client.RemoveAuthorizedClientOAuth2AuthorizationFailureHandler;
import org.springframework.security.oauth2.client.registration.ClientRegistration;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.registration.TestClientRegistrations;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2ErrorCodes;
import org.springframework.security.oauth2.core.TestOAuth2AccessTokens;
import org.springframework.security.oauth2.core.TestOAuth2RefreshTokens;
import org.springframework.security.oauth2.core.endpoint.OAuth2ParameterNames;
import org.springframework.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
/**
* Tests for {@link DefaultOAuth2AuthorizedClientManager}.
*
* @author Joe Grandja
*/
public class DefaultOAuth2AuthorizedClientManagerTests {

    // Mocked collaborators wired into the manager under test in setup().
    private ClientRegistrationRepository clientRegistrationRepository;
    private OAuth2AuthorizedClientRepository authorizedClientRepository;
    private OAuth2AuthorizedClientProvider authorizedClientProvider;
    // NOTE(review): raw Function type; presumably Function<OAuth2AuthorizeRequest, Map<String, Object>> — confirm against the manager's setter.
    private Function contextAttributesMapper;
    private OAuth2AuthorizationSuccessHandler authorizationSuccessHandler;
    private OAuth2AuthorizationFailureHandler authorizationFailureHandler;
    // Subject under test.
    private DefaultOAuth2AuthorizedClientManager authorizedClientManager;
    // Shared fixtures rebuilt before each test.
    private ClientRegistration clientRegistration;
    private Authentication principal;
    private OAuth2AuthorizedClient authorizedClient;
    private MockHttpServletRequest request;
    private MockHttpServletResponse response;
    // Captures the OAuth2AuthorizationContext the manager hands to the provider so tests can inspect it.
    private ArgumentCaptor<OAuth2AuthorizationContext> authorizationContextCaptor;

    @SuppressWarnings("unchecked")
    @Before
    public void setup() {
        this.clientRegistrationRepository = mock(ClientRegistrationRepository.class);
        this.authorizedClientRepository = mock(OAuth2AuthorizedClientRepository.class);
        this.authorizedClientProvider = mock(OAuth2AuthorizedClientProvider.class);
        this.contextAttributesMapper = mock(Function.class);
        // Spy on a real success handler that persists the client via the (mocked) repository,
        // pulling the servlet request/response out of the attributes map.
        this.authorizationSuccessHandler = spy(new OAuth2AuthorizationSuccessHandler() {
            @Override
            public void onAuthorizationSuccess(OAuth2AuthorizedClient authorizedClient, Authentication principal,
                    Map<String, Object> attributes) {
                DefaultOAuth2AuthorizedClientManagerTests.this.authorizedClientRepository.saveAuthorizedClient(
                        authorizedClient, principal,
                        (HttpServletRequest) attributes.get(HttpServletRequest.class.getName()),
                        (HttpServletResponse) attributes.get(HttpServletResponse.class.getName()));
            }
        });
        // Spy on a real failure handler that removes the client for matching error codes.
        this.authorizationFailureHandler = spy(
                new RemoveAuthorizedClientOAuth2AuthorizationFailureHandler((clientRegistrationId, principal,
                        attributes) -> this.authorizedClientRepository.removeAuthorizedClient(clientRegistrationId,
                                principal, (HttpServletRequest) attributes.get(HttpServletRequest.class.getName()),
                                (HttpServletResponse) attributes.get(HttpServletResponse.class.getName()))));
        this.authorizedClientManager = new DefaultOAuth2AuthorizedClientManager(this.clientRegistrationRepository,
                this.authorizedClientRepository);
        this.authorizedClientManager.setAuthorizedClientProvider(this.authorizedClientProvider);
        this.authorizedClientManager.setContextAttributesMapper(this.contextAttributesMapper);
        this.authorizedClientManager.setAuthorizationSuccessHandler(this.authorizationSuccessHandler);
        this.authorizedClientManager.setAuthorizationFailureHandler(this.authorizationFailureHandler);
        this.clientRegistration = TestClientRegistrations.clientRegistration().build();
        this.principal = new TestingAuthenticationToken("principal", "password");
        this.authorizedClient = new OAuth2AuthorizedClient(this.clientRegistration, this.principal.getName(),
                TestOAuth2AccessTokens.scopes("read", "write"), TestOAuth2RefreshTokens.refreshToken());
        this.request = new MockHttpServletRequest();
        this.response = new MockHttpServletResponse();
        this.authorizationContextCaptor = ArgumentCaptor.forClass(OAuth2AuthorizationContext.class);
    }

    // --- constructor / setter argument validation ---

    @Test
    public void constructorWhenClientRegistrationRepositoryIsNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> new DefaultOAuth2AuthorizedClientManager(null, this.authorizedClientRepository))
                .withMessage("clientRegistrationRepository cannot be null");
    }

    @Test
    public void constructorWhenOAuth2AuthorizedClientRepositoryIsNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> new DefaultOAuth2AuthorizedClientManager(this.clientRegistrationRepository, null))
                .withMessage("authorizedClientRepository cannot be null");
    }

    @Test
    public void setAuthorizedClientProviderWhenNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> this.authorizedClientManager.setAuthorizedClientProvider(null))
                .withMessage("authorizedClientProvider cannot be null");
    }

    @Test
    public void setContextAttributesMapperWhenNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> this.authorizedClientManager.setContextAttributesMapper(null))
                .withMessage("contextAttributesMapper cannot be null");
    }

    @Test
    public void setAuthorizationSuccessHandlerWhenNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> this.authorizedClientManager.setAuthorizationSuccessHandler(null))
                .withMessage("authorizationSuccessHandler cannot be null");
    }

    @Test
    public void setAuthorizationFailureHandlerWhenNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException()
                .isThrownBy(() -> this.authorizedClientManager.setAuthorizationFailureHandler(null))
                .withMessage("authorizationFailureHandler cannot be null");
    }

    // --- authorize() request validation ---

    @Test
    public void authorizeWhenRequestIsNullThenThrowIllegalArgumentException() {
        assertThatIllegalArgumentException().isThrownBy(() -> this.authorizedClientManager.authorize(null))
                .withMessage("authorizeRequest cannot be null");
    }

    // No HttpServletRequest attribute on the request -> servletRequest null-check fires.
    @Test
    public void authorizeWhenHttpServletRequestIsNullThenThrowIllegalArgumentException() {
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId()).principal(this.principal)
                .build();
        assertThatIllegalArgumentException().isThrownBy(() -> this.authorizedClientManager.authorize(authorizeRequest))
                .withMessage("servletRequest cannot be null");
    }

    // Request attribute present but no HttpServletResponse attribute -> servletResponse null-check fires.
    @Test
    public void authorizeWhenHttpServletResponseIsNullThenThrowIllegalArgumentException() {
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId()).principal(this.principal)
                .attribute(HttpServletRequest.class.getName(), this.request).build();
        assertThatIllegalArgumentException().isThrownBy(() -> this.authorizedClientManager.authorize(authorizeRequest))
                .withMessage("servletResponse cannot be null");
    }

    // Repository mock returns null for the unknown id, so the manager must reject it.
    @Test
    public void authorizeWhenClientRegistrationNotFoundThenThrowIllegalArgumentException() {
        // @formatter:off
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId("invalid-registration-id")
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        assertThatIllegalArgumentException().isThrownBy(() -> this.authorizedClientManager.authorize(authorizeRequest))
                .withMessage("Could not find ClientRegistration with id 'invalid-registration-id'");
    }

    // Provider mock returns null (unsupported grant) -> no client, no success callback, nothing saved.
    @SuppressWarnings("unchecked")
    @Test
    public void authorizeWhenNotAuthorizedAndUnsupportedProviderThenNotAuthorized() {
        given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId())))
                .willReturn(this.clientRegistration);
        // @formatter:off
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId())
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        verify(this.contextAttributesMapper).apply(eq(authorizeRequest));
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration);
        assertThat(authorizationContext.getAuthorizedClient()).isNull();
        assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal);
        assertThat(authorizedClient).isNull();
        verifyNoInteractions(this.authorizationSuccessHandler);
        verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(), any(), any(), any());
    }

    // Provider authorizes -> success handler fires and the client is saved to the repository.
    @SuppressWarnings("unchecked")
    @Test
    public void authorizeWhenNotAuthorizedAndSupportedProviderThenAuthorized() {
        given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId())))
                .willReturn(this.clientRegistration);
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willReturn(this.authorizedClient);
        // @formatter:off
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId())
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        verify(this.contextAttributesMapper).apply(eq(authorizeRequest));
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration);
        assertThat(authorizationContext.getAuthorizedClient()).isNull();
        assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal);
        assertThat(authorizedClient).isSameAs(this.authorizedClient);
        verify(this.authorizationSuccessHandler).onAuthorizationSuccess(eq(this.authorizedClient), eq(this.principal),
                any());
        verify(this.authorizedClientRepository).saveAuthorizedClient(eq(this.authorizedClient), eq(this.principal),
                eq(this.request), eq(this.response));
    }

    // Client already loaded from the repository -> context carries it and the provider re-authorizes.
    @SuppressWarnings("unchecked")
    @Test
    public void authorizeWhenAuthorizedAndSupportedProviderThenReauthorized() {
        given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId())))
                .willReturn(this.clientRegistration);
        given(this.authorizedClientRepository.loadAuthorizedClient(eq(this.clientRegistration.getRegistrationId()),
                eq(this.principal), eq(this.request))).willReturn(this.authorizedClient);
        OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration,
                this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken());
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willReturn(reauthorizedClient);
        // @formatter:off
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId())
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(authorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        verify(this.contextAttributesMapper).apply(any());
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration);
        assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient);
        assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal);
        assertThat(authorizedClient).isSameAs(reauthorizedClient);
        verify(this.authorizationSuccessHandler).onAuthorizationSuccess(eq(reauthorizedClient), eq(this.principal),
                any());
        verify(this.authorizedClientRepository).saveAuthorizedClient(eq(reauthorizedClient), eq(this.principal),
                eq(this.request), eq(this.response));
    }

    // A custom contextAttributesMapper can lift username/password request parameters into the context.
    @Test
    public void authorizeWhenRequestParameterUsernamePasswordThenMappedToContext() {
        given(this.clientRegistrationRepository.findByRegistrationId(eq(this.clientRegistration.getRegistrationId())))
                .willReturn(this.clientRegistration);
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willReturn(this.authorizedClient);
        // Set custom contextAttributesMapper
        this.authorizedClientManager.setContextAttributesMapper((authorizeRequest) -> {
            Map<String, Object> contextAttributes = new HashMap<>();
            HttpServletRequest servletRequest = authorizeRequest.getAttribute(HttpServletRequest.class.getName());
            String username = servletRequest.getParameter(OAuth2ParameterNames.USERNAME);
            String password = servletRequest.getParameter(OAuth2ParameterNames.PASSWORD);
            if (StringUtils.hasText(username) && StringUtils.hasText(password)) {
                contextAttributes.put(OAuth2AuthorizationContext.USERNAME_ATTRIBUTE_NAME, username);
                contextAttributes.put(OAuth2AuthorizationContext.PASSWORD_ATTRIBUTE_NAME, password);
            }
            return contextAttributes;
        });
        this.request.addParameter(OAuth2ParameterNames.USERNAME, "username");
        this.request.addParameter(OAuth2ParameterNames.PASSWORD, "password");
        // @formatter:off
        OAuth2AuthorizeRequest authorizeRequest = OAuth2AuthorizeRequest
                .withClientRegistrationId(this.clientRegistration.getRegistrationId())
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        this.authorizedClientManager.authorize(authorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        String username = authorizationContext.getAttribute(OAuth2AuthorizationContext.USERNAME_ATTRIBUTE_NAME);
        assertThat(username).isEqualTo("username");
        String password = authorizationContext.getAttribute(OAuth2AuthorizationContext.PASSWORD_ATTRIBUTE_NAME);
        assertThat(password).isEqualTo("password");
    }

    // --- re-authorization (request built from an existing authorized client) ---

    // Provider returns null -> the existing client is returned unchanged and nothing is re-saved.
    @SuppressWarnings("unchecked")
    @Test
    public void reauthorizeWhenUnsupportedProviderThenNotReauthorized() {
        // @formatter:off
        OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient)
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(reauthorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        verify(this.contextAttributesMapper).apply(eq(reauthorizeRequest));
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration);
        assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient);
        assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal);
        assertThat(authorizedClient).isSameAs(this.authorizedClient);
        verifyNoInteractions(this.authorizationSuccessHandler);
        verify(this.authorizedClientRepository, never()).saveAuthorizedClient(any(OAuth2AuthorizedClient.class),
                eq(this.principal), eq(this.request), eq(this.response));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void reauthorizeWhenSupportedProviderThenReauthorized() {
        OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration,
                this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken());
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willReturn(reauthorizedClient);
        // @formatter:off
        OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient)
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(reauthorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        verify(this.contextAttributesMapper).apply(eq(reauthorizeRequest));
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        assertThat(authorizationContext.getClientRegistration()).isEqualTo(this.clientRegistration);
        assertThat(authorizationContext.getAuthorizedClient()).isSameAs(this.authorizedClient);
        assertThat(authorizationContext.getPrincipal()).isEqualTo(this.principal);
        assertThat(authorizedClient).isSameAs(reauthorizedClient);
        verify(this.authorizationSuccessHandler).onAuthorizationSuccess(eq(reauthorizedClient), eq(this.principal),
                any());
        verify(this.authorizedClientRepository).saveAuthorizedClient(eq(reauthorizedClient), eq(this.principal),
                eq(this.request), eq(this.response));
    }

    // The default mapper splits the "scope" request parameter into the request-scope context attribute.
    @Test
    public void reauthorizeWhenRequestParameterScopeThenMappedToContext() {
        OAuth2AuthorizedClient reauthorizedClient = new OAuth2AuthorizedClient(this.clientRegistration,
                this.principal.getName(), TestOAuth2AccessTokens.noScopes(), TestOAuth2RefreshTokens.refreshToken());
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willReturn(reauthorizedClient);
        // Override the mock with the default
        this.authorizedClientManager
                .setContextAttributesMapper(new DefaultOAuth2AuthorizedClientManager.DefaultContextAttributesMapper());
        this.request.addParameter(OAuth2ParameterNames.SCOPE, "read write");
        // @formatter:off
        OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient)
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        this.authorizedClientManager.authorize(reauthorizeRequest);
        verify(this.authorizedClientProvider).authorize(this.authorizationContextCaptor.capture());
        OAuth2AuthorizationContext authorizationContext = this.authorizationContextCaptor.getValue();
        String[] requestScopeAttribute = authorizationContext
                .getAttribute(OAuth2AuthorizationContext.REQUEST_SCOPE_ATTRIBUTE_NAME);
        assertThat(requestScopeAttribute).contains("read", "write");
    }

    // invalid_grant matches the failure handler's removable error codes -> client removed.
    @Test
    public void reauthorizeWhenErrorCodeMatchThenRemoveAuthorizedClient() {
        ClientAuthorizationException authorizationException = new ClientAuthorizationException(
                new OAuth2Error(OAuth2ErrorCodes.INVALID_GRANT, null, null),
                this.clientRegistration.getRegistrationId());
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willThrow(authorizationException);
        // @formatter:off
        OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient)
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        assertThatExceptionOfType(ClientAuthorizationException.class)
                .isThrownBy(() -> this.authorizedClientManager.authorize(reauthorizeRequest))
                .isEqualTo(authorizationException);
        verify(this.authorizationFailureHandler).onAuthorizationFailure(eq(authorizationException), eq(this.principal),
                any());
        verify(this.authorizedClientRepository).removeAuthorizedClient(eq(this.clientRegistration.getRegistrationId()),
                eq(this.principal), eq(this.request), eq(this.response));
    }

    // Non-matching error code -> failure handler is still notified but the client must be kept.
    @Test
    public void reauthorizeWhenErrorCodeDoesNotMatchThenDoNotRemoveAuthorizedClient() {
        ClientAuthorizationException authorizationException = new ClientAuthorizationException(
                new OAuth2Error("non-matching-error-code", null, null), this.clientRegistration.getRegistrationId());
        given(this.authorizedClientProvider.authorize(any(OAuth2AuthorizationContext.class)))
                .willThrow(authorizationException);
        // @formatter:off
        OAuth2AuthorizeRequest reauthorizeRequest = OAuth2AuthorizeRequest.withAuthorizedClient(this.authorizedClient)
                .principal(this.principal)
                .attributes((attrs) -> {
                    attrs.put(HttpServletRequest.class.getName(), this.request);
                    attrs.put(HttpServletResponse.class.getName(), this.response);
                })
                .build();
        // @formatter:on
        assertThatExceptionOfType(ClientAuthorizationException.class)
                .isThrownBy(() -> this.authorizedClientManager.authorize(reauthorizeRequest))
                .isEqualTo(authorizationException);
        verify(this.authorizationFailureHandler).onAuthorizationFailure(eq(authorizationException), eq(this.principal),
                any());
        verifyNoInteractions(this.authorizedClientRepository);
    }

}
| |
package totalcommander.logic;
import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Scanner;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.filechooser.FileSystemView;
import totalcommander.logic.comparators.DateAscComparator;
import totalcommander.logic.comparators.DateDescComparator;
import totalcommander.logic.comparators.ExtensionAscComparator;
import totalcommander.logic.comparators.ExtensionDescComparator;
import totalcommander.logic.comparators.NameAscComparator;
import totalcommander.logic.comparators.NameDescComparator;
import totalcommander.logic.comparators.SizeAscComparator;
import totalcommander.logic.comparators.SizeDescComparator;
/**
 * File-system backend for the commander UI: keeps a current working directory,
 * lists its contents sorted by a selectable comparator, and performs the basic
 * file operations (open, view, copy, move, create folder, delete).
 *
 * Not thread-safe; intended to be driven from a single (Swing) thread.
 */
public class FileHandler {

    /** Placeholder shown in the size columns for directories. */
    private static final String DIR_MARKER = "<DIR>";
    /** Timestamp pattern used for every listed modification date. */
    private static final String DATE_PATTERN = "yyyy.MM.dd HH:mm";
    /** Grouped-digits pattern used to render byte counts. */
    private static final String SIZE_PATTERN = "#,###";

    private File workingDirectory = new File(System.getProperty("user.dir"));
    private Comparator<FileData> sort = new NameAscComparator();

    //For test only
    public File getWd(){
        return workingDirectory;
    }

    //For test only
    public Comparator<FileData> getComp(){
        return sort;
    }

    /**
     * Immutable row of the directory listing. Length fields are {@code Object}
     * because they hold either a numeric size (files) or the {@code "<DIR>"}
     * marker string (directories).
     */
    public static class FileData{
        private final String fileName;
        private final String fileExtension;
        private final Object fileLength;
        private final Object fileFormattedLength;
        private final String fileDate;
        private final Icon fileIcon;

        public FileData(Icon fileIcon, String fileName, String fileExtension, Object fileLength, Object fileFormattedLength, String fileDate){
            this.fileIcon = fileIcon;
            this.fileName = fileName;
            this.fileExtension = fileExtension;
            this.fileLength = fileLength;
            this.fileFormattedLength = fileFormattedLength;
            this.fileDate = fileDate;
        }

        public String getName(){
            return fileName;
        }

        public String getExtension(){
            return fileExtension;
        }

        public Object getLength(){
            return fileLength;
        }

        public Object getFormattedLength(){
            return fileFormattedLength;
        }

        public String getDate(){
            return fileDate;
        }

        public Icon getIcon(){
            return fileIcon;
        }
    }

    /**
     * Lists the working directory as sorted {@link FileData} rows, prepending
     * a ".." entry when not at a file-system root.
     *
     * @return the rows in the order given by the current comparator; empty
     *         list when the directory is unreadable
     */
    public List<FileData> listFileDatas(){
        List<FileData> fileDatas = new ArrayList<>();
        File[] files = workingDirectory.listFiles();
        // BUG FIX: listFiles() returns null when the directory is unreadable
        // (no permission / I/O error); previously this caused an NPE below.
        if (files == null){
            files = new File[0];
        }
        // Resolve the canonical path to decide whether we are at a root
        // (nameCount == 0). On failure keep a non-root placeholder so the
        // ".." entry still appears (original best-effort behavior).
        File tempDirectory = new File("user.dir");
        try{
            tempDirectory = new File(workingDirectory.getCanonicalPath());
        }catch(Exception e){
            e.printStackTrace();
        }
        //if we're not in the root, put .. to list
        if (tempDirectory.toPath().getNameCount() != 0){
            SimpleDateFormat sdf = new SimpleDateFormat(DATE_PATTERN);
            fileDatas.add(new FileData(new ImageIcon(getClass().getResource("/resources/back.png")),
                    "..", "", DIR_MARKER, DIR_MARKER, sdf.format(workingDirectory.lastModified())));
        }
        for (File file : files){
            fileDatas.add(toFileData(file));
        }
        fileDatas.sort(sort);
        return fileDatas;
    }

    /** Builds one listing row (icon, split name/extension, sizes, date) for a file or directory. */
    private FileData toFileData(File file){
        Icon fileIcon = FileSystemView.getFileSystemView().getSystemIcon(file);
        String rawName = file.getName();
        int lastDot = rawName.lastIndexOf('.');
        // Only files with a dot past position 0 are split (dot-files like
        // ".gitignore" and directories keep their full name, empty extension).
        boolean splitName = file.isFile() && lastDot > 0;
        String fileName = splitName ? rawName.substring(0, lastDot) : rawName;
        String fileExtension = splitName ? rawName.substring(lastDot + 1) : "";
        Object fileLength;
        Object fileFormattedLength;
        if (file.isFile()){
            fileLength = file.length();
            fileFormattedLength = new DecimalFormat(SIZE_PATTERN).format(file.length());
        }else{
            fileLength = DIR_MARKER;
            fileFormattedLength = DIR_MARKER;
        }
        String fileDate = new SimpleDateFormat(DATE_PATTERN).format(file.lastModified());
        return new FileData(fileIcon, fileName, fileExtension, fileLength, fileFormattedLength, fileDate);
    }

    /**
     * Selects the sort column; selecting the currently active ascending
     * column toggles to descending and vice versa.
     *
     * @param sort column key: "Name", "Ext", "Size" or "Date" (others ignored)
     */
    public void setComparator(String sort){
        switch(sort){
            case "Name":
                this.sort = (this.sort instanceof NameAscComparator)
                        ? new NameDescComparator() : new NameAscComparator();
                break;
            case "Ext":
                this.sort = (this.sort instanceof ExtensionAscComparator)
                        ? new ExtensionDescComparator() : new ExtensionAscComparator();
                break;
            case "Size":
                this.sort = (this.sort instanceof SizeAscComparator)
                        ? new SizeDescComparator() : new SizeAscComparator();
                break;
            case "Date":
                this.sort = (this.sort instanceof DateAscComparator)
                        ? new DateDescComparator() : new DateAscComparator();
                break;
        }
    }

    /** Moves the working directory one level up, if the parent exists. */
    public void back(){
        File tempDirectory = new File(workingDirectory.getPath() + "/..");
        if (tempDirectory.exists()){
            workingDirectory = tempDirectory;
        }
    }

    /**
     * Activates a listing row: enters it when it is a directory, otherwise
     * opens it with the platform's default application.
     *
     * @param item row as {name, extension, lengthText, ...}
     * @return true when the directory was entered or the file was opened
     */
    public boolean explore(String[] item){
        if (item[2].equals(DIR_MARKER))
            return goIntoDirectory(item[0]);
        else
            return openFile(item);
    }

    /** Enters the named child directory; fails when it is unreadable (listFiles() == null). */
    private boolean goIntoDirectory(String where){
        File tempDirectory = new File(workingDirectory.getPath() + "/" + where);
        if (tempDirectory.isDirectory() && tempDirectory.listFiles() != null){
            workingDirectory = tempDirectory;
            return true;
        }
        // not a directory, or no permission to read it
        return false;
    }

    /** Opens "name.ext" with the desktop's default application, when supported. */
    private boolean openFile(String[] file){
        File tempDirectory = new File(workingDirectory.getPath() + "/" + file[0] + "." + file[1]);
        if (Desktop.isDesktopSupported() && tempDirectory.isFile()) {
            try {
                if (Desktop.getDesktop().isSupported(Desktop.Action.OPEN)) {
                    Desktop.getDesktop().open(tempDirectory);
                    return true;
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return false;
    }

    /** Switches the working directory to the given drive root, if readable. */
    public void switchDrive(Object drive){
        File tempDirectory = (File) drive;
        if (tempDirectory.isDirectory() && tempDirectory.listFiles() != null){
            workingDirectory = tempDirectory;
        }
    }

    /** @return the canonical working-directory path with a trailing backslash, or an error text */
    public String getPath(){
        try{
            return workingDirectory.getCanonicalPath() + "\\";
        }catch(Exception e){
            e.printStackTrace();
            return "Cannot get path!";
        }
    }

    /**
     * Formats the free/total space of a drive, e.g. "1,234 k of 5,678 k free".
     *
     * @param drive drive whose toString() is its root path
     */
    public String getSpace(Object drive){
        File tempDirectory = new File(drive.toString());
        DecimalFormat myFormatter = new DecimalFormat(SIZE_PATTERN);
        String free = myFormatter.format(tempDirectory.getUsableSpace() / 1024);
        String total = myFormatter.format(tempDirectory.getTotalSpace() / 1024);
        return free + " k of " + total + " k free";
    }

    /**
     * Reads the whole content of the selected file as text.
     *
     * @param file row as {name, extension, lengthText, ...}
     * @return the file content, or null for directories and on read errors
     */
    public String view(String[] file){
        if (!file[2].equals(DIR_MARKER)){
            // try-with-resources: the original leaked the Scanner when
            // next() threw (e.g. empty file -> NoSuchElementException).
            try (Scanner scan = new Scanner(new File(workingDirectory.getPath() + "/" + file[0] + "." + file[1]))){
                scan.useDelimiter("\\Z"); // slurp the entire file in one token
                String content = scan.next();
                System.out.println(content);
                return content;
            }catch(Exception e){
                e.printStackTrace();
                return null;
            }
        }
        return null;
    }

    /**
     * Copies the selected entry into the destination directory, replacing an
     * existing target. NOTE: for directories only the (empty) directory itself
     * is copied — Files.copy does not recurse.
     *
     * @return true on success; false for ".." or on I/O failure
     */
    public boolean copy(String[] item, String destination){
        return transfer(item, destination, false);
    }

    /**
     * Moves the selected entry into the destination directory, replacing an
     * existing target.
     *
     * @return true on success; false for ".." or on I/O failure
     */
    public boolean move(String[] item, String destination){
        return transfer(item, destination, true);
    }

    /** Shared implementation of copy/move (they only differ in the Files call). */
    private boolean transfer(String[] item, String destination, boolean move){
        if (item[0].equals(".."))
            return false;
        // Directories are addressed by bare name, files by "name.ext".
        String entryName = item[2].equals(DIR_MARKER) ? item[0] : item[0] + "." + item[1];
        Path source = Paths.get(workingDirectory.getPath() + "/" + entryName);
        Path dest = Paths.get(destination + "/" + entryName);
        try{
            if (move)
                Files.move(source, dest, StandardCopyOption.REPLACE_EXISTING);
            else
                Files.copy(source, dest, StandardCopyOption.REPLACE_EXISTING);
            return true;
        }catch(Exception e){
            e.printStackTrace();
            return false;
        }
    }

    /** Creates a sub-folder in the working directory; false when it already exists or cannot be created. */
    public boolean newFolder(String name){
        File tempDirectory = new File(workingDirectory.getPath() + "/" + name);
        return tempDirectory.mkdir();
    }

    /**
     * Deletes the selected entry (recursively for directories). The ".."
     * pseudo-entry is refused.
     *
     * @return true when the entry was deleted
     */
    public boolean deleteSelected(String[] what){
        if (what[0].equals(".."))
            return false;
        if (what[2].equals(DIR_MARKER)){ //is directory
            try{
                File toBeDeleted = new File(workingDirectory.getCanonicalPath() + "/" + what[0]);
                if (toBeDeleted.exists() && toBeDeleted.isDirectory()){
                    return deleteDirectory(toBeDeleted);
                }
            }catch(Exception e){
                e.printStackTrace();
            }
        }else{ //is file
            try{
                String file = what[0] + "." + what[1];
                File toBeDeleted = new File(workingDirectory.getCanonicalPath() + "/" + file);
                if (toBeDeleted.exists() && toBeDeleted.isFile()){
                    return toBeDeleted.delete();
                }
            }catch(Exception e){
                e.printStackTrace();
            }
        }
        return false;
    }

    /** Recursively deletes a directory tree; returns the result of deleting the root. */
    private boolean deleteDirectory(File what){
        if (what.exists()){
            File[] files = what.listFiles();
            if (files != null)
                for (File child : files){
                    if (child.isDirectory())
                        deleteDirectory(child);
                    else
                        child.delete();
                }
        }
        return what.delete();
    }
}
| |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.http.nio.netty.internal.nrs;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.util.concurrent.EventExecutor;
import java.util.concurrent.atomic.AtomicBoolean;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import software.amazon.awssdk.annotations.SdkInternalApi;
import software.amazon.awssdk.http.nio.netty.internal.utils.OrderedWriteChannelHandlerContext;
import software.amazon.awssdk.utils.Validate;
/**
* Subscriber that publishes received messages to the handler pipeline.
*
* This class contains source imported from https://github.com/playframework/netty-reactive-streams,
* licensed under the Apache License 2.0, available at the time of the fork (1/31/2020) here:
* https://github.com/playframework/netty-reactive-streams/blob/master/LICENSE.txt
*
* All original source licensed under the Apache License 2.0 by playframework. All modifications are
* licensed under the Apache License 2.0 by Amazon Web Services.
*/
@SdkInternalApi
public class HandlerSubscriber<T> extends ChannelDuplexHandler implements Subscriber<T> {
    // Default demand watermarks used by the single-argument constructor.
    static final long DEFAULT_LOW_WATERMARK = 4;
    static final long DEFAULT_HIGH_WATERMARK = 16;

    private final EventExecutor executor;
    // When outstanding demand drops below this, more is requested from the subscription.
    private final long demandLowWatermark;
    // Maximum demand ever outstanding at once.
    private final long demandHighWatermark;
    // Guards against a second onSubscribe (Reactive Streams rule: at most one subscription).
    private final AtomicBoolean hasSubscription = new AtomicBoolean();
    // volatile: set from the publisher's thread, read from the event loop.
    private volatile Subscription subscription;
    private volatile ChannelHandlerContext ctx;
    // Lifecycle state; NOTE(review): appears to be mutated only on the event loop — confirm.
    private State state = HandlerSubscriber.State.NO_SUBSCRIPTION_OR_CONTEXT;
    private long outstandingDemand = 0;
    private ChannelFuture lastWriteFuture;
    /**
     * Create a new handler subscriber.
     *
     * The supplied executor must be the same event loop as the event loop that this handler is eventually registered
     * with, if not, an exception will be thrown when the handler is registered.
     *
     * @param executor The executor to execute asynchronous events from the publisher on.
     * @param demandLowWatermark The low watermark for demand. When demand drops below this, more will be requested.
     * @param demandHighWatermark The high watermark for demand. This is the maximum that will be requested.
     */
    public HandlerSubscriber(EventExecutor executor, long demandLowWatermark, long demandHighWatermark) {
        // NOTE(review): watermarks are not validated here (e.g. low <= high); callers are trusted.
        this.executor = executor;
        this.demandLowWatermark = demandLowWatermark;
        this.demandHighWatermark = demandHighWatermark;
    }
/**
* Create a new handler subscriber with the default low and high watermarks.
*
* The supplied executor must be the same event loop as the event loop that this handler is eventually registered
* with, if not, an exception will be thrown when the handler is registered.
*
* @param executor The executor to execute asynchronous events from the publisher on.
* @see #HandlerSubscriber(EventExecutor, long, long)
*/
public HandlerSubscriber(EventExecutor executor) {
this(executor, DEFAULT_LOW_WATERMARK, DEFAULT_HIGH_WATERMARK);
}
/**
* Override for custom error handling. By default, it closes the channel.
*
* @param error The error to handle.
*/
protected void error(Throwable error) {
doClose();
}
/**
* Override for custom completion handling. By default, it closes the channel.
*/
protected void complete() {
doClose();
}
enum State {
NO_SUBSCRIPTION_OR_CONTEXT,
NO_SUBSCRIPTION,
NO_CONTEXT,
INACTIVE,
RUNNING,
CANCELLED,
COMPLETE
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
verifyRegisteredWithRightExecutor(ctx);
// Ensure that writes to the context happen consecutively, even if they're performed from within the event loop.
// See https://github.com/netty/netty/issues/7783
ctx = OrderedWriteChannelHandlerContext.wrap(ctx);
switch (state) {
case NO_SUBSCRIPTION_OR_CONTEXT:
this.ctx = ctx;
// We were in no subscription or context, now we just don't have a subscription.
state = HandlerSubscriber.State.NO_SUBSCRIPTION;
break;
case NO_CONTEXT:
this.ctx = ctx;
// We were in no context, we're now fully initialised
maybeStart();
break;
case COMPLETE:
// We are complete, close
state = HandlerSubscriber.State.COMPLETE;
ctx.close();
break;
default:
throw new IllegalStateException("This handler must only be added to a pipeline once " + state);
}
}
@Override
public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
verifyRegisteredWithRightExecutor(ctx);
ctx.fireChannelRegistered();
}
private void verifyRegisteredWithRightExecutor(ChannelHandlerContext ctx) {
if (ctx.channel().isRegistered() && !executor.inEventLoop()) {
throw new IllegalArgumentException("Channel handler MUST be registered with the same EventExecutor that "
+ "it is created with.");
}
}
@Override
public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
maybeRequestMore();
ctx.fireChannelWritabilityChanged();
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
if (state == HandlerSubscriber.State.INACTIVE) {
state = HandlerSubscriber.State.RUNNING;
maybeRequestMore();
}
ctx.fireChannelActive();
}
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
cancel();
ctx.fireChannelInactive();
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
cancel();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
cancel();
ctx.fireExceptionCaught(cause);
}
private void cancel() {
switch (state) {
case NO_SUBSCRIPTION:
state = HandlerSubscriber.State.CANCELLED;
break;
case RUNNING:
case INACTIVE:
subscription.cancel();
state = HandlerSubscriber.State.CANCELLED;
break;
default:
// ignore
}
}
@Override
public void onSubscribe(final Subscription subscription) {
if (subscription == null) {
throw new NullPointerException("Null subscription");
} else if (!hasSubscription.compareAndSet(false, true)) {
subscription.cancel();
} else {
this.subscription = subscription;
executor.execute(new Runnable() {
@Override
public void run() {
provideSubscription();
}
});
}
}
private void provideSubscription() {
switch (state) {
case NO_SUBSCRIPTION_OR_CONTEXT:
state = HandlerSubscriber.State.NO_CONTEXT;
break;
case NO_SUBSCRIPTION:
maybeStart();
break;
case CANCELLED:
subscription.cancel();
break;
default:
// ignore
}
}
private void maybeStart() {
if (ctx.channel().isActive()) {
state = HandlerSubscriber.State.RUNNING;
maybeRequestMore();
} else {
state = HandlerSubscriber.State.INACTIVE;
}
}
@Override
public void onNext(T t) {
// Publish straight to the context.
Validate.notNull(t, "Event must not be null.");
lastWriteFuture = ctx.writeAndFlush(t);
lastWriteFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
outstandingDemand--;
maybeRequestMore();
}
});
}
@Override
public void onError(final Throwable error) {
if (error == null) {
throw new NullPointerException("Null error published");
}
error(error);
}
@Override
public void onComplete() {
if (lastWriteFuture == null) {
complete();
} else {
lastWriteFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture channelFuture) throws Exception {
complete();
}
});
}
}
private void doClose() {
executor.execute(new Runnable() {
@Override
public void run() {
switch (state) {
case NO_SUBSCRIPTION:
case INACTIVE:
case RUNNING:
ctx.close();
state = HandlerSubscriber.State.COMPLETE;
break;
default:
// ignore
}
}
});
}
private void maybeRequestMore() {
if (outstandingDemand <= demandLowWatermark && ctx.channel().isWritable()) {
long toRequest = demandHighWatermark - outstandingDemand;
outstandingDemand = demandHighWatermark;
subscription.request(toRequest);
}
}
}
| |
package io.fabric8.openshift.api.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.fabric8.kubernetes.api.model.Container;
import io.fabric8.kubernetes.api.model.IntOrString;
import io.fabric8.kubernetes.api.model.KubernetesResource;
import io.fabric8.kubernetes.api.model.LabelSelector;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.ObjectReference;
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
import io.fabric8.kubernetes.api.model.PodTemplateSpec;
import io.fabric8.kubernetes.api.model.ResourceRequirements;
import io.sundr.builder.annotations.Buildable;
import io.sundr.builder.annotations.BuildableReference;
import lombok.EqualsAndHashCode;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
@JsonDeserialize(using = com.fasterxml.jackson.databind.JsonDeserializer.None.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "apiVersion",
    "kind",
    "metadata",
    "binary",
    "configMaps",
    "contextDir",
    "dockerfile",
    "git",
    "images",
    "secrets",
    "sourceSecret",
    "type"
})
@ToString
@EqualsAndHashCode
@Setter
@Accessors(prefix = {
    "_",
    ""
})
@Buildable(editableEnabled = false, validationEnabled = false, generateBuilderPackage = false, lazyCollectionInitEnabled = false, builderPackage = "io.fabric8.kubernetes.api.builder", refs = {
    @BuildableReference(ObjectMeta.class),
    @BuildableReference(LabelSelector.class),
    @BuildableReference(Container.class),
    @BuildableReference(PodTemplateSpec.class),
    @BuildableReference(ResourceRequirements.class),
    @BuildableReference(IntOrString.class),
    @BuildableReference(ObjectReference.class),
    @BuildableReference(io.fabric8.kubernetes.api.model.LocalObjectReference.class),
    @BuildableReference(PersistentVolumeClaim.class)
})
/**
 * Jackson-serializable model for the OpenShift {@code BuildSource} type.
 * Plain mutable POJO: every JSON property is mirrored by a field with
 * getter/setter, and unknown properties are captured in
 * {@link #getAdditionalProperties()} via {@code @JsonAnySetter}.
 */
public class BuildSource implements KubernetesResource
{
    @JsonProperty("binary")
    private BinaryBuildSource binary;
    @JsonProperty("configMaps")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<ConfigMapBuildSource> configMaps = new ArrayList<>();
    @JsonProperty("contextDir")
    private String contextDir;
    @JsonProperty("dockerfile")
    private String dockerfile;
    @JsonProperty("git")
    private GitBuildSource git;
    @JsonProperty("images")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<ImageSource> images = new ArrayList<>();
    @JsonProperty("secrets")
    @JsonInclude(JsonInclude.Include.NON_EMPTY)
    private List<SecretBuildSource> secrets = new ArrayList<>();
    @JsonProperty("sourceSecret")
    private io.fabric8.kubernetes.api.model.LocalObjectReference sourceSecret;
    @JsonProperty("type")
    private String type;
    // Catch-all for JSON properties not declared above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();
    /**
     * No args constructor for use in serialization
     *
     */
    public BuildSource() {
    }
    /**
     * All-args constructor. Parameters are listed in declaration order and map
     * one-to-one onto the JSON properties of the same name.
     *
     * @param binary       value for the {@code binary} property
     * @param configMaps   value for the {@code configMaps} property
     * @param contextDir   value for the {@code contextDir} property
     * @param dockerfile   value for the {@code dockerfile} property
     * @param git          value for the {@code git} property
     * @param images       value for the {@code images} property
     * @param secrets      value for the {@code secrets} property
     * @param sourceSecret value for the {@code sourceSecret} property
     * @param type         value for the {@code type} property
     */
    public BuildSource(BinaryBuildSource binary, List<ConfigMapBuildSource> configMaps, String contextDir, String dockerfile, GitBuildSource git, List<ImageSource> images, List<SecretBuildSource> secrets, io.fabric8.kubernetes.api.model.LocalObjectReference sourceSecret, String type) {
        super();
        this.binary = binary;
        this.configMaps = configMaps;
        this.contextDir = contextDir;
        this.dockerfile = dockerfile;
        this.git = git;
        this.images = images;
        this.secrets = secrets;
        this.sourceSecret = sourceSecret;
        this.type = type;
    }
    @JsonProperty("binary")
    public BinaryBuildSource getBinary() {
        return binary;
    }
    @JsonProperty("binary")
    public void setBinary(BinaryBuildSource binary) {
        this.binary = binary;
    }
    @JsonProperty("configMaps")
    public List<ConfigMapBuildSource> getConfigMaps() {
        return configMaps;
    }
    @JsonProperty("configMaps")
    public void setConfigMaps(List<ConfigMapBuildSource> configMaps) {
        this.configMaps = configMaps;
    }
    @JsonProperty("contextDir")
    public String getContextDir() {
        return contextDir;
    }
    @JsonProperty("contextDir")
    public void setContextDir(String contextDir) {
        this.contextDir = contextDir;
    }
    @JsonProperty("dockerfile")
    public String getDockerfile() {
        return dockerfile;
    }
    @JsonProperty("dockerfile")
    public void setDockerfile(String dockerfile) {
        this.dockerfile = dockerfile;
    }
    @JsonProperty("git")
    public GitBuildSource getGit() {
        return git;
    }
    @JsonProperty("git")
    public void setGit(GitBuildSource git) {
        this.git = git;
    }
    @JsonProperty("images")
    public List<ImageSource> getImages() {
        return images;
    }
    @JsonProperty("images")
    public void setImages(List<ImageSource> images) {
        this.images = images;
    }
    @JsonProperty("secrets")
    public List<SecretBuildSource> getSecrets() {
        return secrets;
    }
    @JsonProperty("secrets")
    public void setSecrets(List<SecretBuildSource> secrets) {
        this.secrets = secrets;
    }
    @JsonProperty("sourceSecret")
    public io.fabric8.kubernetes.api.model.LocalObjectReference getSourceSecret() {
        return sourceSecret;
    }
    @JsonProperty("sourceSecret")
    public void setSourceSecret(io.fabric8.kubernetes.api.model.LocalObjectReference sourceSecret) {
        this.sourceSecret = sourceSecret;
    }
    @JsonProperty("type")
    public String getType() {
        return type;
    }
    @JsonProperty("type")
    public void setType(String type) {
        this.type = type;
    }
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }
    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.test.integration.controller.server;
import com.google.common.collect.ImmutableMap;
import io.pravega.client.segment.impl.Segment;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.StreamCut;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.control.impl.Controller;
import io.pravega.client.stream.impl.StreamImpl;
import io.pravega.client.stream.impl.StreamSegments;
import io.pravega.client.stream.impl.StreamCutImpl;
import io.pravega.client.tables.KeyValueTableConfiguration;
import io.pravega.common.Exceptions;
import io.pravega.common.concurrent.ExecutorServiceHelpers;
import io.pravega.common.concurrent.Futures;
import io.pravega.controller.store.stream.StoreException;
import io.pravega.segmentstore.contracts.StreamSegmentStore;
import io.pravega.segmentstore.contracts.tables.TableStore;
import io.pravega.segmentstore.server.host.handler.PravegaConnectionListener;
import io.pravega.segmentstore.server.store.ServiceBuilder;
import io.pravega.segmentstore.server.store.ServiceBuilderConfig;
import io.pravega.shared.NameUtils;
import io.pravega.test.common.AssertExtensions;
import io.pravega.test.common.TestUtils;
import io.pravega.test.common.TestingServerStarter;
import io.pravega.test.integration.demo.ControllerWrapper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import lombok.extern.slf4j.Slf4j;
import org.apache.curator.test.TestingServer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static io.pravega.test.common.AssertExtensions.assertThrows;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@Slf4j
public class ControllerServiceTest {
    // Ports are picked dynamically so concurrent test runs don't collide.
    private final int controllerPort = TestUtils.getAvailableListenPort();
    private final String serviceHost = "localhost";
    private final int servicePort = TestUtils.getAvailableListenPort();
    // Number of segment containers the controller wrapper is started with.
    private final int containerCount = 4;
    // Per-test fixture: embedded ZooKeeper, in-memory segment store listener,
    // controller wrapper, and service builder — created in setUp(), torn down in tearDown().
    private TestingServer zkTestServer;
    private PravegaConnectionListener server;
    private ControllerWrapper controllerWrapper;
    private ServiceBuilder serviceBuilder;
    // Single-threaded scheduler named "collector"; created in setUp() and shut down in tearDown().
    private ScheduledExecutorService executor;
    /**
     * Stands up the full in-process test fixture: embedded ZooKeeper, an
     * in-memory Pravega segment store listening on {@code servicePort}, and a
     * controller wrapper pointed at both.
     */
    @Before
    public void setUp() throws Exception {
        zkTestServer = new TestingServerStarter().start();
        // In-memory segment/table stores back the connection listener.
        serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
        server.startListening();
        controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), false,
                                                  controllerPort, serviceHost, servicePort, containerCount);
        // Block until the controller is fully up before any test runs.
        controllerWrapper.awaitRunning();
        executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "collector");
    }
    /**
     * Tears the fixture down in reverse dependency order: executor, controller,
     * segment store listener, service builder, then ZooKeeper.
     */
    @After
    public void tearDown() throws Exception {
        ExecutorServiceHelpers.shutdown(executor);
        controllerWrapper.close();
        server.close();
        serviceBuilder.close();
        zkTestServer.close();
    }
@Test
public void streamTagTest() {
final String scope = "sc";
final String stream = "st";
final String stream2 = "st2";
Controller controller = controllerWrapper.getController();
controller.createScope(scope).join();
System.out.println("scope created");
// Create Stream with tags t1, t2
StreamConfiguration strCfg = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).tag("t1").tag("t2").build();
controller.createStream(scope, stream, strCfg).join();
assertEquals(Set.of("t1", "t2"), controller.getStreamConfiguration(scope, stream).join().getTags());
// Update stream to have tags t2, t3
StreamConfiguration strCfgNew = strCfg.toBuilder().clearTags().tags(Set.of("t2", "t3")).build();
controller.updateStream(scope, stream, strCfgNew).join();
// Check if the stream tags are infact t2, t3
assertEquals(Set.of("t2", "t3"), controller.getStreamConfiguration(scope, stream).join().getTags());
// List Streams with tag t2. only one stream should be listed
assertEquals(Collections.singletonList(stream), listStreamsForTag(scope, controller, "t2"));
// Create stream2 with tags t1, t2
controller.createStream(scope, stream2, strCfg).join();
// List Streams with tag t2. two stream should be listed
assertEquals(Arrays.asList(stream2, stream), listStreamsForTag(scope, controller, "t2"));
controller.sealStream(scope, stream2).join();
controller.deleteStream(scope, stream2).join();
// List Streams with tag t2. two stream should be listed
assertEquals(Arrays.asList(stream), listStreamsForTag(scope, controller, "t2"));
assertEquals(strCfgNew, controller.getStreamConfiguration(scope, stream).join());
controller.sealStream(scope, stream).join();
controller.deleteStream(scope, stream).join();
assertEquals(Collections.emptyList(), listStreamsForTag(scope, controller, "t2"));
}
private ArrayList<String> listStreamsForTag(String scope, Controller controller, String tag) {
ArrayList<String> resultList = new ArrayList<String>();
controller.listStreamsForTag(scope, tag)
.collectRemaining(stream1 -> resultList.add(stream1.getStreamName()))
.join();
return resultList;
}
    /**
     * Validates scope/stream lifecycle rules through the controller API. The
     * steps are strictly order-dependent: each create/delete changes the state
     * the next assertion relies on.
     */
    @Test(timeout = 40000)
    public void streamMetadataTest() throws Exception {
        final String scope = "testScope";
        final String stream = "testStream";
        StreamConfiguration streamConfiguration = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.fixed(1))
                .build();
        Controller controller = controllerWrapper.getController();
        // Create test scope. This operation should succeed.
        assertTrue(controller.createScope(scope).join());
        // Delete the test scope. This operation should also succeed.
        assertTrue(controller.deleteScope(scope).join());
        // Try creating a stream. It should fail, since the scope does not exist.
        assertFalse(Futures.await(controller.createStream(scope, stream, streamConfiguration)));
        // Again create the scope.
        assertTrue(controller.createScope(scope).join());
        // Try creating the stream again. It should succeed now, since the scope exists.
        assertTrue(controller.createStream(scope, stream, streamConfiguration).join());
        // Delete test scope. This operation should fail, since it is not empty.
        assertFalse(Futures.await(controller.deleteScope(scope)));
        // Delete a non-existent scope.
        assertFalse(controller.deleteScope("non_existent_scope").get());
        // Create a scope with invalid characters. It should fail.
        assertFalse(Futures.await(controller.createScope("abc/def")));
        // Try creating already existing scope.
        assertFalse(controller.createScope(scope).join());
        // Try creating stream with invalid characters. It should fail.
        assertFalse(Futures.await(controller.createStream(scope, "abc/def", StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.fixed(1))
                .build())));
        // Try creating already existing stream.
        assertFalse(controller.createStream(scope, stream, streamConfiguration).join());
    }
    /**
     * End-to-end sweep of the controller service: stream/KVT creation across
     * scopes, sealing, config updates, segment queries, reader groups, and
     * subscriber stream cuts. The helper calls are order-dependent — later steps
     * assume the streams and reader groups created earlier still exist.
     */
    @Test(timeout = 80000)
    public void testControllerService() throws Exception {
        final String scope1 = "scope1";
        final String scope2 = "scope2";
        controllerWrapper.getControllerService().createScope("scope1", 0L).get();
        controllerWrapper.getControllerService().createScope("scope2", 0L).get();
        Controller controller = controllerWrapper.getController();
        final String streamName1 = "stream1";
        final String streamName2 = "stream2";
        final String streamName3 = "stream3";
        final ScalingPolicy scalingPolicy = ScalingPolicy.fixed(2);
        final StreamConfiguration config1 = StreamConfiguration.builder()
                .scalingPolicy(scalingPolicy)
                .build();
        final StreamConfiguration config2 = StreamConfiguration.builder()
                .scalingPolicy(scalingPolicy)
                .build();
        final StreamConfiguration config3 = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.fixed(3))
                .build();
        createAStream(scope1, streamName1, controller, config1);
        //Same name in different scope
        createAStream(scope2, streamName1, controller, config2);
        //Different name in same scope
        createAStream(scope1, streamName2, controller, config3);
        createAStream(scope1, streamName3, controller, config3);
        final String kvtName1 = "kvtable1";
        final String kvtName2 = "kvtable2";
        final KeyValueTableConfiguration kvtConfig1 = KeyValueTableConfiguration.builder()
                .partitionCount(2).primaryKeyLength(4).secondaryKeyLength(4).build();
        createAKeyValueTable(scope1, kvtName1, controller, kvtConfig1);
        //Same name in different scope
        createAKeyValueTable(scope2, kvtName1, controller, kvtConfig1);
        //Different name in different scope
        createAKeyValueTable(scope2, kvtName2, controller, kvtConfig1);
        final String scopeSeal = "scopeSeal";
        final String streamNameSeal = "streamSeal";
        // Seal semantics: sealing a stream, re-sealing it, and sealing one that doesn't exist.
        sealAStream(controllerWrapper, controller, scalingPolicy, scopeSeal, streamNameSeal);
        sealASealedStream(controller, scopeSeal, streamNameSeal);
        sealNonExistantStream(controller, scopeSeal);
        streamDuplicationNotAllowed(scope1, streamName1, controller, config1);
        //update stream config section
        updateStreamName(controller, scope1, scalingPolicy);
        updateScalingPolicy(controller, scope1, streamName1);
        updateTargetRate(controller, scope1, streamName1);
        updateScaleFactor(controller, scope1, streamName1);
        updataMinSegmentes(controller, scope1, streamName1);
        updateConfigOfNonExistantStream(controller);
        //get currently active segments
        getActiveSegments(controller, scope1, streamName1);
        getActiveSegmentsForNonExistentStream(controller);
        //get positions at a given time stamp
        getSegmentsAtTime(controller, scope1, streamName1);
        getSegmentsAtTime(controller, scope1, streamName2);
        getSegmentsForNonExistentStream(controller);
        getSegmentsBeforeCreation(controller, scope1, streamName1);
        getSegmentsAfterCreation(controller, scope1, streamName1);
        readerGroupsTest(controller, scope1, streamName1, streamName2, streamName3);
        updateSubscriberStreamCutTest(controller, scope2, streamName1);
    }
private static void getSegmentsAfterCreation(Controller controller, final String scope,
final String streamName) throws InterruptedException,
ExecutionException {
CompletableFuture<Map<Segment, Long>> segments = controller.getSegmentsAtTime(new StreamImpl(scope, streamName), System.currentTimeMillis() + 3600);
assertFalse("FAILURE: Fetching positions at given time in furture after stream creation failed", segments.get().isEmpty());
}
    /**
     * Queries positions at a timestamp 36 seconds in the past (before the stream
     * was created) and asserts the result does not contain exactly one segment.
     */
    private static void getSegmentsBeforeCreation(Controller controller, final String scope,
                                                  final String streamName) throws InterruptedException,
                                                                           ExecutionException {
        CompletableFuture<Map<Segment, Long>> segments = controller.getSegmentsAtTime(new StreamImpl(scope, streamName), System.currentTimeMillis() - 36000);
        // NOTE(review): asserting size() != 1 is an odd expectation for
        // "before creation" — an empty map also passes; confirm the intent.
        assertFalse("FAILURE: Fetching positions at given time before stream creation failed", segments.get().size() == 1);
    }
    /**
     * Fetching positions for a stream that was never created may either yield an
     * empty map or fail with a {@code DataNotFoundException}; the try/catch
     * accepts both outcomes.
     */
    private static void getSegmentsForNonExistentStream(Controller controller) throws InterruptedException {
        Stream stream = new StreamImpl("scope", "streamName");
        try {
            CompletableFuture<Map<Segment, Long>> segments = controller.getSegmentsAtTime(stream, System.currentTimeMillis());
            assertTrue("FAILURE: Fetching positions for non existent stream", segments.get().isEmpty());
            log.info("SUCCESS: Positions cannot be fetched for non existent stream");
        } catch (ExecutionException | CompletionException e) {
            // Unwrap to the root cause; only "data not found" is acceptable here.
            assertTrue("FAILURE: Fetching positions for non existent stream", Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
            log.info("SUCCESS: Positions cannot be fetched for non existent stream");
        }
    }
private static void getSegmentsAtTime(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
CompletableFuture<Map<Segment, Long>> segments = controller.getSegmentsAtTime(new StreamImpl(scope, streamName), System.currentTimeMillis());
assertFalse("FAILURE: Fetching positions at given time stamp failed", segments.get().isEmpty());
}
    /**
     * getCurrentSegments on a stream that does not exist must fail, with
     * {@code StoreException.DataNotFoundException} as the unwrapped cause.
     */
    private static void getActiveSegmentsForNonExistentStream(Controller controller) {
        AssertExtensions.assertFutureThrows("", controller.getCurrentSegments("scope", "streamName"),
                e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
    }
private static void getActiveSegments(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
CompletableFuture<StreamSegments> getActiveSegments = controller.getCurrentSegments(scope, streamName);
assertFalse("FAILURE: Fetching active segments failed", getActiveSegments.get().getSegments().isEmpty());
}
private static void updateConfigOfNonExistantStream(Controller controller) {
assertFalse(Futures.await(controller.updateStream("scope", "streamName", StreamConfiguration.builder()
.scalingPolicy(ScalingPolicy.byEventRate(200, 2, 3))
.build())));
}
private static void updataMinSegmentes(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
assertTrue(controller.updateStream(scope, streamName, StreamConfiguration.builder()
.scalingPolicy(ScalingPolicy.byEventRate(200, 2, 3))
.build()).get());
assertEquals(3, controller.getCurrentSegments(scope, streamName).get().getSegments().size());
}
private static void updateScaleFactor(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
assertTrue(controller.updateStream(scope, streamName, StreamConfiguration.builder()
.scalingPolicy(ScalingPolicy.byEventRate(100, 3, 2))
.build()).get());
}
private static void updateTargetRate(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
assertTrue(controller.updateStream(scope, streamName, StreamConfiguration.builder()
.scalingPolicy(ScalingPolicy.byEventRate(200, 2, 2))
.build()).get());
}
private static void updateScalingPolicy(Controller controller, final String scope,
final String streamName) throws InterruptedException, ExecutionException {
assertTrue(controller.updateStream(scope, streamName, StreamConfiguration.builder()
.scalingPolicy(ScalingPolicy.byEventRate(100, 2, 2))
.build()).get());
}
private static void updateStreamName(Controller controller, final String scope,
final ScalingPolicy scalingPolicy) {
assertFalse(Futures.await(controller.updateStream(scope, "stream4", StreamConfiguration.builder()
.scalingPolicy(scalingPolicy)
.build())));
}
private static void readerGroupsTest(Controller controller, final String scope, final String stream1,
final String stream2, final String stream3) throws InterruptedException, ExecutionException {
final String scopedStreamName1 = NameUtils.getScopedStreamName(scope, stream1);
final String scopedStreamName2 = NameUtils.getScopedStreamName(scope, stream2);
final String scopedStreamName3 = NameUtils.getScopedStreamName(scope, stream3);
final Segment seg0 = new Segment(scope, stream1, 0L);
final Segment seg1 = new Segment(scope, stream1, 1L);
ImmutableMap<Segment, Long> startStreamCut = ImmutableMap.of(seg0, 10L, seg1, 10L);
Map<Stream, StreamCut> startSC = ImmutableMap.of(
Stream.of(scope, stream1), new StreamCutImpl(Stream.of(scope, stream1), startStreamCut),
Stream.of(scope, stream2), new StreamCutImpl(Stream.of(scope, stream2), startStreamCut));
ImmutableMap<Segment, Long> endStreamCut = ImmutableMap.of(seg0, 200L, seg1, 300L);
Map<Stream, StreamCut> endSC = ImmutableMap.of(
Stream.of(scope, stream1), new StreamCutImpl(Stream.of(scope, stream1), endStreamCut),
Stream.of(scope, stream2), new StreamCutImpl(Stream.of(scope, stream2), endStreamCut));
ReaderGroupConfig rgConfig = ReaderGroupConfig.builder()
.automaticCheckpointIntervalMillis(30000L)
.groupRefreshTimeMillis(20000L)
.maxOutstandingCheckpointRequest(2)
.retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
.startingStreamCuts(startSC)
.endingStreamCuts(endSC).build();
final ReaderGroupConfig rgConfig1 = ReaderGroupConfig.cloneConfig(rgConfig, UUID.randomUUID(), 0L);
// Create Reader Group rg1
ReaderGroupConfig createRGResult = controller.createReaderGroup(scope, "rg1", rgConfig1).get();
assertEquals(rgConfig1.getReaderGroupId(), createRGResult.getReaderGroupId());
assertThrows(IllegalArgumentException.class, () -> controller.createReaderGroup(scope, "bad_rg_name", rgConfig1).get());
assertThrows(IllegalArgumentException.class, () -> controller.createReaderGroup("badscope", "rg3", rgConfig1).get());
ReaderGroupConfig rgConfig2 = ReaderGroupConfig.builder()
.retentionType(ReaderGroupConfig.StreamDataRetention.MANUAL_RELEASE_AT_USER_STREAMCUT)
.stream(scopedStreamName1).stream(scopedStreamName2)
.automaticCheckpointIntervalMillis(7000L)
.build();
rgConfig2 = ReaderGroupConfig.cloneConfig(rgConfig2, UUID.randomUUID(), 0L);
// Create Reader Group rg2
createRGResult = controller.createReaderGroup(scope, "rg2", rgConfig2).get();
assertEquals(rgConfig2.getReaderGroupId(), createRGResult.getReaderGroupId());
List<String> subscribers = controller.listSubscribers(scope, stream1).get();
assertTrue(subscribers.size() == 2);
assertTrue(controller.deleteReaderGroup(scope, "rg2", rgConfig2.getReaderGroupId()).get());
assertThrows(IllegalArgumentException.class, () -> controller.getReaderGroupConfig(scope, "rg2").get());
subscribers = controller.listSubscribers(scope, stream1).get();
assertTrue(subscribers.size() == 1);
ReaderGroupConfig config = controller.getReaderGroupConfig(scope, "rg1").get();
assertEquals(rgConfig1.getGroupRefreshTimeMillis(), config.getGroupRefreshTimeMillis());
assertEquals(rgConfig1.getGeneration(), config.getGeneration());
assertEquals(rgConfig1.getMaxOutstandingCheckpointRequest(), config.getMaxOutstandingCheckpointRequest());
assertEquals(rgConfig1.getRetentionType(), config.getRetentionType());
assertEquals(rgConfig1.getReaderGroupId(), config.getReaderGroupId());
assertEquals(rgConfig1.getStartingStreamCuts().keySet().size(), config.getStartingStreamCuts().keySet().size());
assertEquals(rgConfig1.getEndingStreamCuts().keySet().size(), config.getEndingStreamCuts().keySet().size());
assertTrue(config.getStartingStreamCuts().keySet().contains(Stream.of(scope, stream1)));
assertTrue(config.getStartingStreamCuts().keySet().contains(Stream.of(scope, stream2)));
Map<Stream, StreamCut> startSCNew = ImmutableMap.of(
Stream.of(scope, stream2), new StreamCutImpl(Stream.of(scope, stream2), startStreamCut),
Stream.of(scope, stream3), new StreamCutImpl(Stream.of(scope, stream3), startStreamCut));
Map<Stream, StreamCut> endSCNew = ImmutableMap.of(
Stream.of(scope, stream2), new StreamCutImpl(Stream.of(scope, stream2), endStreamCut),
Stream.of(scope, stream3), new StreamCutImpl(Stream.of(scope, stream3), endStreamCut));
ReaderGroupConfig newRGConfig = ReaderGroupConfig.builder()
.automaticCheckpointIntervalMillis(1000L)
.groupRefreshTimeMillis(5000L)
.maxOutstandingCheckpointRequest(7)
.retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
.startingStreamCuts(startSCNew)
.endingStreamCuts(endSCNew).build();
newRGConfig = ReaderGroupConfig.cloneConfig(newRGConfig, rgConfig1.getReaderGroupId(), rgConfig1.getGeneration());
// Update Reader Group rg1
assertNotNull(controller.updateReaderGroup(scope, "rg1", newRGConfig).get());
ReaderGroupConfig updatedConfig = controller.getReaderGroupConfig(scope, "rg1").get();
assertEquals(newRGConfig.getGroupRefreshTimeMillis(), updatedConfig.getGroupRefreshTimeMillis());
assertEquals(newRGConfig.getGeneration() + 1, updatedConfig.getGeneration());
assertEquals(newRGConfig.getMaxOutstandingCheckpointRequest(), updatedConfig.getMaxOutstandingCheckpointRequest());
assertEquals(newRGConfig.getRetentionType(), updatedConfig.getRetentionType());
assertEquals(newRGConfig.getReaderGroupId(), updatedConfig.getReaderGroupId());
assertEquals(newRGConfig.getStartingStreamCuts().keySet().size(), updatedConfig.getStartingStreamCuts().keySet().size());
assertEquals(newRGConfig.getEndingStreamCuts().keySet().size(), updatedConfig.getEndingStreamCuts().keySet().size());
assertTrue(updatedConfig.getStartingStreamCuts().keySet().contains(Stream.of(scope, stream3)));
assertTrue(updatedConfig.getStartingStreamCuts().keySet().contains(Stream.of(scope, stream2)));
// re-create ReaderGroup "rg2"
ReaderGroupConfig rgConfigRecreate = ReaderGroupConfig.builder().disableAutomaticCheckpoints()
.stream(scopedStreamName3)
.retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
.build();
rgConfigRecreate = ReaderGroupConfig.cloneConfig(rgConfigRecreate, UUID.randomUUID(), 0L);
ReaderGroupConfig recreateRGResponse = controller.createReaderGroup(scope, "rg2", rgConfigRecreate).get();
assertEquals(rgConfigRecreate.getReaderGroupId(), recreateRGResponse.getReaderGroupId());
assertEquals(rgConfigRecreate.getRetentionType(), recreateRGResponse.getRetentionType());
// Update a ReaderGroup from Subscriber to Non-subscriber
final String readerGroupName = "rg3";
ReaderGroupConfig rgConfigSubscriber = ReaderGroupConfig.builder().disableAutomaticCheckpoints()
.stream(scopedStreamName1).retentionType(ReaderGroupConfig.StreamDataRetention.MANUAL_RELEASE_AT_USER_STREAMCUT)
.build();
rgConfigSubscriber = ReaderGroupConfig.cloneConfig(rgConfigSubscriber, UUID.randomUUID(), 0L);
ReaderGroupConfig subscriberRG = controller.createReaderGroup(scope, readerGroupName, rgConfigSubscriber).join();
assertEquals(rgConfigSubscriber.getReaderGroupId(), subscriberRG.getReaderGroupId());
subscribers = controller.listSubscribers(scope, stream1).get();
assertEquals(1, subscribers.size());
ReaderGroupConfig rgConfigNonSubscriber = ReaderGroupConfig.builder().disableAutomaticCheckpoints()
.stream(scopedStreamName1)
.build();
rgConfigNonSubscriber = ReaderGroupConfig.cloneConfig(rgConfigNonSubscriber, rgConfigSubscriber.getReaderGroupId(), rgConfigSubscriber.getGeneration());
long updatedGen = controller.updateReaderGroup(scope, readerGroupName, rgConfigNonSubscriber).join();
assertEquals(rgConfigNonSubscriber.getGeneration() + 1, updatedGen);
updatedConfig = controller.getReaderGroupConfig(scope, readerGroupName).join();
assertEquals(rgConfigNonSubscriber.getReaderGroupId(), updatedConfig.getReaderGroupId());
assertEquals(rgConfigNonSubscriber.getRetentionType(), updatedConfig.getRetentionType());
assertEquals(rgConfigNonSubscriber.getGeneration() + 1, updatedConfig.getGeneration());
subscribers = controller.listSubscribers(scope, stream1).get();
assertEquals(0, subscribers.size());
// Update ReaderGroup from Non-Subscriber to Subscriber
ReaderGroupConfig subscriberConfig = ReaderGroupConfig.builder()
.stream(scopedStreamName1).retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
.build();
subscriberConfig = ReaderGroupConfig.cloneConfig(subscriberConfig, updatedConfig.getReaderGroupId(), updatedConfig.getGeneration());
long gen = controller.updateReaderGroup(scope, readerGroupName, subscriberConfig).join();
assertEquals(subscriberConfig.getGeneration() + 1, gen);
ReaderGroupConfig newUpdatedConfig = controller.getReaderGroupConfig(scope, readerGroupName).join();
assertEquals(subscriberConfig.getReaderGroupId(), newUpdatedConfig.getReaderGroupId());
assertEquals(subscriberConfig.getRetentionType(), newUpdatedConfig.getRetentionType());
assertEquals(gen, newUpdatedConfig.getGeneration());
subscribers = controller.listSubscribers(scope, stream1).get();
assertEquals(1, subscribers.size());
}
/**
 * Creates a subscriber reader group on the given stream and verifies that its
 * truncation stream-cut can be updated, and then updated again with a later cut.
 */
private static void updateSubscriberStreamCutTest(Controller controller, final String scope, final String stream) throws InterruptedException, ExecutionException {
  final Stream fqStream = Stream.of(scope, stream);
  final Segment segmentZero = new Segment(scope, stream, 0L);
  final Segment segmentOne = new Segment(scope, stream, 1L);
  // Starting and ending stream-cuts for the reader group configuration.
  Map<Stream, StreamCut> startCuts = ImmutableMap.of(
      fqStream, new StreamCutImpl(fqStream, ImmutableMap.of(segmentZero, 10L, segmentOne, 10L)));
  Map<Stream, StreamCut> endCuts = ImmutableMap.of(
      fqStream, new StreamCutImpl(fqStream, ImmutableMap.of(segmentZero, 200L, segmentOne, 300L)));
  final ReaderGroupConfig rgConfig = ReaderGroupConfig.builder()
      .automaticCheckpointIntervalMillis(30000L)
      .groupRefreshTimeMillis(20000L)
      .maxOutstandingCheckpointRequest(2)
      .retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT)
      .startingStreamCuts(startCuts)
      .endingStreamCuts(endCuts)
      .build();
  final String rg1 = "rg1";
  ReaderGroupConfig createConfig = controller.createReaderGroup(scope, rg1, rgConfig).get();
  // Creation must assign a real group id and start at generation zero.
  assertFalse(ReaderGroupConfig.DEFAULT_UUID.equals(createConfig.getReaderGroupId()));
  assertEquals(0L, createConfig.getGeneration());
  // The new group must show up as the stream's only subscriber.
  List<String> subs = controller.listSubscribers(scope, stream).get();
  assertEquals(1, subs.size());
  String subscriber1 = NameUtils.getScopedReaderGroupName(scope, rg1);
  assertEquals(subscriber1, subs.get(0));
  // First truncation stream-cut update must succeed.
  StreamCut firstCut = new StreamCutImpl(fqStream, ImmutableMap.of(segmentZero, 1L, segmentOne, 11L));
  assertTrue(controller.updateSubscriberStreamCut(scope, stream, subscriber1, createConfig.getReaderGroupId(), createConfig.getGeneration(), firstCut).get());
  // A second update with a later cut must also succeed.
  StreamCut secondCut = new StreamCutImpl(fqStream, ImmutableMap.of(segmentZero, 2L, segmentOne, 22L));
  assertTrue(controller.updateSubscriberStreamCut(scope, stream, subscriber1, createConfig.getReaderGroupId(), createConfig.getGeneration(), secondCut).get());
}
/**
 * Creates a scope and stream, seals the stream, and verifies that a sealed
 * stream exposes no active segments.
 *
 * Bug fix: the scope was previously created with the hard-coded literal
 * "scopeSeal" instead of the {@code scopeSeal} parameter, so any caller
 * passing a different scope name would fail at stream creation.
 */
private static void sealAStream(ControllerWrapper controllerWrapper, Controller controller,
                                final ScalingPolicy scalingPolicy, final String scopeSeal,
                                final String streamNameSeal) throws InterruptedException, ExecutionException {
  // Use the parameter, not the literal "scopeSeal".
  controllerWrapper.getControllerService().createScope(scopeSeal, 0L).get();
  final StreamConfiguration configSeal = StreamConfiguration.builder()
      .scalingPolicy(scalingPolicy)
      .build();
  assertTrue(controller.createStream(scopeSeal, streamNameSeal, configSeal).get());
  // Exercise the current-segments read path once before sealing (result unused).
  controller.getCurrentSegments(scopeSeal, streamNameSeal).get();
  assertTrue(controller.sealStream(scopeSeal, streamNameSeal).get());
  StreamSegments currentSegs = controller.getCurrentSegments(scopeSeal, streamNameSeal).get();
  assertTrue("FAILURE: No active segments should be present in a sealed stream", currentSegs.getSegments().isEmpty());
}
/**
 * Creates {@code streamName} in {@code scope} and asserts that the controller
 * reports successful creation.
 */
private static void createAStream(String scope, String streamName, Controller controller,
                                  final StreamConfiguration config) throws InterruptedException,
    ExecutionException {
  final Boolean created = controller.createStream(scope, streamName, config).get();
  assertTrue(created);
}
/**
 * Sealing a stream that was never created must fail.
 */
private static void sealNonExistantStream(Controller controller, final String scopeSeal) {
  final boolean sealed = Futures.await(controller.sealStream(scopeSeal, "nonExistentStream"));
  assertFalse(sealed);
}
/**
 * Creating a stream that already exists must be rejected by the controller.
 */
private static void streamDuplicationNotAllowed(String scope, String streamName, Controller controller,
                                                final StreamConfiguration config) throws InterruptedException,
    ExecutionException {
  final Boolean created = controller.createStream(scope, streamName, config).get();
  assertFalse(created);
}
/**
 * Sealing an already-sealed stream succeeds (the operation is idempotent) and
 * the stream still exposes no active segments afterwards.
 */
private static void sealASealedStream(Controller controller, final String scopeSeal,
                                      final String streamNameSeal) throws InterruptedException, ExecutionException {
  final boolean sealed = controller.sealStream(scopeSeal, streamNameSeal).get();
  assertTrue(sealed);
  final StreamSegments activeSegments = controller.getCurrentSegments(scopeSeal, streamNameSeal).get();
  assertTrue("FAILURE: No active segments should be present in a sealed stream", activeSegments.getSegments().isEmpty());
}
/**
 * Creates the key-value table {@code kvtName} in {@code scope} and asserts
 * that the controller reports success.
 */
private static void createAKeyValueTable(String scope, String kvtName, Controller controller,
                                         final KeyValueTableConfiguration config) throws InterruptedException,
    ExecutionException {
  final Boolean created = controller.createKeyValueTable(scope, kvtName, config).get();
  assertTrue(created);
}
/**
 * Creating a key-value table with an invalid configuration (zero partition
 * count) must be rejected with an {@link IllegalArgumentException}.
 */
private static void createAKeyValueTableZeroPC(String scope, String kvtName, Controller controller,
                                               final KeyValueTableConfiguration config) {
  assertThrows(IllegalArgumentException.class,
      () -> controller.createKeyValueTable(scope, kvtName, config).join());
}
}
| |
package org.mondo.collaboration.security.query;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.log4j.Logger;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.incquery.runtime.api.IMatchProcessor;
import org.eclipse.incquery.runtime.api.IQuerySpecification;
import org.eclipse.incquery.runtime.api.IncQueryEngine;
import org.eclipse.incquery.runtime.api.impl.BaseMatcher;
import org.eclipse.incquery.runtime.exception.IncQueryException;
import org.eclipse.incquery.runtime.matchers.tuple.Tuple;
import org.eclipse.incquery.runtime.util.IncQueryLoggingUtil;
import org.mondo.collaboration.security.query.LockCHelperMatch;
import org.mondo.collaboration.security.query.util.LockCHelperQuerySpecification;
import wt.Control;
import wt.Module;
import wt.Signal;
/**
* Generated pattern matcher API of the org.mondo.collaboration.security.query.lockCHelper pattern,
* providing pattern-specific query methods.
*
* <p>Use the pattern matcher on a given model via {@link #on(IncQueryEngine)},
* e.g. in conjunction with {@link IncQueryEngine#on(Notifier)}.
*
* <p>Matches of the pattern will be represented as {@link LockCHelperMatch}.
*
* <p>Original source:
* <code><pre>
* pattern lockCHelper(signal :Signal, vendor, provider : Control, consumer : Module) {
* Composite.vendor(composite,vendor);
* Composite.submodules(composite,provider);
* Control.provides(provider, signal);
* Module.consumes(consumer, signal);
* //Signal.type(signal, ::Output);
* }
* </pre></code>
*
* @see LockCHelperMatch
* @see LockCHelperProcessor
* @see LockCHelperQuerySpecification
*
*/
// NOTE(review): generated EMF-IncQuery pattern matcher API for the lockCHelper
// pattern. Regenerate from the pattern definition rather than editing by hand.
@SuppressWarnings("all")
public class LockCHelperMatcher extends BaseMatcher<LockCHelperMatch> {
  /**
   * Initializes the pattern matcher within an existing EMF-IncQuery engine.
   * If the pattern matcher is already constructed in the engine, only a light-weight reference is returned.
   * The match set will be incrementally refreshed upon updates.
   * @param engine the existing EMF-IncQuery engine in which this matcher will be created.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   *
   */
  public static LockCHelperMatcher on(final IncQueryEngine engine) throws IncQueryException {
    // check if matcher already exists
    LockCHelperMatcher matcher = engine.getExistingMatcher(querySpecification());
    if (matcher == null) {
      matcher = new LockCHelperMatcher(engine);
      // do not have to "put" it into engine.matchers, reportMatcherInitialized() will take care of it
    }
    return matcher;
  }

  // Positions of the four pattern parameters within a match tuple/array.
  private final static int POSITION_SIGNAL = 0;

  private final static int POSITION_VENDOR = 1;

  private final static int POSITION_PROVIDER = 2;

  private final static int POSITION_CONSUMER = 3;

  private final static Logger LOGGER = IncQueryLoggingUtil.getLogger(LockCHelperMatcher.class);

  /**
   * Initializes the pattern matcher over a given EMF model root (recommended: Resource or ResourceSet).
   * If a pattern matcher is already constructed with the same root, only a light-weight reference is returned.
   * The scope of pattern matching will be the given EMF model root and below (see FAQ for more precise definition).
   * The match set will be incrementally refreshed upon updates from this scope.
   * <p>The matcher will be created within the managed {@link IncQueryEngine} belonging to the EMF model root, so
   * multiple matchers will reuse the same engine and benefit from increased performance and reduced memory footprint.
   * @param emfRoot the root of the EMF containment hierarchy where the pattern matcher will operate. Recommended: Resource or ResourceSet.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   * @deprecated use {@link #on(IncQueryEngine)} instead, e.g. in conjunction with {@link IncQueryEngine#on(Notifier)}
   *
   */
  @Deprecated
  public LockCHelperMatcher(final Notifier emfRoot) throws IncQueryException {
    this(IncQueryEngine.on(emfRoot));
  }

  /**
   * Initializes the pattern matcher within an existing EMF-IncQuery engine.
   * If the pattern matcher is already constructed in the engine, only a light-weight reference is returned.
   * The match set will be incrementally refreshed upon updates.
   * @param engine the existing EMF-IncQuery engine in which this matcher will be created.
   * @throws IncQueryException if an error occurs during pattern matcher creation
   * @deprecated use {@link #on(IncQueryEngine)} instead
   *
   */
  @Deprecated
  public LockCHelperMatcher(final IncQueryEngine engine) throws IncQueryException {
    super(engine, querySpecification());
  }

  /**
   * Returns the set of all matches of the pattern that conform to the given fixed values of some parameters.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @return matches represented as a LockCHelperMatch object.
   *
   */
  public Collection<LockCHelperMatch> getAllMatches(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer) {
    return rawGetAllMatches(new Object[]{pSignal, pVendor, pProvider, pConsumer});
  }

  /**
   * Returns an arbitrarily chosen match of the pattern that conforms to the given fixed values of some parameters.
   * Neither determinism nor randomness of selection is guaranteed.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @return a match represented as a LockCHelperMatch object, or null if no match is found.
   *
   */
  public LockCHelperMatch getOneArbitraryMatch(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer) {
    return rawGetOneArbitraryMatch(new Object[]{pSignal, pVendor, pProvider, pConsumer});
  }

  /**
   * Indicates whether the given combination of specified pattern parameters constitute a valid pattern match,
   * under any possible substitution of the unspecified parameters (if any).
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @return true if the input is a valid (partial) match of the pattern.
   *
   */
  public boolean hasMatch(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer) {
    return rawHasMatch(new Object[]{pSignal, pVendor, pProvider, pConsumer});
  }

  /**
   * Returns the number of all matches of the pattern that conform to the given fixed values of some parameters.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @return the number of pattern matches found.
   *
   */
  public int countMatches(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer) {
    return rawCountMatches(new Object[]{pSignal, pVendor, pProvider, pConsumer});
  }

  /**
   * Executes the given processor on each match of the pattern that conforms to the given fixed values of some parameters.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @param processor the action that will process each pattern match.
   *
   */
  public void forEachMatch(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer, final IMatchProcessor<? super LockCHelperMatch> processor) {
    rawForEachMatch(new Object[]{pSignal, pVendor, pProvider, pConsumer}, processor);
  }

  /**
   * Executes the given processor on an arbitrarily chosen match of the pattern that conforms to the given fixed values of some parameters.
   * Neither determinism nor randomness of selection is guaranteed.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @param processor the action that will process the selected match.
   * @return true if the pattern has at least one match with the given parameter values, false if the processor was not invoked
   *
   */
  public boolean forOneArbitraryMatch(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer, final IMatchProcessor<? super LockCHelperMatch> processor) {
    return rawForOneArbitraryMatch(new Object[]{pSignal, pVendor, pProvider, pConsumer}, processor);
  }

  /**
   * Returns a new (partial) match.
   * This can be used e.g. to call the matcher with a partial match.
   * <p>The returned match will be immutable. Use {@link #newEmptyMatch()} to obtain a mutable match object.
   * @param pSignal the fixed value of pattern parameter signal, or null if not bound.
   * @param pVendor the fixed value of pattern parameter vendor, or null if not bound.
   * @param pProvider the fixed value of pattern parameter provider, or null if not bound.
   * @param pConsumer the fixed value of pattern parameter consumer, or null if not bound.
   * @return the (partial) match object.
   *
   */
  public LockCHelperMatch newMatch(final Signal pSignal, final String pVendor, final Control pProvider, final Module pConsumer) {
    return LockCHelperMatch.newMatch(pSignal, pVendor, pProvider, pConsumer);
  }

  /**
   * Retrieve the set of values that occur in matches for signal.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  protected Set<Signal> rawAccumulateAllValuesOfsignal(final Object[] parameters) {
    Set<Signal> results = new HashSet<Signal>();
    rawAccumulateAllValues(POSITION_SIGNAL, parameters, results);
    return results;
  }

  /**
   * Retrieve the set of values that occur in matches for signal.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Signal> getAllValuesOfsignal() {
    return rawAccumulateAllValuesOfsignal(emptyArray());
  }

  /**
   * Retrieve the set of values that occur in matches for signal.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Signal> getAllValuesOfsignal(final LockCHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfsignal(partialMatch.toArray());
  }

  /**
   * Retrieve the set of values that occur in matches for signal.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Signal> getAllValuesOfsignal(final String pVendor, final Control pProvider, final Module pConsumer) {
    return rawAccumulateAllValuesOfsignal(new Object[]{
    null,
    pVendor,
    pProvider,
    pConsumer
    });
  }

  /**
   * Retrieve the set of values that occur in matches for vendor.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  protected Set<String> rawAccumulateAllValuesOfvendor(final Object[] parameters) {
    Set<String> results = new HashSet<String>();
    rawAccumulateAllValues(POSITION_VENDOR, parameters, results);
    return results;
  }

  /**
   * Retrieve the set of values that occur in matches for vendor.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<String> getAllValuesOfvendor() {
    return rawAccumulateAllValuesOfvendor(emptyArray());
  }

  /**
   * Retrieve the set of values that occur in matches for vendor.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<String> getAllValuesOfvendor(final LockCHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfvendor(partialMatch.toArray());
  }

  /**
   * Retrieve the set of values that occur in matches for vendor.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<String> getAllValuesOfvendor(final Signal pSignal, final Control pProvider, final Module pConsumer) {
    return rawAccumulateAllValuesOfvendor(new Object[]{
    pSignal,
    null,
    pProvider,
    pConsumer
    });
  }

  /**
   * Retrieve the set of values that occur in matches for provider.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  protected Set<Control> rawAccumulateAllValuesOfprovider(final Object[] parameters) {
    Set<Control> results = new HashSet<Control>();
    rawAccumulateAllValues(POSITION_PROVIDER, parameters, results);
    return results;
  }

  /**
   * Retrieve the set of values that occur in matches for provider.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Control> getAllValuesOfprovider() {
    return rawAccumulateAllValuesOfprovider(emptyArray());
  }

  /**
   * Retrieve the set of values that occur in matches for provider.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Control> getAllValuesOfprovider(final LockCHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfprovider(partialMatch.toArray());
  }

  /**
   * Retrieve the set of values that occur in matches for provider.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Control> getAllValuesOfprovider(final Signal pSignal, final String pVendor, final Module pConsumer) {
    return rawAccumulateAllValuesOfprovider(new Object[]{
    pSignal,
    pVendor,
    null,
    pConsumer
    });
  }

  /**
   * Retrieve the set of values that occur in matches for consumer.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  protected Set<Module> rawAccumulateAllValuesOfconsumer(final Object[] parameters) {
    Set<Module> results = new HashSet<Module>();
    rawAccumulateAllValues(POSITION_CONSUMER, parameters, results);
    return results;
  }

  /**
   * Retrieve the set of values that occur in matches for consumer.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Module> getAllValuesOfconsumer() {
    return rawAccumulateAllValuesOfconsumer(emptyArray());
  }

  /**
   * Retrieve the set of values that occur in matches for consumer.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Module> getAllValuesOfconsumer(final LockCHelperMatch partialMatch) {
    return rawAccumulateAllValuesOfconsumer(partialMatch.toArray());
  }

  /**
   * Retrieve the set of values that occur in matches for consumer.
   * @return the Set of all values, null if no parameter with the given name exists, empty set if there are no matches
   *
   */
  public Set<Module> getAllValuesOfconsumer(final Signal pSignal, final String pVendor, final Control pProvider) {
    return rawAccumulateAllValuesOfconsumer(new Object[]{
    pSignal,
    pVendor,
    pProvider,
    null
    });
  }

  // Converts a runtime tuple into a typed match; a ClassCastException here means
  // the model and the generated pattern types are out of sync.
  @Override
  protected LockCHelperMatch tupleToMatch(final Tuple t) {
    try {
      return LockCHelperMatch.newMatch((wt.Signal) t.get(POSITION_SIGNAL), (java.lang.String) t.get(POSITION_VENDOR), (wt.Control) t.get(POSITION_PROVIDER), (wt.Module) t.get(POSITION_CONSUMER));
    } catch(ClassCastException e) {
      LOGGER.error("Element(s) in tuple not properly typed!",e);
      return null;
    }
  }

  @Override
  protected LockCHelperMatch arrayToMatch(final Object[] match) {
    try {
      return LockCHelperMatch.newMatch((wt.Signal) match[POSITION_SIGNAL], (java.lang.String) match[POSITION_VENDOR], (wt.Control) match[POSITION_PROVIDER], (wt.Module) match[POSITION_CONSUMER]);
    } catch(ClassCastException e) {
      LOGGER.error("Element(s) in array not properly typed!",e);
      return null;
    }
  }

  @Override
  protected LockCHelperMatch arrayToMatchMutable(final Object[] match) {
    try {
      return LockCHelperMatch.newMutableMatch((wt.Signal) match[POSITION_SIGNAL], (java.lang.String) match[POSITION_VENDOR], (wt.Control) match[POSITION_PROVIDER], (wt.Module) match[POSITION_CONSUMER]);
    } catch(ClassCastException e) {
      LOGGER.error("Element(s) in array not properly typed!",e);
      return null;
    }
  }

  /**
   * @return the singleton instance of the query specification of this pattern
   * @throws IncQueryException if the pattern definition could not be loaded
   *
   */
  public static IQuerySpecification<LockCHelperMatcher> querySpecification() throws IncQueryException {
    return LockCHelperQuerySpecification.instance();
  }
}
| |
/*
Copyright 2007-2010 Selenium committers
Portions copyright 2011 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.firefox.internal;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.openqa.selenium.Beta;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.firefox.ExtensionConnection;
import org.openqa.selenium.firefox.FirefoxBinary;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.openqa.selenium.internal.Lock;
import org.openqa.selenium.logging.LocalLogs;
import org.openqa.selenium.logging.NeedsLocalLogs;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.remote.BeanToJsonConverter;
import org.openqa.selenium.remote.Command;
import org.openqa.selenium.remote.DriverCommand;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.JsonToBeanConverter;
import org.openqa.selenium.remote.Response;
import org.openqa.selenium.remote.SessionId;
import org.openqa.selenium.remote.internal.CircularOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.net.ConnectException;
import java.net.Socket;
import java.util.List;
import java.util.Map;
@Beta
public class MarionetteConnection implements ExtensionConnection, NeedsLocalLogs {
  // Capacity of the circular buffer used to capture the firefox process output.
  private final static int BUFFER_SIZE = 4096;

  // How long (milliseconds, taken from the binary's timeout) start() keeps
  // retrying the socket connection before failing.
  private final long connectTimeout;
  private final FirefoxBinary process;
  private final FirefoxProfile profile;
  private final String host;
  // Serializes browser start-up across drivers sharing this lock (held for the
  // whole of start()'s connection loop).
  private final Lock lock;
  private File profileDir;

  // Translates WebDriver wire-protocol command names to marionette command
  // names. Commands absent from this map are presumably sent under their
  // original name — confirm in serializeCommand (not visible in this chunk).
  private static Map<String, String> seleniumToMarionetteCommandMap = ImmutableMap.<String, String>builder()
      .put(DriverCommand.GET, "get")
      .put(DriverCommand.GET_CURRENT_WINDOW_HANDLE, "getWindow")
      .put(DriverCommand.GET_WINDOW_HANDLES, "getWindows")
      .put(DriverCommand.CLOSE, "closeWindow")
      .put(DriverCommand.GET_CURRENT_URL, "getUrl")
      .put(DriverCommand.FIND_CHILD_ELEMENT, "findElement")
      .put(DriverCommand.FIND_CHILD_ELEMENTS, "findElements")
      .put(DriverCommand.GET_ELEMENT_LOCATION, "getElementPosition")
      .put(DriverCommand.GET_ALL_COOKIES, "getAllCookies")
      .put(DriverCommand.QUIT, "deleteSession")
      .build();

  // Connection state; all null until start() succeeds, reset to null by tryToConnect on failure.
  private Socket socket;
  private PrintWriter writer;
  private Reader reader;

  // "Actor" id assigned by marionette in start(); addresses this session in later commands.
  private String marionetteId;

  private LocalLogs logs = LocalLogs.getNullLogger();
public MarionetteConnection(Lock lock, FirefoxBinary binary, FirefoxProfile profile,
String host) throws Exception {
this.host = host;
this.connectTimeout = binary.getTimeout();
this.lock = lock;
this.profile = profile;
this.process = binary;
}
public void start() throws IOException {
int port = PortProber.findFreePort();
profile.setPreference("marionette.defaultPrefs.enabled", true);
profile.setPreference("marionette.defaultPrefs.port", port);
profile.setPreference("browser.warnOnQuit", false);
lock.lock(connectTimeout);
try {
profileDir = profile.layoutOnDisk();
process.clean(profile, profileDir);
String firefoxLogFile = System.getProperty("webdriver.firefox.logfile");
if (firefoxLogFile != null) {
if ("/dev/stdout".equals(firefoxLogFile)) {
process.setOutputWatcher(System.out);
} else {
File logFile = new File(firefoxLogFile);
process.setOutputWatcher(new CircularOutputStream(logFile, BUFFER_SIZE));
}
}
process.startProfile(profile, profileDir, "-foreground", "-marionette");
// Just for the record; the critical section is all along while firefox is starting with the
// profile.
// There is currently no mechanism for the profile to notify us when it has started
// successfully and is ready for requests. Instead, we must loop until we're able to
// open a connection with the server, at which point it should be safe to continue
// (since the extension shouldn't accept connections until it is ready for requests).
long waitUntil = System.currentTimeMillis() + connectTimeout;
while (!isConnected()) {
tryToConnect(host, port);
if (waitUntil < System.currentTimeMillis()) {
throw new Error("Can't connect to " + host + ":" + port + "\n" + process.getConsoleOutput());
}
try {
Thread.sleep(100);
} catch (InterruptedException ignored) {
// Do nothing
}
}
} catch (IOException e) {
e.printStackTrace();
throw new WebDriverException(
String.format("Failed to connect to binary %s on port %d; process output follows: \n%s",
process.toString(), port, process.getConsoleOutput()), e);
} catch (WebDriverException e) {
throw new WebDriverException(
String.format("Failed to connect to binary %s on port %d; process output follows: \n%s",
process.toString(), port, process.getConsoleOutput()), e);
} catch (Exception e) {
throw new WebDriverException(e);
} finally {
lock.unlock();
}
// Marionette sends back an initial acknowledgement response upon first
// connect. We need to read that response before we can proceed.
String rawResponse = receiveResponse();
// This initializes the "actor" for future communication with this instance.
sendCommand(serializeCommand(new Command(null, "getMarionetteID")));
String getMarionetteIdRawResponse = receiveResponse();
System.out.println(getMarionetteIdRawResponse);
Map<String, Object> map = new JsonToBeanConverter().convert(Map.class,
getMarionetteIdRawResponse);
marionetteId = map.get("id").toString();
}
private void tryToConnect(String host, int port) {
try {
socket = new Socket(host, port);
writer = new PrintWriter(socket.getOutputStream(), true);
reader = new InputStreamReader(socket.getInputStream());
} catch (ConnectException ex) {
socket = null;
writer = null;
reader = null;
} catch (IOException ex) {
socket = null;
writer = null;
reader = null;
}
}
/**
 * Sends {@code command} to Marionette and converts the raw JSON reply into a
 * Selenium {@link Response}, papering over several Marionette protocol
 * compliance quirks along the way.
 *
 * @param command the WebDriver command to execute
 * @return the decoded response; for NEW_SESSION the session id is synthesized
 *         from the raw "value" payload (see Mozilla bug 1073732)
 * @throws IOException if the socket read fails
 */
public Response execute(Command command) throws IOException {
  String commandAsString = serializeCommand(command);
  sendCommand(commandAsString);
  String rawResponse = receiveResponse();
  @SuppressWarnings("unchecked")
  Map<String, Object> map = new JsonToBeanConverter().convert(Map.class, rawResponse);
  Response response;
  if (DriverCommand.NEW_SESSION.equals(command.getName())) {
    // See https://bugzilla.mozilla.org/show_bug.cgi?id=1073732
    response = new Response(new SessionId(new BeanToJsonConverter().convert(map.get("value"))));
    response.setValue(Maps.newHashMap());
  } else if (map.containsKey("error")) {
    // Marionette compliance issue: error responses should, at a minimum, put
    // the status property at the root of the object, i.e.
    //   { status: 7, value: { message: "...", stackTrace: ... } }
    // but Marionette nests everything under "error" instead.
    response = new Response();
    Object value = map.get("error");
    if (value instanceof Map) {
      // instanceof already guarantees non-null, so the original redundant
      // null test on the cast result has been dropped.
      @SuppressWarnings("unchecked")
      Map<String, Object> errorMap = (Map<String, Object>) value;
      response.setStatus(Integer.parseInt(errorMap.get("status").toString()));
      errorMap.remove("status");
      response.setValue(errorMap);
    } else if (value != null) {
      response.setStatus(ErrorCodes.UNHANDLED_ERROR);
      response.setValue(value + ": " + map.get("message"));
    }
  } else {
    response = new JsonToBeanConverter().convert(Response.class, rawResponse);
    // Marionette compliance issue: getActiveElement returns the raw element
    // id as the value; wrap it as { "ELEMENT": "<id>" } so client bindings
    // can distinguish a plain string from an element reference returned by
    // executeScript.
    if (DriverCommand.GET_ACTIVE_ELEMENT.equals(command.getName())
        && response.getStatus() == ErrorCodes.SUCCESS) {
      Map<String, Object> wrappedElement = Maps.newHashMap();
      wrappedElement.put("ELEMENT", response.getValue().toString());
      response.setValue(wrappedElement);
    }
  }
  return response;
}
/**
 * Translates a Selenium {@link Command} into the JSON message format that
 * Marionette expects, remapping command names and parameters where the two
 * protocols differ.
 *
 * @param command the WebDriver command to serialize
 * @return the JSON string ready to be framed and sent over the wire
 */
private String serializeCommand(Command command) {
String commandName = command.getName();
Map<String, Object> params = Maps.newHashMap();
// Copy so the caller's parameter map is never mutated.
params.putAll(command.getParameters());
if (DriverCommand.NEW_SESSION.equals(commandName)) {
// Marionette's newSession takes no capabilities payload.
params.remove("desiredCapabilities");
} else if (DriverCommand.SET_TIMEOUT.equals(commandName)) {
// Selenium's single setTimeout splits into two Marionette commands
// depending on the timeout type; the "type" parameter itself is dropped.
String timeoutType = (String) params.get("type");
if ("implicit".equals(timeoutType)) {
commandName = "setSearchTimeout";
} else if ("script".equals(timeoutType)) {
commandName = "setScriptTimeout";
}
params.remove("type");
} else if (DriverCommand.FIND_CHILD_ELEMENT.equals(commandName)
|| DriverCommand.FIND_CHILD_ELEMENTS.equals(commandName)) {
// Marionette calls the parent-element parameter "element", not "id".
renameParameter(params, "id", "element");
} else if (DriverCommand.CLICK.equals(commandName)
|| DriverCommand.DOUBLE_CLICK.equals(commandName)
|| DriverCommand.MOUSE_DOWN.equals(commandName)
|| DriverCommand.MOUSE_UP.equals(commandName)
|| DriverCommand.MOVE_TO.equals(commandName)) {
// Mouse interactions are expressed as a single-entry "actionChain",
// optionally targeting a specific element.
String actionName = commandName;
commandName = "actionChain";
List<Object> action = Lists.newArrayList();
action.add(actionName);
if (params.containsKey("element")) {
action.add(params.get("element"));
params.remove("element");
}
List<Object> actions = Lists.newArrayList();
actions.add(action);
params.put("chain", actions);
}
// Apply any remaining straight name translations.
if (seleniumToMarionetteCommandMap.containsKey(commandName)) {
commandName = seleniumToMarionetteCommandMap.get(commandName);
}
Map<String, Object> map = Maps.newHashMap();
// Address the message to the negotiated actor, or "root" before handshake.
map.put("to", marionetteId != null ? marionetteId : "root");
map.put("name", commandName);
if (command.getSessionId() != null) {
// See https://bugzilla.mozilla.org/show_bug.cgi?id=1073732
map.put("sessionId", new JsonToBeanConverter().convert(Map.class, command.getSessionId().toString()));
}
map.put("parameters", params);
return new BeanToJsonConverter().convert(map);
}
/**
 * Moves the value stored under {@code origParName} to {@code newParName} and
 * removes the old key. A missing original key yields a {@code null} value
 * under the new name, mirroring {@link Map#get}.
 */
private void renameParameter(Map<String, Object> params, String origParName, String newParName) {
  Object value = params.get(origParName);
  params.put(newParName, value);
  params.remove(origParName);
}
/**
 * Writes one command frame to Marionette using its length-prefixed wire
 * format: {@code <payload length>:<payload>}.
 *
 * @param commandAsString the JSON-serialized command payload
 */
private void sendCommand(String commandAsString) {
  // Marionette frames messages as "<length>:<json>".
  String line = commandAsString.length() + ":" + commandAsString;
  // NOTE(review): the original dumped every outgoing frame to System.out;
  // that debug leftover has been removed to keep stdout clean.
  writer.write(line);
  writer.flush();
}
/**
 * Reads one length-prefixed response frame ("&lt;length&gt;:&lt;payload&gt;")
 * from Marionette and returns the payload.
 *
 * @return the response payload, exactly {@code length} characters long
 * @throws IOException if the stream ends before a complete frame is read, or
 *         the reading thread is interrupted
 */
private String receiveResponse() throws IOException {
  StringBuilder response = new StringBuilder();
  char[] buf = new char[1024];
  int len = reader.read(buf);
  if (len < 0) {
    // The original passed -1 straight to append() and crashed; report EOF
    // explicitly instead.
    throw new IOException("Connection closed by remote end before any response was read");
  }
  response.append(buf, 0, len);
  // NOTE(review): like the original, this assumes the first read delivers at
  // least the full "<length>:" prefix; a partial prefix would fail the parse
  // below. TODO confirm Marionette never fragments that small a chunk.
  String[] parts = response.toString().split(":", 2);
  int contentLength = Integer.parseInt(parts[0]);
  // Keep reading until the whole payload (plus the "<length>:" prefix) is in.
  while (response.length() < contentLength + ":".length() + parts[0].length()) {
    len = reader.read(buf);
    if (len > 0) {
      response.append(buf, 0, len);
    } else if (len < 0) {
      // EOF mid-frame: the original slept and looped forever here.
      throw new IOException("Connection closed by remote end mid-response: " + response);
    } else {
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        // Preserve the interrupt status for callers instead of swallowing it.
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted while waiting for response", e);
      }
    }
  }
  parts = response.toString().split(":", 2);
  return parts[1].substring(0, contentLength);
}
/**
 * Tears down the connection and the associated browser process/profile.
 * Should only be called after the QUIT command has been sent.
 */
public void quit() {
  // Close each resource independently so a failure or a null handle (from a
  // connection attempt that never succeeded) cannot leak the others; the
  // original closed all three in a single try and stopped at the first error.
  if (writer != null) {
    writer.close(); // PrintWriter.close() does not throw; errors are internal.
  }
  try {
    if (reader != null) {
      reader.close();
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
  try {
    if (socket != null) {
      socket.close();
    }
  } catch (IOException e) {
    e.printStackTrace();
  }
  socket = null;
  // quit() is only called after the QUIT command has been sent, so clean up
  // the browser process and any temporary profile as well.
  process.quit();
  if (profileDir != null) {
    profile.clean(profileDir);
  }
}
/**
 * Reports whether a live socket to the Marionette server is currently held.
 */
public boolean isConnected() {
  if (socket == null) {
    return false;
  }
  return socket.isConnected();
}
/** Injects the {@code LocalLogs} sink used for local log collection. */
public void setLocalLogs(LocalLogs logs) {
this.logs = logs;
}
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/chat/tags/sakai-10.1/chat-impl/impl/src/java/org/sakaiproject/chat2/model/impl/ChatEntityProducer.java $
* $Id: ChatEntityProducer.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
/**
*
*/
package org.sakaiproject.chat2.model.impl;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Stack;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.chat2.model.ChatChannel;
import org.sakaiproject.chat2.model.ChatManager;
import org.sakaiproject.chat2.model.ChatMessage;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.EntityNotDefinedException;
import org.sakaiproject.entity.api.EntityPermissionException;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.EntityTransferrer;
import org.sakaiproject.entity.api.HttpAccess;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.time.cover.TimeService;
import org.sakaiproject.user.cover.UserDirectoryService;
import org.sakaiproject.util.StringUtil;
import org.sakaiproject.util.Web;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* @author chrismaurer
*
*/
public class ChatEntityProducer implements EntityProducer, EntityTransferrer {
protected final Log logger = LogFactory.getLog(getClass());
private EntityManager entityManager;
private ChatManager chatManager;
private static final String ARCHIVE_VERSION = "2.4"; // in case new features are added in future exports
private static final String VERSION_ATTR = "version";
private static final String CHANNEL_PROP = "channel";
private static final String SYNOPTIC_TOOL = "synoptic_tool";
private static final String NAME = "name";
private static final String VALUE = "value";
private static final String PROPERTIES = "properties";
private static final String PROPERTY = "property";
/**
 * Component manager init hook: registers this producer with the entity
 * manager under the chat reference root. Registration failures are logged
 * and swallowed so component startup continues.
 */
protected void init() throws Exception {
  logger.info("init()");
  try {
    getEntityManager().registerEntityProducer(this, ChatManager.REFERENCE_ROOT);
  } catch (Exception e) {
    logger.warn("Error registering Chat Entity Producer", e);
  }
}
/**
 * Component manager teardown hook; nothing to release, so it only logs.
 */
protected void destroy() {
  logger.info("destroy()");
}
/**
 * {@inheritDoc}
 *
 * The chat tool is the only tool this producer handles.
 */
public String[] myToolIds() {
  return new String[] { ChatManager.CHAT_TOOL_ID };
}
/**
 * Returns the fully qualified class name used to label this producer's
 * archive elements.
 *
 * @return the service name for this class
 */
protected String serviceName() {
  Class<?> clazz = ChatEntityProducer.class;
  return clazz.getName();
}
/**
 * Resolves the chat message entity identified by the reference's id.
 *
 * @throws IdUnusedException propagated from the chat service lookup
 * @throws PermissionException propagated from the chat service lookup
 */
public ChatMessage getMessage(Reference reference) throws IdUnusedException, PermissionException {
  String messageId = reference.getId();
  return getChatManager().getMessage(messageId);
}
/**
 * Resolves the chat channel entity identified by the reference's id.
 *
 * @throws IdUnusedException propagated from the chat service lookup
 * @throws PermissionException propagated from the chat service lookup
 */
public ChatChannel getChannel(Reference reference) throws IdUnusedException, PermissionException {
  String channelId = reference.getId();
  return getChatManager().getChatChannel(channelId);
}
/**
 * {@inheritDoc}
 *
 * Serializes every chat channel of the site (plus the synoptic tool options)
 * under an element named after {@link #serviceName()} and returns a short
 * results log describing what was archived.
 */
public String archive(String siteId, Document doc, Stack stack, String archivePath, List attachments)
{
  // Buffer for the human-readable results log returned to the archiver.
  StringBuilder results = new StringBuilder();
  int channelCount = 0;
  try
  {
    // Start with an element carrying our (service) name and format version.
    Element element = doc.createElement(serviceName());
    element.setAttribute(VERSION_ATTR, ARCHIVE_VERSION);
    ((Element) stack.peek()).appendChild(element);
    stack.push(element);
    Element chat = doc.createElement(ChatManager.CHAT);
    List channelList = getChatManager().getContextChannels(siteId, true);
    if (channelList != null && !channelList.isEmpty())
    {
      Iterator channelIterator = channelList.iterator();
      while (channelIterator.hasNext())
      {
        ChatChannel channel = (ChatChannel) channelIterator.next();
        chat.appendChild(channel.toXml(doc, stack));
        channelCount++;
      }
      results.append("archiving " + getLabel() + ": (" + channelCount + ") channels archived successfully.\n");
    }
    else
    {
      results.append("archiving " + getLabel()
          + ": empty chat room archived.\n");
    }
    // Archive the chat synoptic tool options alongside the channels.
    archiveSynopticOptions(siteId, doc, chat);
    ((Element) stack.peek()).appendChild(chat);
    // NOTE: the original pushed `chat` and immediately popped it again;
    // that no-op pair has been removed.
  }
  catch (Exception any)
  {
    // Include the exception so failures are actually diagnosable
    // (the original logged only the service name).
    logger.warn("archive: exception archiving service: " + serviceName(), any);
  }
  stack.pop();
  return results.toString();
}
/**
 * Adds this tool's synoptic options to the archive document, if the site has
 * the synoptic tool placed and the placement carries any properties.
 *
 * @param siteId site being archived
 * @param doc archive document being built
 * @param element parent element to append the synoptic options under
 */
public void archiveSynopticOptions(String siteId, Document doc, Element element)
{
  try
  {
    Site site = SiteService.getSite(siteId);
    ToolConfiguration synTool = site.getToolForCommonId("sakai.synoptic." + getLabel());
    // Guard against sites without the synoptic tool placed; the original
    // NPE'd here and relied on the blanket catch below.
    Properties synProp = (synTool == null) ? null : synTool.getPlacementConfig();
    if (synProp != null && synProp.size() > 0) {
      Element synElement = doc.createElement(SYNOPTIC_TOOL);
      Element synProps = doc.createElement(PROPERTIES);
      Set synPropSet = synProp.keySet();
      Iterator propIter = synPropSet.iterator();
      while (propIter.hasNext())
      {
        // One <property name="..." value="..."/> per placement property.
        String propName = (String) propIter.next();
        Element synPropEl = doc.createElement(PROPERTY);
        synPropEl.setAttribute(NAME, propName);
        synPropEl.setAttribute(VALUE, synProp.getProperty(propName));
        synProps.appendChild(synPropEl);
      }
      synElement.appendChild(synProps);
      element.appendChild(synElement);
    }
  }
  catch (Exception e)
  {
    // Log the exception itself, not just the service name.
    logger.warn("archive: exception archiving synoptic options for service: " + serviceName(), e);
  }
}
/**
 * {@inheritDoc}
 *
 * Resolves a reference to either a chat channel or a chat message entity,
 * returning {@code null} (after a warning) for unknown subtypes or when the
 * lookup fails.
 */
public Entity getEntity(Reference ref)
{
  Entity rv = null;
  String subType = ref.getSubType();
  try
  {
    if (ChatManager.REF_TYPE_CHANNEL.equals(subType))
    {
      // Channel references resolve by the full reference string.
      rv = getChatManager().getChatChannel(ref.getReference());
    }
    else if (ChatManager.REF_TYPE_MESSAGE.equals(subType))
    {
      rv = getMessage(ref);
    }
    else
    {
      logger.warn("getEntity(): unknown message ref subtype: " + subType + " in ref: " + ref.getReference());
    }
  }
  catch (NullPointerException e)
  {
    // Defensive: malformed references are treated like a failed lookup.
    logger.warn("getEntity(): " + e);
  }
  catch (IdUnusedException e)
  {
    logger.warn("getEntity(): " + e);
  }
  catch (PermissionException e)
  {
    logger.warn("getEntity(): " + e);
  }
  return rv;
}
/**
 * {@inheritDoc}
 *
 * Not implemented: always returns {@code null}, i.e. no authz groups are
 * reported for chat references.
 */
public Collection getEntityAuthzGroups(Reference ref, String userId)
{
//TODO implement this
return null;
}
/**
 * {@inheritDoc}
 *
 * Builds a short human-readable description for a reference; falls back to a
 * generic "Message: &lt;ref&gt;" string when the reference is not a channel
 * or the channel cannot be resolved.
 */
public String getEntityDescription(Reference ref)
{
// Default description, used for messages and for any lookup failure.
String rv = "Message: " + ref.getReference();
try
{
// if this is a channel
if (ChatManager.REF_TYPE_CHANNEL.equals(ref.getSubType()))
{
ChatChannel channel = getChannel(ref);
rv = "Channel: " + channel.getId() + " (" + channel.getContext() + ")";
}
}
catch (PermissionException e)
{
// Deliberately ignored: fall back to the default description.
}
catch (IdUnusedException e)
{
// Deliberately ignored: fall back to the default description.
}
catch (NullPointerException e)
{
// Deliberately ignored: fall back to the default description.
}
return rv;
}
/**
 * Not implemented: chat entities expose no resource properties.
 *
 * @see org.sakaiproject.entity.api.EntityProducer#getEntityResourceProperties(org.sakaiproject.entity.api.Reference)
 * @return always {@code null}
 */
public ResourceProperties getEntityResourceProperties(Reference ref) {
// Stub: no per-entity properties are defined for chat.
return null;
}
/**
 * {@inheritDoc}
 *
 * Returns the access URL for a channel or message reference, or {@code null}
 * when the subtype is unknown or resolution fails.
 */
public String getEntityUrl(Reference ref)
{
String url = null;
try
{
// if this is a channel
if (ChatManager.REF_TYPE_CHANNEL.equals(ref.getSubType()))
{
ChatChannel channel = getChannel(ref);
url = channel.getUrl();
}
// otherwise a message
else if (ChatManager.REF_TYPE_MESSAGE.equals(ref.getSubType()))
{
ChatMessage message = getMessage(ref);
url = message.getUrl();
}
else
logger.warn("getUrl(): unknown message ref subtype: " + ref.getSubType() + " in ref: " + ref.getReference());
}
catch (PermissionException e)
{
logger.warn("getUrl(): " + e);
}
catch (IdUnusedException e)
{
logger.warn("getUrl(): " + e);
}
catch (NullPointerException e)
{
// Defensive: malformed references are treated like a failed lookup.
logger.warn("getUrl(): " + e);
}
return url;
}
/**
 * {@inheritDoc}
 *
 * Serves a single chat message as a minimal standalone XHTML page. The page
 * is rendered into a byte buffer first so an accurate Content-Length can be
 * set (Internet Explorer misbehaves without it).
 */
public HttpAccess getHttpAccess()
{
  return new HttpAccess()
  {
    public void handleAccess(HttpServletRequest req, HttpServletResponse res, Reference ref,
        Collection copyrightAcceptedRefs) throws EntityPermissionException, EntityNotDefinedException
    {
      try
      {
        //TODO: Isn't there a better way to do this than build out the whole page here??
        // Render into a temporary stream for speed and to obtain a byte count
        // for setContentLength().
        ByteArrayOutputStream outByteStream = new ByteArrayOutputStream();
        // NOTE(review): uses the platform default charset while the page
        // header declares UTF-8 — consider an explicit UTF-8 writer; confirm
        // the deployment default before changing.
        OutputStreamWriter sw = new OutputStreamWriter(outByteStream);
        String skin = ServerConfigurationService.getString("skin.default");
        String skinRepo = ServerConfigurationService.getString("skin.repo");
        ChatMessage message = getMessage(ref);
        String title = ref.getDescription();
        String date = TimeService.newTime(message.getMessageDate().getTime()).toStringLocalFullZ();
        String from = UserDirectoryService.getUser(message.getOwner()).getDisplayName();
        String groups = "";
        // Escape the stored body so embedded markup cannot break the page.
        String body = Web.escapeHtml(message.getBody());
        sw.write("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
            + "<html xmlns=\"http://www.w3.org/1999/xhtml\" lang=\"en\" xml:lang=\"en\">\n"
            + "<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\" />\n"
            + "<link href=\"");
        sw.write(skinRepo);
        sw.write("/tool_base.css\" type=\"text/css\" rel=\"stylesheet\" media=\"all\" />\n" + "<link href=\"");
        sw.write(skinRepo);
        sw.write("/");
        sw.write(skin);
        sw.write("/tool.css\" type=\"text/css\" rel=\"stylesheet\" media=\"all\" />\n"
            + "<meta http-equiv=\"Content-Style-Type\" content=\"text/css\" />\n" + "<title>");
        sw.write(title);
        sw.write("</title></head><body><div class=\"portletBody\">\n" + "<h2>");
        sw.write(title);
        sw.write("</h2><ul><li>Date ");
        sw.write(date);
        sw.write("</li>");
        sw.write("<li>From ");
        sw.write(from);
        sw.write("</li>");
        sw.write(groups);
        sw.write("<ul><p>");
        sw.write(body);
        sw.write("</p></div></body></html> ");
        sw.flush();
        res.setContentType("text/html");
        res.setContentLength(outByteStream.size());
        if (outByteStream.size() > 0)
        {
          // Increase the buffer size for more speed.
          res.setBufferSize(outByteStream.size());
        }
        OutputStream out = null;
        try
        {
          out = res.getOutputStream();
          if (outByteStream.size() > 0)
          {
            outByteStream.writeTo(out);
          }
          out.flush();
          // close() now happens once, in the finally block below; the
          // original closed here and again in finally.
        }
        catch (Throwable ignored)
        {
          // Best effort: the client may have disconnected mid-write.
        }
        finally
        {
          if (out != null)
          {
            try
            {
              out.close();
            }
            catch (Throwable ignored)
            {
              // Ignore close failures.
            }
          }
        }
      }
      catch (PermissionException e)
      {
        throw new EntityPermissionException(e.getUser(), e.getLocalizedMessage(), e.getResource());
      }
      catch (IdUnusedException e)
      {
        throw new EntityNotDefinedException(e.getId());
      }
      catch (Throwable t)
      {
        // Typo fixed: the original message read "Faied to find message".
        throw new RuntimeException("Failed to find message ", t);
      }
    }
  };
}
/**
 * {@inheritDoc}
 *
 * Delegates to the chat manager's label (also used to locate the
 * "sakai.synoptic.&lt;label&gt;" tool placement elsewhere in this class).
 */
public String getLabel() {
return getChatManager().getLabel();
}
/**
 * {@inheritDoc}
 *
 * Merges archived chat content into an existing site: walks the archive DOM
 * for our chat element, recreates each archived channel in the target site,
 * copies the synoptic tool placement properties, and returns a short results
 * log. A DOM failure or any other exception aborts the merge with a logged
 * error and a failure line in the results.
 */
public String merge(String siteId, Element root, String archivePath, String fromSiteId, Map attachmentNames, Map userIdTrans,
Set userListAllowImport)
{
logger.debug("trying to merge chat");
// buffer for the results log
StringBuilder results = new StringBuilder();
int count = 0;
if (siteId != null && siteId.trim().length() > 0)
{
try
{
NodeList allChildrenNodes = root.getChildNodes();
int length = allChildrenNodes.getLength();
for (int i = 0; i < length; i++)
{
count++;
Node siteNode = allChildrenNodes.item(i);
if (siteNode.getNodeType() == Node.ELEMENT_NODE)
{
Element chatElement = (Element) siteNode;
if (chatElement.getTagName().equals(ChatManager.CHAT))
{
// Only merge when the target site actually has the chat tool placed.
Site site = SiteService.getSite(siteId);
if (site.getToolForCommonId(ChatManager.CHAT_TOOL_ID) != null) {
// add the chat rooms and synoptic tool options
NodeList chatNodes = chatElement.getChildNodes();
int lengthChatNodes = chatNodes.getLength();
for (int cn = 0; cn < lengthChatNodes; cn++)
{
Node chatNode = chatNodes.item(cn);
if (chatNode.getNodeType() == Node.ELEMENT_NODE)
{
Element channelElement = (Element) chatNode;
if (channelElement.getTagName().equals(CHANNEL_PROP)) {
// Recreate the archived channel in the target site context.
ChatChannel channel = ChatChannel.xmlToChatChannel(channelElement, siteId);
//save the channel
getChatManager().updateChannel(channel, false);
}
else if (channelElement.getTagName().equals(SYNOPTIC_TOOL))
{
// Copy archived synoptic placement properties onto the
// target site's synoptic chat tool placement.
ToolConfiguration synTool = site.getToolForCommonId("sakai.synoptic.chat");
Properties synProps = synTool.getPlacementConfig();
NodeList synPropNodes = channelElement.getChildNodes();
for (int props = 0; props < synPropNodes.getLength(); props++)
{
Node propsNode = synPropNodes.item(props);
if (propsNode.getNodeType() == Node.ELEMENT_NODE)
{
Element synPropEl = (Element) propsNode;
if (synPropEl.getTagName().equals(PROPERTIES))
{
NodeList synProperties = synPropEl.getChildNodes();
for (int p = 0; p < synProperties.getLength(); p++)
{
Node propertyNode = synProperties.item(p);
if (propertyNode.getNodeType() == Node.ELEMENT_NODE)
{
Element propEl = (Element) propertyNode;
if (propEl.getTagName().equals(PROPERTY))
{
String propName = propEl.getAttribute(NAME);
String propValue = propEl.getAttribute(VALUE);
if (propName != null && propName.length() > 0 && propValue != null && propValue.length() > 0)
{
synProps.setProperty(propName, propValue);
}
}
}
}
}
}
}
}
}
}
// Persist the placement changes made above.
SiteService.save(site);
}
}
}
}
results.append("merging chat " + siteId + " (" + count
+ ") chat items.\n");
}
catch (DOMException e)
{
logger.error(e.getMessage(), e);
results.append("merging " + getLabel()
+ " failed during xml parsing.\n");
}
catch (Exception e)
{
logger.error(e.getMessage(), e);
results.append("merging " + getLabel() + " failed.\n");
}
}
return results.toString();
} // merge
/**
 * {@inheritDoc}
 *
 * Parses a chat reference string of the form
 * {@code <root>/<subtype>/<context>/<channel>[/<message>]} and fills in the
 * supplied {@link Reference}. Returns {@code true} for any reference under
 * the chat root (even with an unknown subtype, which is only warned about).
 */
public boolean parseEntityReference(String reference, Reference ref)
{
if (reference.startsWith(ChatManager.REFERENCE_ROOT))
{
String[] parts = StringUtil.split(reference, Entity.SEPARATOR);
String id = null;
String subType = null;
String context = null;
String container = null;
// the first part will be null, then next the service, the third will be "msg" or "channel"
if (parts.length > 2)
{
subType = parts[2];
if (ChatManager.REF_TYPE_CHANNEL.equals(subType))
{
// next is the context id
if (parts.length > 3)
{
context = parts[3];
// next is the channel id
if (parts.length > 4)
{
id = parts[4];
}
}
}
else if (ChatManager.REF_TYPE_MESSAGE.equals(subType))
{
// next three parts are context, channel (container) and message id
if (parts.length > 5)
{
context = parts[3];
container = parts[4];
id = parts[5];
}
}
else
logger.warn("parse(): unknown message subtype: " + subType + " in ref: " + reference);
}
ref.set(ChatManager.APPLICATION_ID, subType, id, container, context);
return true;
}
return false;
}
/**
 * {@inheritDoc}
 *
 * Chat content participates in site archive and merge, so always true.
 */
public boolean willArchiveMerge()
{
return true;
}
/**
 * {@inheritDoc}
 *
 * Copies every chat room from {@code fromContext} into {@code toContext}
 * (description, filter settings, placement-default flag) and then copies the
 * synoptic tool options. Failures on individual channels are logged and the
 * copy continues with the next channel.
 *
 * TODO: link the old placement id to the new placement id instead of passing
 * null to createNewChannel.
 */
public void transferCopyEntities(String fromContext, String toContext, List ids)
{
  try
  {
    // Retrieve all of the chat rooms in the source site.
    List channels = getChatManager().getContextChannels(fromContext, true);
    if (channels != null && !channels.isEmpty())
    {
      Iterator channelIterator = channels.iterator();
      while (channelIterator.hasNext())
      {
        ChatChannel oldChannel = (ChatChannel) channelIterator.next();
        ChatChannel newChannel = getChatManager().createNewChannel(toContext, oldChannel.getTitle(), false, false, null);
        newChannel.setDescription(oldChannel.getDescription());
        newChannel.setFilterType(oldChannel.getFilterType());
        newChannel.setFilterParam(oldChannel.getFilterParam());
        newChannel.setPlacementDefaultChannel(oldChannel.isPlacementDefaultChannel());
        try {
          getChatManager().updateChannel(newChannel, false);
        }
        catch (Exception e)
        {
          // Log with the full stack trace (the original string-concatenated
          // the exception and discarded it).
          logger.warn("Exception while creating channel: " + newChannel.getTitle(), e);
        }
      }
    }
    transferSynopticOptions(fromContext, toContext);
  }
  catch (Exception any)
  {
    logger.warn(".transferCopyEntities(): exception in handling " + serviceName() + " : ", any);
  }
}
/**
 * Copies the synoptic tool placement options from one site to another.
 * Does nothing when either site lacks the synoptic tool placement.
 *
 * @param fromContext source site id
 * @param toContext destination site id
 */
protected void transferSynopticOptions(String fromContext, String toContext)
{
  try
  {
    Site fromSite = SiteService.getSite(fromContext);
    ToolConfiguration fromSynTool = fromSite.getToolForCommonId("sakai.synoptic." + getLabel());
    Site toSite = SiteService.getSite(toContext);
    ToolConfiguration toSynTool = toSite.getToolForCommonId("sakai.synoptic." + getLabel());
    if (fromSynTool == null || toSynTool == null)
    {
      // One of the sites has no synoptic tool placed; the original NPE'd
      // here and fell into the generic catch below.
      return;
    }
    Properties fromSynProp = fromSynTool.getPlacementConfig();
    Properties toSynProp = toSynTool.getPlacementConfig();
    if (fromSynProp != null && !fromSynProp.isEmpty() && toSynProp != null)
    {
      Set synPropSet = fromSynProp.keySet();
      Iterator propIter = synPropSet.iterator();
      while (propIter.hasNext())
      {
        String propName = (String) propIter.next();
        String propValue = fromSynProp.getProperty(propName);
        if (propValue != null && propValue.length() > 0)
        {
          toSynProp.setProperty(propName, propValue);
        }
      }
      SiteService.save(toSite);
    }
  }
  catch (PermissionException pe)
  {
    logger.warn("PermissionException transferring synoptic options for " + serviceName() + ':', pe);
  }
  catch (IdUnusedException e)
  {
    // Message fixed: it is a site id, not a channel, that was not found.
    logger.warn("Site " + fromContext + " or " + toContext + " cannot be found. ");
  }
  catch (Exception e)
  {
    logger.warn("transferSynopticOptions(): exception in handling " + serviceName() + " : ", e);
  }
}
/** @return the injected Sakai {@link EntityManager}. */
public EntityManager getEntityManager() {
return entityManager;
}
/** Dependency injection: the Sakai entity manager (set by the component manager). */
public void setEntityManager(EntityManager entityManager) {
this.entityManager = entityManager;
}
/** @return the injected {@link ChatManager} service. */
public ChatManager getChatManager() {
return chatManager;
}
/** Dependency injection: the chat service implementation. */
public void setChatManager(ChatManager chatManager) {
this.chatManager = chatManager;
}
/**
 * Variant of {@link #transferCopyEntities(String, String, List)} that can
 * first remove all existing chat channels from the destination site when
 * {@code cleanup} is true, then performs the normal copy.
 */
public void transferCopyEntities(String fromContext, String toContext, List ids, boolean cleanup)
{
  try
  {
    if (cleanup)  // idiom fix: was "cleanup == true"
    {
      // Remove every existing chat room from the destination site first.
      List channels = getChatManager().getContextChannels(toContext, true);
      if (channels != null && !channels.isEmpty())
      {
        Iterator channelIterator = channels.iterator();
        while (channelIterator.hasNext())
        {
          ChatChannel oldChannel = (ChatChannel) channelIterator.next();
          try
          {
            getChatManager().deleteChannel(oldChannel);
          }
          catch (Exception e)
          {
            // Keep the stack trace instead of string-concatenating it away.
            logger.debug("Exception while removing chat channel: ", e);
          }
        }
      }
    }
    transferCopyEntities(fromContext, toContext, ids);
  }
  catch (Exception e)
  {
    logger.debug("Chat transferCopyEntities(): exception in handling ", e);
  }
}
}
| |
/**
* Copyright 2015 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
**/
package io.confluent.connect.hdfs;
import com.qubole.streamx.s3.S3SinkConnectorConstants;
import com.qubole.streamx.s3.wal.DBWAL;
import com.qubole.streamx.s3.wal.DBWAL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.errors.IllegalWorkerStateException;
import org.apache.kafka.connect.errors.SchemaProjectorException;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.sink.SinkTaskContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import io.confluent.connect.avro.AvroData;
import io.confluent.connect.hdfs.errors.HiveMetaStoreException;
import io.confluent.connect.hdfs.filter.CommittedFileFilter;
import io.confluent.connect.hdfs.filter.TopicPartitionCommittedFileFilter;
import io.confluent.connect.hdfs.hive.HiveMetaStore;
import io.confluent.connect.hdfs.hive.HiveUtil;
import io.confluent.connect.hdfs.partitioner.Partitioner;
import io.confluent.connect.hdfs.schema.Compatibility;
import io.confluent.connect.hdfs.schema.SchemaUtils;
import io.confluent.connect.hdfs.storage.Storage;
import io.confluent.connect.hdfs.wal.WAL;
public class TopicPartitionWriter {
private static final Logger log = LoggerFactory.getLogger(TopicPartitionWriter.class);
private WAL wal;
private Map<String, String> tempFiles;
private Map<String, RecordWriter<SinkRecord>> writers;
private TopicPartition tp;
private Partitioner partitioner;
private String url;
private String topicsDir;
private State state;
private Queue<SinkRecord> buffer;
private boolean recovered;
private Storage storage;
private SinkTaskContext context;
private int recordCounter;
private int flushSize;
private long rotateIntervalMs;
private long lastRotate;
private long rotateScheduleIntervalMs;
private long nextScheduledRotate;
private RecordWriterProvider writerProvider;
private Configuration conf;
private AvroData avroData;
private Set<String> appended;
private long offset;
private boolean sawInvalidOffset;
private Map<String, Long> startOffsets;
private Map<String, Long> offsets;
private long timeoutMs;
private long failureTime;
private Compatibility compatibility;
private Schema currentSchema;
private HdfsSinkConnectorConfig connectorConfig;
private String extension;
private final String zeroPadOffsetFormat;
private DateTimeZone timeZone;
private final boolean hiveIntegration;
private String hiveDatabase;
private HiveMetaStore hiveMetaStore;
private SchemaFileReader schemaFileReader;
private HiveUtil hive;
private ExecutorService executorService;
private Queue<Future<Void>> hiveUpdateFutures;
private Set<String> hivePartitions;
/**
 * Convenience constructor for writers without Hive integration; delegates to
 * the full constructor with all Hive-related collaborators as {@code null}.
 */
public TopicPartitionWriter(
TopicPartition tp,
Storage storage,
RecordWriterProvider writerProvider,
Partitioner partitioner,
HdfsSinkConnectorConfig connectorConfig,
SinkTaskContext context,
AvroData avroData) {
this(tp, storage, writerProvider, partitioner, connectorConfig, context, avroData, null, null, null, null, null);
}
/**
 * Full constructor wiring all collaborators. Hive-related collaborators
 * (metastore, hive util, executor, update futures) are only retained when
 * Hive integration is enabled in the connector configuration.
 */
public TopicPartitionWriter(
TopicPartition tp,
Storage storage,
RecordWriterProvider writerProvider,
Partitioner partitioner,
HdfsSinkConnectorConfig connectorConfig,
SinkTaskContext context,
AvroData avroData,
HiveMetaStore hiveMetaStore,
HiveUtil hive,
SchemaFileReader schemaFileReader,
ExecutorService executorService,
Queue<Future<Void>> hiveUpdateFutures) {
this.tp = tp;
this.connectorConfig = connectorConfig;
this.context = context;
this.avroData = avroData;
this.storage = storage;
this.writerProvider = writerProvider;
this.partitioner = partitioner;
this.url = storage.url();
this.conf = storage.conf();
this.schemaFileReader = schemaFileReader;
// Pull the frequently used settings out of the connector config once.
topicsDir = connectorConfig.getString(HdfsSinkConnectorConfig.TOPICS_DIR_CONFIG);
flushSize = connectorConfig.getInt(HdfsSinkConnectorConfig.FLUSH_SIZE_CONFIG);
rotateIntervalMs = connectorConfig.getLong(HdfsSinkConnectorConfig.ROTATE_INTERVAL_MS_CONFIG);
rotateScheduleIntervalMs = connectorConfig.getLong(HdfsSinkConnectorConfig.ROTATE_SCHEDULE_INTERVAL_MS_CONFIG);
timeoutMs = connectorConfig.getLong(HdfsSinkConnectorConfig.RETRY_BACKOFF_CONFIG);
compatibility = SchemaUtils.getCompatibility(
connectorConfig.getString(HdfsSinkConnectorConfig.SCHEMA_COMPATIBILITY_CONFIG));
String logsDir = connectorConfig.getString(HdfsSinkConnectorConfig.LOGS_DIR_CONFIG);
// One write-ahead log per topic partition, under the configured logs dir.
wal = storage.wal(logsDir, tp);
buffer = new LinkedList<>();
writers = new HashMap<>();
tempFiles = new HashMap<>();
appended = new HashSet<>();
startOffsets = new HashMap<>();
offsets = new HashMap<>();
// Writers always begin in recovery; write() drives the state machine on.
state = State.RECOVERY_STARTED;
failureTime = -1L;
offset = -1L;
sawInvalidOffset = false;
extension = writerProvider.getExtension();
// Format string used to zero-pad offsets in committed file names.
zeroPadOffsetFormat
= "%0" +
connectorConfig.getInt(HdfsSinkConnectorConfig.FILENAME_OFFSET_ZERO_PAD_WIDTH_CONFIG) +
"d";
hiveIntegration = connectorConfig.getBoolean(HdfsSinkConnectorConfig.HIVE_INTEGRATION_CONFIG);
if (hiveIntegration) {
hiveDatabase = connectorConfig.getString(HdfsSinkConnectorConfig.HIVE_DATABASE_CONFIG);
this.hiveMetaStore = hiveMetaStore;
this.hive = hive;
this.executorService = executorService;
this.hiveUpdateFutures = hiveUpdateFutures;
hivePartitions = new HashSet<>();
}
// The time zone only matters for schedule-based rotation.
if(rotateScheduleIntervalMs > 0) {
timeZone = DateTimeZone.forID(connectorConfig.getString(HdfsSinkConnectorConfig.TIMEZONE_CONFIG));
}
// Initialize rotation timers
updateRotationTimers();
}
private enum State {
RECOVERY_STARTED,
RECOVERY_PARTITION_PAUSED,
WAL_APPLIED,
WAL_TRUNCATED,
OFFSET_RESET,
WRITE_STARTED,
WRITE_PARTITION_PAUSED,
SHOULD_ROTATE,
TEMP_FILE_CLOSED,
WAL_APPENDED,
FILE_COMMITTED;
private static State[] vals = values();
public State next() {
return vals[(this.ordinal() + 1) % vals.length];
}
}
  /**
   * Runs the recovery sequence for this topic partition: pause consumption,
   * replay the WAL, truncate it, restore the committed offset, then resume.
   *
   * Fall-through between cases is intentional: each case performs one step and
   * advances {@code state}, so after a transient failure the next call resumes
   * exactly at the step that failed instead of redoing earlier steps.
   *
   * @return true if recovery completed, false if a step failed (a retry
   *         timeout is scheduled and the state is left at the failed step)
   */
  @SuppressWarnings("fallthrough")
  public boolean recover() {
    try {
      switch (state) {
        case RECOVERY_STARTED:
          log.info("Started recovery for topic partition {}", tp);
          pause();
          nextState();
        case RECOVERY_PARTITION_PAUSED:
          applyWAL();
          nextState();
        case WAL_APPLIED:
          truncateWAL();
          nextState();
        case WAL_TRUNCATED:
          resetOffsets();
          nextState();
        case OFFSET_RESET:
          resume();
          nextState();
          log.info("Finished recovery for topic partition {}", tp);
          break;
        default:
          log.error("{} is not a valid state to perform recovery for topic partition {}.", state, tp);
      }
    } catch (ConnectException e) {
      // Leave state untouched so the next recover() call retries this step.
      log.error("Recovery failed at state {}", state, e);
      setRetryTimeout(timeoutMs);
      return false;
    }
    return true;
  }
private void updateRotationTimers() {
lastRotate = System.currentTimeMillis();
if(log.isDebugEnabled() && rotateIntervalMs > 0) {
log.debug("Update last rotation timer. Next rotation for {} will be in {}ms", tp, rotateIntervalMs);
}
if (rotateScheduleIntervalMs > 0) {
nextScheduledRotate = DateTimeUtils.getNextTimeAdjustedByDay(lastRotate, rotateScheduleIntervalMs, timeZone);
if (log.isDebugEnabled()) {
log.debug("Update scheduled rotation timer. Next rotation for {} will be at {}", tp, new DateTime(nextScheduledRotate).withZone(timeZone).toString());
}
}
}
  /**
   * Drains the record buffer through the write state machine: pause the
   * partition, project each record against the current schema, write it to a
   * temp file, and — when a rotation condition is met — close temp files,
   * append rename intents to the WAL and commit the files to their final
   * location. Fall-through between cases is intentional (see recover()).
   *
   * If a previous attempt failed less than {@code timeoutMs} ago the call
   * returns immediately (backoff).
   */
  @SuppressWarnings("fallthrough")
  public void write() {
    long now = System.currentTimeMillis();
    if (failureTime > 0 && now - failureTime < timeoutMs) {
      return;
    }
    // States before WRITE_STARTED mean recovery has not finished yet.
    if (state.compareTo(State.WRITE_STARTED) < 0) {
      boolean success = recover();
      if (!success) {
        return;
      }
      updateRotationTimers();
    }
    while(!buffer.isEmpty()) {
      try {
        switch (state) {
          case WRITE_STARTED:
            pause();
            nextState();
          case WRITE_PARTITION_PAUSED:
            // Lazily seed the schema from the last committed file in HDFS so
            // compatibility checks survive restarts.
            if (currentSchema == null) {
              if (compatibility != Compatibility.NONE && offset != -1) {
                String topicDir = FileUtils.topicDirectory(url, topicsDir, tp.topic());
                CommittedFileFilter filter = new TopicPartitionCommittedFileFilter(tp);
                FileStatus fileStatusWithMaxOffset = FileUtils.fileStatusWithMaxOffset(storage, new Path(topicDir), filter);
                if (fileStatusWithMaxOffset != null) {
                  currentSchema = schemaFileReader.getSchema(conf, fileStatusWithMaxOffset.getPath());
                }
              }
            }
            SinkRecord record = buffer.peek();
            Schema valueSchema = record.valueSchema();
            if (SchemaUtils.shouldChangeSchema(valueSchema, currentSchema, compatibility)) {
              // Schema changed: update Hive metadata and, if records were
              // already written with the old schema, rotate them out first.
              currentSchema = valueSchema;
              if (hiveIntegration) {
                createHiveTable();
                alterHiveSchema();
              }
              if (recordCounter > 0) {
                nextState();
              } else {
                break;
              }
            } else {
              SinkRecord projectedRecord = SchemaUtils.project(record, currentSchema, compatibility);
              writeRecord(projectedRecord);
              buffer.poll();
              if (shouldRotate(now)) {
                log.info("Starting commit and rotation for topic partition {} with start offsets {}"
                         + " and end offsets {}", tp, startOffsets, offsets);
                nextState();
                // Fall through and try to rotate immediately
              } else {
                break;
              }
            }
          case SHOULD_ROTATE:
            updateRotationTimers();
            closeTempFile();
            nextState();
          case TEMP_FILE_CLOSED:
            appendToWAL();
            nextState();
          case WAL_APPENDED:
            commitFile();
            nextState();
          case FILE_COMMITTED:
            // Loop back to consume the next buffered record.
            setState(State.WRITE_PARTITION_PAUSED);
            break;
          default:
            log.error("{} is not a valid state to write record for topic partition {}.", state, tp);
        }
      } catch (SchemaProjectorException | IllegalWorkerStateException | HiveMetaStoreException e ) {
        // Programming/configuration errors: not retriable, fail the task.
        throw new RuntimeException(e);
      } catch (IOException | ConnectException e) {
        // Transient storage errors: back off and retry from the current state.
        log.error("Exception on topic partition {}: ", tp, e);
        failureTime = System.currentTimeMillis();
        setRetryTimeout(timeoutMs);
        break;
      }
    }
    if (buffer.isEmpty()) {
      // committing files after waiting for rotateIntervalMs time but less than flush.size records available
      if (recordCounter > 0 && shouldRotate(now)) {
        log.info("committing files after waiting for rotateIntervalMs time but less than flush.size records available.");
        updateRotationTimers();
        try {
          closeTempFile();
          appendToWAL();
          commitFile();
        } catch (IOException e) {
          log.error("Exception on topic partition {}: ", tp, e);
          failureTime = System.currentTimeMillis();
          setRetryTimeout(timeoutMs);
        }
      }
      resume();
      state = State.WRITE_STARTED;
    }
  }
public void close() throws ConnectException {
log.debug("Closing TopicPartitionWriter {}", tp);
List<Exception> exceptions = new ArrayList<>();
for (String encodedPartition : tempFiles.keySet()) {
try {
if (writers.containsKey(encodedPartition)) {
log.debug("Discarding in progress tempfile {} for {} {}",
tempFiles.get(encodedPartition), tp, encodedPartition);
closeTempFile(encodedPartition);
deleteTempFile(encodedPartition);
}
} catch (IOException e) {
log.error("Error discarding temp file {} for {} {} when closing TopicPartitionWriter:",
tempFiles.get(encodedPartition), tp, encodedPartition, e);
}
}
writers.clear();
try {
wal.close();
} catch (ConnectException e) {
log.error("Error closing {}.", wal.getLogFile(), e);
exceptions.add(e);
}
startOffsets.clear();
offsets.clear();
if (exceptions.size() != 0) {
StringBuilder sb = new StringBuilder();
for (Exception exception: exceptions) {
sb.append(exception.getMessage());
sb.append("\n");
}
throw new ConnectException("Error closing writer: " + sb.toString());
}
}
public void buffer(SinkRecord sinkRecord) {
buffer.add(sinkRecord);
}
public long offset() {
return offset;
}
public Map<String, RecordWriter<SinkRecord>> getWriters() {
return writers;
}
public Map<String, String> getTempFiles() {
return tempFiles;
}
public String getExtension() {
return writerProvider.getExtension();
}
private String getDirectory(String encodedPartition) {
return partitioner.generatePartitionedPath(tp.topic(), encodedPartition);
}
private void nextState() {
state = state.next();
}
private void setState(State state) {
this.state = state;
}
private boolean shouldRotate(long now) {
boolean periodicRotation = rotateIntervalMs > 0 && now - lastRotate >= rotateIntervalMs;
boolean scheduledRotation = rotateScheduleIntervalMs > 0 && now >= nextScheduledRotate;
boolean messageSizeRotation = recordCounter >= flushSize;
return periodicRotation || scheduledRotation || messageSizeRotation;
}
private void readOffsetFromWAL() throws ConnectException {
offset = wal.readOffsetFromWAL();
}
private void readOffset() throws ConnectException {
try {
String path = FileUtils.topicDirectory(url, topicsDir, tp.topic());
CommittedFileFilter filter = new TopicPartitionCommittedFileFilter(tp);
FileStatus fileStatusWithMaxOffset = FileUtils.fileStatusWithMaxOffset(storage, new Path(path), filter);
if (fileStatusWithMaxOffset != null) {
offset = FileUtils.extractOffset(fileStatusWithMaxOffset.getPath().getName()) + 1;
}
} catch (IOException e) {
throw new ConnectException(e);
}
}
private void pause() {
context.pause(tp);
}
private void resume() {
context.resume(tp);
}
private RecordWriter<SinkRecord> getWriter(SinkRecord record, String encodedPartition)
throws ConnectException {
try {
if (writers.containsKey(encodedPartition)) {
return writers.get(encodedPartition);
}
String tempFile = getTempFile(encodedPartition);
RecordWriter<SinkRecord> writer = writerProvider.getRecordWriter(conf, tempFile, record, avroData);
writers.put(encodedPartition, writer);
if (hiveIntegration && !hivePartitions.contains(encodedPartition)) {
addHivePartition(encodedPartition);
hivePartitions.add(encodedPartition);
}
return writer;
} catch (IOException e) {
throw new ConnectException(e);
}
}
private String getTempFile(String encodedPartition) {
String tempFile;
if (tempFiles.containsKey(encodedPartition)) {
tempFile = tempFiles.get(encodedPartition);
} else {
String directory = HdfsSinkConnectorConstants.TEMPFILE_DIRECTORY + getDirectory(encodedPartition);
tempFile = FileUtils.tempFileName(url, topicsDir, directory, extension);
tempFiles.put(encodedPartition, tempFile);
}
return tempFile;
}
private void applyWAL() throws ConnectException {
if (!recovered) {
wal.apply();
}
}
private void truncateWAL() throws ConnectException {
if (!recovered) {
wal.truncate();
}
}
private void resetOffsets() throws ConnectException {
if (!recovered) {
if(wal instanceof DBWAL)
readOffsetFromWAL();
else
readOffset();
// Note that we must *always* request that we seek to an offset here. Currently the framework will still commit
// Kafka offsets even though we track our own (see KAFKA-3462), which can result in accidentally using that offset
// if one was committed but no files were rolled to their final location in HDFS (i.e. some data was accepted,
// written to a tempfile, but then that tempfile was discarded). To protect against this, even if we just want
// to start at offset 0 or reset to the earliest offset, we specify that explicitly to forcibly override any
// committed offsets.
long seekOffset = offset > 0 ? offset : 0;
log.debug("Resetting offset for {} to {}", tp, seekOffset);
context.offset(tp, seekOffset);
recovered = true;
}
}
private void writeRecord(SinkRecord record) throws IOException {
long expectedOffset = offset + recordCounter;
if (offset == -1) {
offset = record.kafkaOffset();
} else if (record.kafkaOffset() != expectedOffset) {
// Currently it's possible to see stale data with the wrong offset after a rebalance when you
// rewind, which we do since we manage our own offsets. See KAFKA-2894.
if (!sawInvalidOffset) {
log.info(
"Ignoring stale out-of-order record in {}-{}. Has offset {} instead of expected offset {}",
record.topic(), record.kafkaPartition(), record.kafkaOffset(), expectedOffset);
}
sawInvalidOffset = true;
return;
}
if (sawInvalidOffset) {
log.info(
"Recovered from stale out-of-order records in {}-{} with offset {}",
record.topic(), record.kafkaPartition(), expectedOffset);
sawInvalidOffset = false;
}
String encodedPartition = partitioner.encodePartition(record);
RecordWriter<SinkRecord> writer = getWriter(record, encodedPartition);
writer.write(record);
if (!startOffsets.containsKey(encodedPartition)) {
startOffsets.put(encodedPartition, record.kafkaOffset());
offsets.put(encodedPartition, record.kafkaOffset());
} else {
offsets.put(encodedPartition, record.kafkaOffset());
}
recordCounter++;
}
private void closeTempFile(String encodedPartition) throws IOException {
if (writers.containsKey(encodedPartition)) {
RecordWriter<SinkRecord> writer = writers.get(encodedPartition);
writer.close();
writers.remove(encodedPartition);
}
}
private void closeTempFile() throws IOException {
for (String encodedPartition: tempFiles.keySet()) {
closeTempFile(encodedPartition);
}
}
private void appendToWAL(String encodedPartition) throws IOException {
String tempFile = tempFiles.get(encodedPartition);
if (appended.contains(tempFile)) {
return;
}
if (!startOffsets.containsKey(encodedPartition)) {
return;
}
long startOffset = startOffsets.get(encodedPartition);
long endOffset = offsets.get(encodedPartition);
String directory = getDirectory(encodedPartition);
String committedFile = FileUtils.committedFileName(url, topicsDir, directory, tp,
startOffset, endOffset, extension,
zeroPadOffsetFormat);
wal.append(tempFile, committedFile);
appended.add(tempFile);
}
private void appendToWAL() throws IOException {
beginAppend();
for (String encodedPartition: tempFiles.keySet()) {
appendToWAL(encodedPartition);
}
endAppend();
}
private void beginAppend() throws IOException {
if (!appended.contains(WAL.beginMarker)) {
wal.append(WAL.beginMarker, "");
}
}
private void endAppend() throws IOException {
if (!appended.contains(WAL.endMarker)) {
wal.append(WAL.endMarker, "");
}
}
private void commitFile() throws IOException {
appended.clear();
for (String encodedPartition: tempFiles.keySet()) {
commitFile(encodedPartition);
}
}
private void commitFile(String encodedPartiton) throws IOException {
if (!startOffsets.containsKey(encodedPartiton)) {
return;
}
long startOffset = startOffsets.get(encodedPartiton);
long endOffset = offsets.get(encodedPartiton);
String tempFile = tempFiles.get(encodedPartiton);
String directory = getDirectory(encodedPartiton);
String committedFile = FileUtils.committedFileName(url, topicsDir, directory, tp,
startOffset, endOffset, extension,
zeroPadOffsetFormat);
String directoryName = FileUtils.directoryName(url, topicsDir, directory);
if (!storage.exists(directoryName)) {
storage.mkdirs(directoryName);
}
storage.commit(tempFile, committedFile);
startOffsets.remove(encodedPartiton);
offset = offset + recordCounter;
recordCounter = 0;
log.info("Committed {} for {}", committedFile, tp);
}
private void deleteTempFile(String encodedPartiton) throws IOException {
storage.delete(tempFiles.get(encodedPartiton));
}
private void setRetryTimeout(long timeoutMs) {
context.timeout(timeoutMs);
}
private void createHiveTable() {
Future<Void> future = executorService.submit(new Callable<Void>() {
@Override
public Void call() throws HiveMetaStoreException {
hive.createTable(hiveDatabase, tp.topic(), currentSchema, partitioner);
return null;
}
});
hiveUpdateFutures.add(future);
}
private void alterHiveSchema() {
Future<Void> future = executorService.submit(new Callable<Void>() {
@Override
public Void call() throws HiveMetaStoreException {
hive.alterSchema(hiveDatabase, tp.topic(), currentSchema);
return null;
}
});
hiveUpdateFutures.add(future);
}
private void addHivePartition(final String location) {
Future<Void> future = executorService.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
hiveMetaStore.addPartition(hiveDatabase, tp.topic(), location);
return null;
}
});
hiveUpdateFutures.add(future);
}
}
| |
package cz.metacentrum.perun.core.impl;
import cz.metacentrum.perun.core.api.ExtSourcesManager;
import cz.metacentrum.perun.core.api.PerunClient;
import cz.metacentrum.perun.core.api.PerunPrincipal;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.User;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ExtSourceUnsupportedOperationException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.SubjectNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.UserExtSourceNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.UserNotExistsException;
import cz.metacentrum.perun.core.blImpl.PerunBlImpl;
import cz.metacentrum.perun.core.implApi.ExtSourceApi;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * ExtSource class for REMS. Filters out subjects that do not have a corresponding user in Perun,
 * matched either by a REMS UserExtSource or by an additional UES in the format:
 * {extSourceName}|{extSourceClass}|{eppn}|0. The eppn is used as a 'login'.
 * @author Vojtech Sassmann <vojtech.sassmann@gmail.com>
 */
public class ExtSourceREMS extends ExtSourceSqlComplex implements ExtSourceApi {

	private static final Logger log = LoggerFactory.getLogger(ExtSourceREMS.class);

	private static PerunBlImpl perunBl;

	// filled by spring (perun-core.xml)
	public static PerunBlImpl setPerunBlImpl(PerunBlImpl perun) {
		perunBl = perun;
		return perun;
	}

	@Override
	public List<Map<String, String>> findSubjects(String searchString) throws ExtSourceUnsupportedOperationException {
		List<Map<String, String>> subjects = super.findSubjects(searchString);
		return filterNonExistingUsers(subjects);
	}

	@Override
	public List<Map<String, String>> findSubjects(String searchString, int maxResults) throws ExtSourceUnsupportedOperationException {
		List<Map<String, String>> subjects = super.findSubjects(searchString, maxResults);
		return filterNonExistingUsers(subjects);
	}

	@Override
	public Map<String, String> getSubjectByLogin(String login) throws SubjectNotExistsException {
		Map<String, String> subject = super.getSubjectByLogin(login);
		if (!isExistingUser(subject)) {
			throw new SubjectNotExistsException("Subject for given login does not exist in Perun");
		}
		return subject;
	}

	@Override
	public List<Map<String, String>> findSubjectsLogins(String searchString) {
		List<Map<String, String>> subjects = super.findSubjectsLogins(searchString);
		return filterNonExistingUsers(subjects);
	}

	@Override
	public List<Map<String, String>> findSubjectsLogins(String searchString, int maxResults) {
		List<Map<String, String>> subjects = super.findSubjectsLogins(searchString, maxResults);
		return filterNonExistingUsers(subjects);
	}

	@Override
	public List<Map<String, String>> getGroupSubjects(Map<String, String> attributes) {
		List<Map<String, String>> subjects = super.getGroupSubjects(attributes);
		return filterNonExistingUsers(subjects);
	}

	@Override
	public List<Map<String, String>> getSubjectGroups(Map<String, String> attributes) throws ExtSourceUnsupportedOperationException {
		throw new ExtSourceUnsupportedOperationException();
	}

	@Override
	public List<Map<String, String>> getUsersSubjects() {
		List<Map<String, String>> subjects = super.getUsersSubjects();
		return filterNonExistingUsers(subjects);
	}

	/**
	 * Filters out subjects that do not have a corresponding user in Perun, matched
	 * by a REMS UserExtSource or by additional UESes in the format:
	 * {extSourceName}|{extSourceClass}|{eppn}|0. The eppn is used as a 'login'.
	 *
	 * @param subjects subjects to be filtered
	 * @return list containing only subjects with an existing Perun user
	 * @throws InternalErrorException internalError
	 */
	private List<Map<String, String>> filterNonExistingUsers(List<Map<String, String>> subjects) {
		List<Map<String, String>> existingSubjects = new ArrayList<>();
		for (Map<String, String> subject : subjects) {
			if (isExistingUser(subject)) {
				existingSubjects.add(subject);
			}
		}
		return existingSubjects;
	}

	/**
	 * Checks if the given subject has a corresponding user in Perun, matched by a
	 * REMS UserExtSource or by additional UESes in the format:
	 * {extSourceName}|{extSourceClass}|{eppn}|0. The eppn is used as a 'login'.
	 *
	 * @param subject subject
	 * @return true if a matching Perun user exists, false otherwise
	 * @throws InternalErrorException if the subject is null or empty
	 */
	private boolean isExistingUser(Map<String, String> subject) {
		if (subject == null || subject.isEmpty()) {
			throw new InternalErrorException("Subject can not be empty or null: " + subject);
		}
		String login = subject.get("login");
		if (login == null || login.isEmpty()) {
			// BUGFIX: the '{}' placeholder previously had no argument, so the
			// subject was never logged.
			log.error("Failed to get user's login from subject {}", subject);
			return false;
		}
		PerunSession sess = getSession();
		// test if subject does not exist already with UserExtSourceREMS. If so, it means that subject already exists in perun.
		List<User> usersFromREMS = perunBl.getUsersManagerBl().getUsersByExtSourceTypeAndLogin(sess, ExtSourcesManager.EXTSOURCE_REMS, login);
		if (!usersFromREMS.isEmpty()) {
			return true;
		}
		List<String> extSources = getAdditionalUESes(subject);
		if (extSources.isEmpty()) {
			log.error("Failed to get any additionalues from subject {}", subject);
			return false;
		}
		for (String ues : extSources) {
			if (existsSubjectWithUes(ues)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Finds out whether a Perun user exists for the ext source and login encoded
	 * in the given UES. Format of the UES is {extSourceName}|{extSourceClass}|{eppn}|0.
	 * The eppn is used as a 'login'.
	 *
	 * @param ues ues with user login: {extSourceName}|{extSourceClass}|{eppn}|0
	 * @return true if an existing user is found for the given login, false otherwise
	 * @throws InternalErrorException internalError
	 */
	private boolean existsSubjectWithUes(String ues) {
		String[] extSourceSplit = ues.split("\\|", 4);
		if (extSourceSplit.length != 4) {
			// "Ivalid" typo fixed in the message below.
			log.error("Invalid format of additionalues_1. It should be '{extSourceName}|{extSourceClass}|{eppn}|0'. Actual: {}", ues);
			return false;
		}
		PerunSession sess = getSession();
		String extSourceName = extSourceSplit[0];
		String eppn = extSourceSplit[2];
		try {
			// try to find user by additionalues
			perunBl.getUsersManagerBl().getUserByExtSourceNameAndExtLogin(sess, extSourceName, eppn);
			return true;
		} catch (ExtSourceNotExistsException | UserExtSourceNotExistsException e) {
			log.error("Failed to get extSource with name '{}'", extSourceName);
		} catch (UserNotExistsException e) {
			return false;
		}
		return false;
	}

	/**
	 * Returns the values of all 'additionalues_*' attributes of the subject.
	 *
	 * @param subject subject
	 * @return list of additional UES strings (possibly empty)
	 */
	private List<String> getAdditionalUESes(Map<String, String> subject) {
		List<String> extSources = new ArrayList<>();
		for (Map.Entry<String, String> entry : subject.entrySet()) {
			if (entry.getKey().startsWith("additionalues_")) {
				extSources.add(entry.getValue());
			}
		}
		return extSources;
	}

	/**
	 * Creates an internal Perun session used for the existence lookups.
	 *
	 * @return a Perun session acting as the internal ExtSourceREMS principal
	 */
	private PerunSession getSession() {
		final PerunPrincipal pp = new PerunPrincipal("ExtSourceREMS", ExtSourcesManager.EXTSOURCE_NAME_INTERNAL, ExtSourcesManager.EXTSOURCE_INTERNAL);
		try {
			return perunBl.getPerunSession(pp, new PerunClient());
		} catch (InternalErrorException e) {
			throw new InternalErrorException("Failed to get session for ExtSourceREMS.", e);
		}
	}
}
| |
/*
* $Id$
* Copyright (C) 2006 Klaus Reimer <k@ailis.de>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package de.ailis.wlandsuite.game.blocks;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dom4j.Document;
import org.dom4j.Element;
import de.ailis.wlandsuite.common.exceptions.GameException;
import de.ailis.wlandsuite.game.RotatingXorInputStream;
import de.ailis.wlandsuite.game.RotatingXorOutputStream;
import de.ailis.wlandsuite.game.parts.Action;
import de.ailis.wlandsuite.game.parts.ActionClassMap;
import de.ailis.wlandsuite.game.parts.ActionMap;
import de.ailis.wlandsuite.game.parts.Actions;
import de.ailis.wlandsuite.game.parts.BattleStrings;
import de.ailis.wlandsuite.game.parts.CentralDirectory;
import de.ailis.wlandsuite.game.parts.Info;
import de.ailis.wlandsuite.game.parts.Monsters;
import de.ailis.wlandsuite.game.parts.NPCs;
import de.ailis.wlandsuite.game.parts.SpecialAction;
import de.ailis.wlandsuite.game.parts.SpecialActionTable;
import de.ailis.wlandsuite.game.parts.Strings;
import de.ailis.wlandsuite.game.parts.TileMap;
import de.ailis.wlandsuite.io.SeekableInputStream;
import de.ailis.wlandsuite.io.SeekableOutputStream;
import de.ailis.wlandsuite.utils.StringUtils;
import de.ailis.wlandsuite.utils.XmlUtils;
/**
* A game map.
*
* @author Klaus Reimer (k@ailis.de)
* @version $Revision$
*/
public class GameMap extends GameBlock implements Serializable
{
/** Serial version UID */
private static final long serialVersionUID = 3535759235422069966L;
/** The logger */
private static final Log log = LogFactory.getLog(GameMap.class);
/** The map size */
private int mapSize;
/** The MSQ block size */
private int msqSize;
/** The offset of the tilemap */
private int tilemapOffset;
/** The map info */
private Info info;
/** The battle strings */
private BattleStrings battleStrings;
    /** The action class map */
private ActionClassMap actionClassMap;
/** The action map */
private ActionMap actionMap;
/** The tiles map */
private TileMap tileMap;
/** The strings */
private Strings strings;
/** The NPCs */
private NPCs npcs;
/** The monsters */
private Monsters monsters;
/** The actions */
private final Map<Integer, Actions> actions;
/**
* Constructs a new map with the specified map size. The map size must be 64
* or 32. Maps are always quadratic. It's not possible to have different
* widths and heights.
*
* The MSQ block size must be specified so the object knows how many padding
* bytes must be inserted during save. MSQ block sizes are hardcoded in the
* EXE so new Maps must fit these hardcoded boundaries to be loadable.
*
* The tilemap offset must also be specified because the position of the
* tilemap is also hardcoded in the exe. So a new map must know where to
* save the tilemap in the map file.
*
* @param mapSize
* The map size
* @param msqBlockSize
* The MSQ block size
* @param tilemapOffset
* The offset of the tilemap
*/
public GameMap(final int mapSize, final int msqBlockSize,
final int tilemapOffset)
{
if (mapSize != 32 && mapSize != 64)
{
throw new IllegalArgumentException("Illegal map size specified: "
+ mapSize + ". Valid sizes are 32 and 64");
}
this.mapSize = mapSize;
this.msqSize = msqBlockSize;
this.tilemapOffset = tilemapOffset;
this.actionClassMap = new ActionClassMap(mapSize);
this.actionMap = new ActionMap(mapSize);
this.actions = new HashMap<Integer, Actions>(15);
}
    /**
     * Constructs a map by reading it from a wasteland gameX file stream. The
     * stream must point at the beginning of the MSQ block (which is at the "m"
     * of the "msq" header string).
     *
     * Because it's not possible to read the length of a MSQ block from the MSQ
     * block itself the size of the block must be specified manually.
     *
     * @param stream
     *            The input stream
     * @param msqBlockSize
     *            The block size
     * @return The newly constructed Game Map
     * @throws IOException
     *             When file operation fails.
     */
    public static GameMap read(final SeekableInputStream stream,
        final int msqBlockSize) throws IOException
    {
        byte[] headerBytes;
        String header;
        RotatingXorInputStream xorStream;
        byte[] bytes;
        int mapSize;
        int encSize;
        int tilemapOffset;
        GameMap gameMap;
        long startOffset;
        // Read the MSQ block header and validate it.
        // NOTE(review): the return value of read() is ignored here and below; a
        // short read would go unnoticed — confirm the stream guarantees full reads.
        headerBytes = new byte[4];
        stream.read(headerBytes);
        header = new String(headerBytes, "ASCII");
        if (!header.equals("msq0") && !header.equals("msq1"))
        {
            throw new IOException("No MSQ block header found at stream");
        }
        // Get the starting offset
        startOffset = stream.tell();
        // Read/Decrypt beginning of the MSQ block body
        bytes = new byte[6189];
        xorStream = new RotatingXorInputStream(stream);
        xorStream.read(bytes);
        // Determine the map size and initialize the map with it
        mapSize = determineMapSize(bytes);
        // Determine the encryption size
        encSize = determineEncryptionSize(bytes, mapSize);
        // Read/Decrypt the whole block: the first encSize bytes are encrypted,
        // the remainder is read as plain data.
        bytes = new byte[msqBlockSize - 6];
        stream.seek(startOffset);
        xorStream = new RotatingXorInputStream(stream);
        xorStream.read(bytes, 0, encSize);
        stream.read(bytes, encSize, bytes.length - encSize);
        // Determine the tiles offset
        tilemapOffset = determineTilesOffset(bytes, mapSize);
        // Create the byte array stream and begin parsing the input
        final SeekableInputStream blockStream = new SeekableInputStream(
            new ByteArrayInputStream(bytes));
        try
        {
            // Create the Game Map
            gameMap = new GameMap(mapSize, msqBlockSize, tilemapOffset);
            // Read the map data
            gameMap.readMapData(blockStream, tilemapOffset, mapSize, true);
            // Return the created map
            return gameMap;
        }
        finally
        {
            blockStream.close();
        }
    }
    /**
     * Reads the map data from the given stream. This method is internally
     * called by the read and readHacked methods.
     *
     * @param stream
     *            The input stream
     * @param tilemapOffset
     *            The offset of the tilemap
     * @param mapSize
     *            The size of the map
     * @param compressedTileMap
     *            Defines if the tilemap is compressed and therefore must be
     *            decompressed first
     * @throws IOException
     *             When file operation fails.
     */
    private void readMapData(final SeekableInputStream stream,
        final int tilemapOffset, final int mapSize,
        final boolean compressedTileMap) throws IOException
    {
        CentralDirectory centralDirectory;
        SpecialActionTable specialActionTable;
        int monsterDataOffset;
        // Read the action class map
        this.actionClassMap = ActionClassMap.read(stream, mapSize);
        // Read the action map
        this.actionMap = ActionMap.read(stream, mapSize);
        // Read the central directory
        centralDirectory = CentralDirectory.read(stream);
        // Read the map info (the skipped byte is the map size, already known)
        stream.skip(1);
        this.info = Info.read(stream);
        // Read the battle strings
        this.battleStrings = BattleStrings.read(stream);
        // Read the tiles map
        stream.seek(tilemapOffset);
        this.tileMap = TileMap.read(stream, compressedTileMap ? 0 : mapSize);
        // Read the strings
        stream.seek(centralDirectory.getStringsOffset());
        this.strings = Strings.read(stream, tilemapOffset);
        // Read the NPCs
        stream.seek(centralDirectory.getNpcOffset());
        this.npcs = NPCs.read(stream);
        // Read the monsters if present
        monsterDataOffset = centralDirectory.getMonsterDataOffset();
        if (monsterDataOffset != 0)
        {
            int quantity;
            // Each monster data entry is 8 bytes wide.
            quantity = (centralDirectory.getStringsOffset() - monsterDataOffset) / 8;
            stream.seek(centralDirectory.getMonsterNamesOffset());
            this.monsters = Monsters.read(stream, monsterDataOffset, quantity);
        }
        else
        {
            this.monsters = new Monsters();
        }
        // Sanitizes the central directory
        centralDirectory.sanitizeCentralDirectory(this);
        // Read the special action table
        stream.seek(centralDirectory.getNibble6Offset());
        specialActionTable = SpecialActionTable.read(stream, 128);
        // Read the actions for all 15 action classes; classes without an
        // offset get an empty Actions container.
        for (int i = 1; i < 16; i++)
        {
            final int offset = centralDirectory.getActionClassOffset(i);
            if (offset != 0)
            {
                stream.seek(offset);
                this.actions
                    .put(i, Actions.read(i, stream, specialActionTable));
            }
            else
            {
                this.actions.put(i, new Actions());
            }
        }
    }
    /**
     * Reads a hacked map (For Displacer's hacked EXE) from the given input
     * stream.
     *
     * @param stream
     *            The input stream
     * @return The map
     * @throws IOException
     *             When file operation fails.
     */
    public static GameMap readHacked(final InputStream stream)
        throws IOException
    {
        int tilemapOffset, mapSize;
        SeekableInputStream gameStream;
        GameMap map;
        // Hacked maps carry a 3-byte prefix: tilemap offset (word) + map size.
        gameStream = new SeekableInputStream(stream);
        tilemapOffset = gameStream.readWord();
        mapSize = gameStream.read();
        if (tilemapOffset == -1 || mapSize == -1)
        {
            throw new IOException("Unexpected end of stream while reading map");
        }
        // Re-wrap the underlying stream so seek positions used by readMapData
        // are relative to the byte right after the 3-byte prefix.
        // NOTE(review): this relies on the wrapper not buffering ahead — confirm
        // SeekableInputStream's behavior before changing.
        gameStream = new SeekableInputStream(stream);
        map = new GameMap(mapSize, 0, 0);
        map.readMapData(gameStream, tilemapOffset, mapSize, false);
        return map;
    }
    /**
     * Writes the map data to the specified output stream. This method is used
     * internally by the write and writeHacked methods.
     *
     * The central directory is written last (its offsets are only known after
     * all sections are written) into space reserved near the beginning.
     *
     * @param stream
     *            The output stream
     * @param compressTilemap
     *            If the tile map should be compressed
     * @return The central directory
     * @throws IOException
     *             When file operation fails.
     */
    private CentralDirectory writeMapData(final OutputStream stream,
        final boolean compressTilemap) throws IOException
    {
        SeekableOutputStream plainStream;
        CentralDirectory centralDirectory;
        int stringsOffset;
        long directoryOffset;
        SpecialActionTable specialActionTable;
        plainStream = new SeekableOutputStream(stream);
        // Write the action class map
        this.actionClassMap.write(plainStream);
        // Write the action map
        this.actionMap.write(plainStream);
        // Create the central directory and skip the space for it in the map
        // (44 bytes; filled in by the final seek-and-write below)
        centralDirectory = new CentralDirectory();
        directoryOffset = plainStream.tell();
        plainStream.skip(44);
        // Write the map size
        plainStream.writeByte(this.mapSize);
        // Write the map info
        this.info.write(plainStream);
        // Write the battle strings
        this.battleStrings.write(plainStream);
        // Build the special action table
        specialActionTable = buildSpecialActionTable();
        // Write the actions, recording each non-empty class's offset
        for (int i = 1; i < 16; i++)
        {
            Actions actions;
            actions = this.actions.get(i);
            if (actions == null || actions.countActions() == 0)
            {
                continue;
            }
            centralDirectory.setActionClassOffset(i, (int) plainStream.tell());
            actions.write(plainStream, specialActionTable);
        }
        // Write the special action table
        if (specialActionTable.size() > 0)
        {
            centralDirectory.setNibble6Offset((int) plainStream.tell());
            specialActionTable.write(plainStream);
        }
        // Write the NPCs
        if (this.npcs.size() > 0)
        {
            centralDirectory.setNpcOffset((int) plainStream.tell());
            this.npcs.write(plainStream);
        }
        // Write the monster names
        centralDirectory.setMonsterNamesOffset((int) plainStream.tell());
        this.monsters.writeNames(plainStream);
        // Write the monster data
        centralDirectory.setMonsterDataOffset((int) plainStream.tell());
        this.monsters.writeData(plainStream);
        // Write the strings
        stringsOffset = (int) plainStream.tell();
        centralDirectory.setStringsOffset(stringsOffset);
        this.strings.write(plainStream);
        // Add padding so the tilemap lands at the offset hardcoded in wl.exe
        if (compressTilemap)
        {
            if (plainStream.tell() > this.tilemapOffset)
            {
                log.warn("Too much data before tile map. Fixing "
                    + "offsets in wl.exe is needed to run this game file");
            }
            else
            {
                plainStream.skip(this.tilemapOffset - plainStream.tell());
            }
        }
        // Write the tile map
        centralDirectory.setTilemapOffset((int) plainStream.tell());
        this.tileMap.write(plainStream, compressTilemap);
        // Add padding up to the fixed MSQ block size (minus 6 header bytes)
        if (compressTilemap)
        {
            if (plainStream.tell() > this.msqSize - 6)
            {
                log
                    .warn("Tilemap too large. Fixing offsets in wl.exe is needed "
                        + "to run this game file");
            }
            else
            {
                plainStream.skip(this.msqSize - 6 - plainStream.tell());
            }
        }
        // Write the central directory into the space reserved earlier
        plainStream.seek(directoryOffset);
        centralDirectory.write(plainStream);
        // Flush the stream, it's complete now
        plainStream.flush();
        return centralDirectory;
    }
/**
 * Writes the map to the specified output stream.
 *
 * @param stream
 *            The output stream
 * @param disk
 *            The disk id (0 or 1)
 * @throws IOException
 *             When file operation fails.
 */
public void write(final OutputStream stream, final int disk)
    throws IOException
{
    // Render the complete map into a memory buffer first, so the
    // encrypted and the unencrypted part can be written separately
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final CentralDirectory directory = writeMapData(buffer, true);
    final byte[] data = buffer.toByteArray();
    final int stringsOffset = directory.getStringsOffset();
    // Write the MSQ header: the literal "msq" plus the disk digit
    stream.write("msq".getBytes());
    stream.write('0' + disk);
    // Everything up to the strings offset is written encrypted
    final RotatingXorOutputStream encryptedStream =
        new RotatingXorOutputStream(stream);
    encryptedStream.write(data, 0, stringsOffset);
    encryptedStream.flush();
    // The remainder (from the strings offset on) is written unencrypted
    stream.write(data, stringsOffset, data.length - stringsOffset);
}
/**
 * Writes an external map file compatible to Displacer's hacked EXE file.
 *
 * @param stream
 *            The stream to write the map to
 * @throws IOException
 *             When file operation fails.
 */
public void writeHacked(final OutputStream stream) throws IOException
{
    // Render the map into a memory buffer (uncompressed tile map)
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final CentralDirectory directory = writeMapData(buffer, false);
    final int offset = directory.getTilemapOffset();
    final byte[] data = buffer.toByteArray();
    // Header: 16 bit little-endian tile map offset, then the map size,
    // followed by the raw map data
    stream.write(offset & 255);
    stream.write(offset >> 8);
    stream.write(this.mapSize);
    stream.write(data);
}
/**
 * Builds the special action table by collecting the unique action ids of
 * all special actions found in action class 6.
 *
 * @return The special action table
 */
private SpecialActionTable buildSpecialActionTable()
{
    final SpecialActionTable specialActionTable = new SpecialActionTable();
    final Actions actions = this.actions.get(6);
    if (actions != null)
    {
        for (int i = 0, max = actions.countActions(); i < max; i++)
        {
            // Fetch the action once and reuse it (the original code
            // called getAction(i) a second time for the cast).
            // "instanceof" is false for null, so no extra null check is
            // needed after it.
            final Action action = actions.getAction(i);
            if (action instanceof SpecialAction)
            {
                final int id = ((SpecialAction) action).getAction();
                if (!specialActionTable.contains(id))
                {
                    specialActionTable.add(id);
                }
            }
        }
    }
    return specialActionTable;
}
/**
 * Creates and returns a new game map from XML.
 *
 * @param element
 *            The XML root element
 * @return The Game Map
 */
public static GameMap read(final Element element)
{
    // Read the map geometry from the root element attributes. msqSize and
    // tilemapOffset default to 0 when absent.
    final int mapSize = StringUtils.toInt(element.attributeValue("mapSize"));
    final int msqSize = StringUtils.toInt(element.attributeValue("msqSize",
        "0"));
    final int tilemapOffset = StringUtils.toInt(element.attributeValue(
        "tilemapOffset", "0"));
    final GameMap map = new GameMap(mapSize, msqSize, tilemapOffset);
    // Read the action class map and the action map
    map.actionClassMap = ActionClassMap.read(element
        .element("actionClassMap"), mapSize);
    map.actionMap = ActionMap.read(element.element("actionMap"), mapSize);
    // Read the map info (needed below for the background tile) and the
    // battle strings
    map.info = Info.read(element.element("info"));
    map.battleStrings = BattleStrings.read(element
        .element("battleStrings"));
    // Read the per-class action groups
    for (final Object item: element.elements("actions"))
    {
        final Element actionsElement = (Element) item;
        final int actionClass = StringUtils.toInt(actionsElement
            .attributeValue("actionClass"));
        map.actions.put(actionClass, Actions.read(actionsElement));
    }
    // Read tile map, strings, monsters and NPCs
    map.tileMap = TileMap.read(element.element("tileMap"), mapSize,
        map.info.getBackgroundTile());
    map.strings = Strings.read(element.element("strings"));
    map.monsters = Monsters.read(element.element("monsters"));
    map.npcs = NPCs.read(element.element("npcs"));
    return map;
}
/**
 * Reads a game map from the specified XML stream.
 *
 * @param stream
 *            The input stream
 * @return The game map
 */
public static GameMap readXml(final InputStream stream)
{
    // Parse the document and delegate to the element-based reader
    final Document document = XmlUtils.readDocument(stream);
    return read(document.getRootElement());
}
/**
 * @see de.ailis.wlandsuite.game.blocks.GameBlock#toXml()
 */
@Override
public Element toXml()
{
    final Element root = XmlUtils.createElement("map");
    root.addAttribute("mapSize", Integer.toString(this.mapSize));
    // msqSize and tilemapOffset are only serialized when non-zero
    if (this.msqSize != 0)
    {
        root.addAttribute("msqSize", Integer.toString(this.msqSize));
    }
    if (this.tilemapOffset != 0)
    {
        root.addAttribute("tilemapOffset", Integer
            .toString(this.tilemapOffset));
    }
    // Action class map, action map, map info and battle strings
    root.add(this.actionClassMap.toXml());
    root.add(this.actionMap.toXml(this.actionClassMap));
    root.add(this.info.toXml());
    root.add(this.battleStrings.toXml());
    // Serialize all non-empty action classes 1-15
    for (int actionClass = 1; actionClass < 16; actionClass++)
    {
        final Actions classActions = this.actions.get(actionClass);
        if (classActions != null && classActions.countActions() > 0)
        {
            root.add(classActions.toXml(actionClass));
        }
    }
    // NPCs, monsters, strings and the tile map
    root.add(this.npcs.toXml());
    root.add(this.monsters.toXml());
    root.add(this.strings.toXml());
    root.add(this.tileMap.toXml(this.info.getBackgroundTile()));
    return root;
}
/**
 * Returns the size of the encrypted part in the map block. To do this it
 * needs at least 6146 decrypted bytes from the map block.
 *
 * @param bytes
 *            The (decrypted) block data
 * @param mapSize
 *            The map size
 * @return The size of the encrypted part
 */
private static int determineEncryptionSize(final byte[] bytes,
    final int mapSize)
{
    // The 16 bit size is stored little-endian directly behind the map
    // layers (mapSize * mapSize * 3 / 2 bytes)
    final int offset = mapSize * mapSize * 3 / 2;
    final int low = bytes[offset] & 0xff;
    final int high = bytes[offset + 1] & 0xff;
    return low | (high << 8);
}
/**
 * Determines the map size by just looking at the MSQ block bytes. For this
 * it needs at least 6189 unencrypted bytes. Throws a GameException if it
 * was not able to determine the map size.
 *
 * @param bytes
 *            The MSQ block bytes
 * @return The map size.
 */
private static int determineMapSize(final byte[] bytes)
{
    // Probe whether the block is consistent with a 64x64 map
    int probe = 64 * 64 * 3 / 2;
    final boolean looks64 = probe + 44 < bytes.length
        && bytes[probe + 44] == 64 && bytes[probe + 6] == 0
        && bytes[probe + 7] == 0;
    // Probe whether the block is consistent with a 32x32 map
    probe = 32 * 32 * 3 / 2;
    final boolean looks32 = probe + 44 < bytes.length
        && bytes[probe + 6] == 0 && bytes[probe + 7] == 0
        && bytes[probe + 44] == 32;
    // The probe result must be unambiguous
    if (!looks32 && !looks64)
    {
        throw new GameException(
            "Cannot determine map size: Map is not a 32 or 64 size map");
    }
    if (looks32 && looks64)
    {
        throw new GameException(
            "Cannot determine map size: Map could be a 32 or 64 size map");
    }
    return looks32 ? 32 : 64;
}
/**
 * Determines the tiles offset by just looking at the MSQ block bytes.
 *
 * @param bytes
 *            The MSQ block bytes
 * @param mapSize
 *            The map size
 * @return The tiles offset
 */
private static int determineTilesOffset(final byte[] bytes,
    final int mapSize)
{
    // High byte of the tile map size (mapSize * mapSize) that the
    // signature must contain
    final int sizeHighByte = (mapSize * mapSize) >> 8;
    // Scan backwards for the signature pattern
    for (int i = bytes.length - 9; i > 0; i--)
    {
        final boolean matches = bytes[i] == 0
            && bytes[i + 1] == sizeHighByte && bytes[i + 2] == 0
            && bytes[i + 3] == 0 && bytes[i + 6] == 0
            && bytes[i + 7] == 0;
        if (matches)
        {
            return i;
        }
    }
    throw new GameException("Unable to find tiles offset for size "
        + mapSize + " map");
}
/**
 * Returns the map size. This is normally 64 or 32. Maps are always
 * quadratic. It's not possible to have different widths and heights.
 *
 * @return The map size
 */
public int getMapSize()
{
    return this.mapSize;
}
/**
 * Returns the MSQ block size.
 *
 * @return The MSQ block size
 */
public int getMsqSize()
{
    return this.msqSize;
}
/**
 * Returns the action class map.
 *
 * @return The action class map
 */
public ActionClassMap getActionClassMap()
{
    return this.actionClassMap;
}
/**
 * Sets the action class map.
 *
 * @param actionClassMap
 *            The action class map to set
 */
public void setActionClassMap(final ActionClassMap actionClassMap)
{
    this.actionClassMap = actionClassMap;
}
/**
 * Returns the action map.
 *
 * @return The action map
 */
public ActionMap getActionMap()
{
    return this.actionMap;
}
/**
 * Sets the action map.
 *
 * @param actionMap
 *            The action map to set
 */
public void setActionMap(final ActionMap actionMap)
{
    this.actionMap = actionMap;
}
/**
 * Returns the battle strings.
 *
 * @return The battle strings
 */
public BattleStrings getBattleStrings()
{
    return this.battleStrings;
}
/**
 * Sets the battle strings.
 *
 * @param battleStrings
 *            The battle strings to set
 */
public void setBattleStrings(final BattleStrings battleStrings)
{
    this.battleStrings = battleStrings;
}
/**
 * Returns the map info.
 *
 * @return The map info
 */
public Info getInfo()
{
    return this.info;
}
/**
 * Sets the map info.
 *
 * @param info
 *            The map info to set
 */
public void setInfo(final Info info)
{
    this.info = info;
}
/**
 * Returns the monsters.
 *
 * @return The monsters
 */
public Monsters getMonsters()
{
    return this.monsters;
}
/**
 * Sets the monsters.
 *
 * @param monsters
 *            The monsters to set
 */
public void setMonsters(final Monsters monsters)
{
    this.monsters = monsters;
}
/**
 * Returns the NPCs.
 *
 * @return The NPCs
 */
public NPCs getNpcs()
{
    return this.npcs;
}
/**
 * Sets the NPCs.
 *
 * @param npcs
 *            The NPCs to set
 */
public void setNpcs(final NPCs npcs)
{
    this.npcs = npcs;
}
/**
 * Returns the strings.
 *
 * @return The strings
 */
public Strings getStrings()
{
    return this.strings;
}
/**
 * Sets the strings.
 *
 * @param strings
 *            The strings to set
 */
public void setStrings(final Strings strings)
{
    this.strings = strings;
}
/**
 * Returns the tile map.
 *
 * @return The tile map
 */
public TileMap getTileMap()
{
    return this.tileMap;
}
/**
 * Sets the tile map.
 *
 * @param tileMap
 *            The tile map to set
 */
public void setTileMap(final TileMap tileMap)
{
    this.tileMap = tileMap;
}
/**
 * Returns the tile map offset.
 *
 * @return The tile map offset
 */
public int getTilemapOffset()
{
    return this.tilemapOffset;
}
/**
 * Sets the tile map offset.
 *
 * @param tilemapOffset
 *            The tile map offset to set
 */
public void setTilemapOffset(final int tilemapOffset)
{
    this.tilemapOffset = tilemapOffset;
}
/**
 * Sets the map size.
 *
 * @param mapSize
 *            The map size to set
 */
public void setMapSize(final int mapSize)
{
    this.mapSize = mapSize;
}
/**
 * Sets the MSQ block size.
 *
 * @param msqSize
 *            The MSQ block size to set
 */
public void setMsqSize(final int msqSize)
{
    this.msqSize = msqSize;
}
}
| |
/*
* Copyright 2009 Aleksandar Seovic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seovic.core.objects;
import com.seovic.core.util.Convert;
import com.tangosol.io.pof.PofReader;
import com.tangosol.io.pof.PofWriter;
import com.tangosol.io.pof.PortableObject;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.mvel2.integration.PropertyHandler;
import org.mvel2.integration.VariableResolverFactory;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.util.Assert;
/**
 * Object that supports dynamic properties.
 *
 * @author Aleksandar Seovic  2009.11.05
 */
@SuppressWarnings({"unchecked", "deprecation"})
@XmlRootElement(name = "object")
public class DynamicObject
        implements Serializable, PortableObject {
    // ---- constructors ----------------------------------------------------

    /**
     * Default constructor.
     */
    public DynamicObject() {
        m_properties = createPropertyMap();
    }

    /**
     * Construct <tt>DynamicObject</tt> based on existing JavaBean.
     * <p/>
     * Constructed object will contain all readable properties of the specified JavaBean.
     *
     * @param bean  a JavaBean to initialize this dynamic object with
     */
    public DynamicObject(Object bean) {
        m_properties = createPropertyMap();
        merge(bean);
    }

    /**
     * Construct <tt>DynamicObject</tt> based on existing JavaBean.
     * <p/>
     * Constructed object will contain only specific properties of the specified JavaBean.
     *
     * @param bean        a JavaBean to initialize this dynamic object with
     * @param properties  properties to extract from the specified JavaBean
     */
    public DynamicObject(Object bean, PropertyList properties) {
        m_properties = createPropertyMap();
        merge(bean, properties);
    }

    /**
     * Construct <tt>DynamicObject</tt> based on existing Map.
     * <p/>
     * Constructed object will contain all entries from the specified map.
     *
     * @param map  a map to initialize this dynamic object with
     */
    public DynamicObject(Map<String, Object> map) {
        m_properties = createPropertyMap();
        merge(map);
    }

    // ---- public API ------------------------------------------------------

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property, or <tt>null</tt> if absent
     */
    public Object getValue(String name) {
        return m_properties.get(name);
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setValue(String name, Object value) {
        m_properties.put(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public boolean getBoolean(String name) {
        return Convert.toBoolean(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setBoolean(String name, boolean value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public byte getByte(String name) {
        return Convert.toByte(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setByte(String name, byte value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public char getChar(String name) {
        return Convert.toChar(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setChar(String name, char value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public short getShort(String name) {
        return Convert.toShort(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setShort(String name, short value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public int getInt(String name) {
        return Convert.toInt(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setInt(String name, int value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public long getLong(String name) {
        return Convert.toLong(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setLong(String name, long value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public float getFloat(String name) {
        return Convert.toFloat(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setFloat(String name, float value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public double getDouble(String name) {
        return Convert.toDouble(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setDouble(String name, double value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public BigDecimal getBigDecimal(String name) {
        return Convert.toBigDecimal(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setBigDecimal(String name, BigDecimal value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return string value of the specified property, or <tt>null</tt> if
     *         the property is absent or <tt>null</tt>
     */
    public String getString(String name) {
        // Fix: the original called toString() directly and threw a NPE for
        // absent properties, unlike all other typed getters which tolerate
        // null via Convert.
        Object value = getValue(name);
        return value == null ? null : value.toString();
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setString(String name, String value) {
        setValue(name, value);
    }

    /**
     * Return property value for the specified name.
     *
     * @param name  property name
     *
     * @return value of the specified property
     */
    public Date getDate(String name) {
        return Convert.toDate(getValue(name));
    }

    /**
     * Set value of the specified property.
     *
     * @param name   property name
     * @param value  property value
     */
    public void setDate(String name, Date value) {
        setValue(name, value);
    }

    /**
     * Merge all properties from the specified dynamic object into this one.
     * <p/>
     * Any properties with the same name that already exist in this object will be overwritten.
     *
     * @param obj  object to merge into this object
     */
    public void merge(DynamicObject obj) {
        if (obj == null) {
            throw new IllegalArgumentException(
                    "Object to merge cannot be null");
        }
        m_properties.putAll(obj.m_properties);
    }

    /**
     * Merge all properties from the specified object into this one.
     * <p/>
     * Any properties with the same name that already exist in this object will be overwritten.
     *
     * @param obj  object to merge into this object
     */
    public void merge(Object obj) {
        if (obj == null) {
            throw new IllegalArgumentException(
                    "Object to merge cannot be null");
        }
        m_properties.putAll(getPropertyMap(obj));
    }

    /**
     * Merge specified properties from the specified object into this one.
     * <p/>
     * Any properties with the same name that already exist in this object will be overwritten.
     *
     * @param obj         object to merge into this object
     * @param properties  properties to merge
     */
    public void merge(Object obj, PropertyList properties) {
        if (obj == null) {
            throw new IllegalArgumentException(
                    "Object to merge cannot be null");
        }
        for (PropertySpec property : properties) {
            String name  = property.getName();
            Object value = property.getValue(obj);
            // When a nested property list is given, wrap the value (or each
            // collection element) into a DynamicObject restricted to it
            if (value != null && property.getPropertyList() != null) {
                if (value instanceof Collection) {
                    List colValues = new ArrayList(((Collection) value).size());
                    for (Object o : (Collection) value) {
                        colValues.add(new DynamicObject(o,
                                                        property.getPropertyList()));
                    }
                    value = colValues;
                }
                else {
                    value = new DynamicObject(value,
                                              property.getPropertyList());
                }
            }
            m_properties.put(name, value);
        }
    }

    /**
     * Merge all entries from the specified map into this object.
     * <p/>
     * Any properties with the same name that already exist in this object will be overwritten.
     *
     * @param map  map to merge into this object
     */
    public void merge(Map<String, Object> map) {
        if (map == null) {
            throw new IllegalArgumentException("Map to merge cannot be null");
        }
        m_properties.putAll(map);
    }

    /**
     * Update specified target from this object.
     *
     * @param target  target object to update
     */
    public void update(Object target) {
        if (target == null) {
            throw new IllegalArgumentException(
                    "Target to update cannot be null");
        }
        BeanWrapper bw = new BeanWrapperImpl(target);
        bw.registerCustomEditor(Date.class, new CustomDateEditor(new SimpleDateFormat("yyyy-MM-dd"), true));
        for (Map.Entry<String, Object> property : m_properties.entrySet()) {
            String propertyName = property.getKey();
            Object value = property.getValue();
            // A map value is treated as a nested DynamicObject unless the
            // target property is itself a writable Map
            if (value instanceof Map) {
                PropertyDescriptor pd = bw.getPropertyDescriptor(propertyName);
                if (!Map.class.isAssignableFrom(pd.getPropertyType()) || pd.getWriteMethod() == null) {
                    value = new DynamicObject((Map<String, Object>) value);
                }
            }
            if (value instanceof DynamicObject) {
                // Recurse into the nested target object
                ((DynamicObject) value).update(bw.getPropertyValue(propertyName));
            }
            else {
                bw.setPropertyValue(propertyName, value);
            }
        }
    }

    // ---- internal API ----------------------------------------------------

    /**
     * Return a map of all readable JavaBean properties of the given object.
     *
     * @param obj  object to introspect; must not be null
     *
     * @return map of property name to property value
     */
    public static Map<String, Object> getPropertyMap(Object obj) {
        Assert.notNull(obj, "Argument cannot be null");
        try {
            BeanInfo beanInfo = Introspector.getBeanInfo(obj.getClass());
            PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
            Map<String, Object> propertyMap = new HashMap<String, Object>(propertyDescriptors.length);
            for (PropertyDescriptor pd : propertyDescriptors) {
                Method getter = pd.getReadMethod();
                if (getter != null) {
                    propertyMap.put(pd.getName(), getter.invoke(obj));
                }
            }
            return propertyMap;
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Factory method that creates internal property map.
     *
     * @return internal property map instance
     */
    protected Map<String, Object> createPropertyMap() {
        return new LinkedHashMap<String, Object>();
    }

    /**
     * Return internal property map.
     *
     * @return internal property map
     */
    protected Map<String, Object> getProperties() {
        return m_properties;
    }

    /**
     * Set internal property map.
     *
     * @param properties  internal property map
     */
    protected void setProperties(Map<String, Object> properties) {
        m_properties = properties;
    }

    // ---- PortableObject implementation -----------------------------------

    @Override
    @SuppressWarnings({"unchecked"})
    public void readExternal(PofReader reader)
            throws IOException {
        reader.readMap(0, m_properties);
    }

    @Override
    public void writeExternal(PofWriter writer)
            throws IOException {
        writer.writeMap(0, m_properties);
    }

    // ---- Object methods --------------------------------------------------

    /**
     * Test objects for equality.
     *
     * @param o  object to compare this object with
     *
     * @return <tt>true</tt> if the specified object is equal to this object <tt>false</tt> otherwise
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed
        if (!(o instanceof DynamicObject)) {
            return false;
        }
        DynamicObject dynObj = (DynamicObject) o;
        return m_properties.equals(dynObj.m_properties);
    }

    /**
     * Return hash code for this object.
     *
     * @return this object's hash code
     */
    @Override
    public int hashCode() {
        return m_properties.hashCode();
    }

    /**
     * Return string representation of this object.
     *
     * @return string representation of this object
     */
    @Override
    public String toString() {
        return getClass().getSimpleName() + "{properties=" + m_properties + '}';
    }

    // ---- JAXB support ----------------------------------------------------

    /** JAXB surrogate for the whole property map. */
    public static class ObjectType {
        @XmlElement(name = "property")
        public List<PropertyType> propertyList = new LinkedList<PropertyType>();
    }

    /** JAXB surrogate for a single name/value property. */
    public static class PropertyType {
        @XmlAttribute
        public String name;
        @XmlElement
        public Object value;

        public PropertyType() {
        }

        public PropertyType(String name, Object value) {
            this.name = name;
            this.value = value;
        }
    }

    /** Adapter converting between the property map and its JAXB surrogate. */
    public static class Adapter
            extends XmlAdapter<ObjectType, Map<String, Object>> {
        @Override
        public Map<String, Object> unmarshal(ObjectType type)
                throws Exception {
            Map<String, Object> result = new LinkedHashMap<String, Object>(
                    type.propertyList.size());
            for (PropertyType property : type.propertyList) {
                result.put(property.name, property.value);
            }
            return result;
        }

        @Override
        public ObjectType marshal(Map<String, Object> properties)
                throws Exception {
            ObjectType result = new ObjectType();
            for (Map.Entry<String, Object> property : properties.entrySet()) {
                result.propertyList.add(new PropertyType(property.getKey(),
                                                         property.getValue()));
            }
            return result;
        }
    }

    // ---- inner class: MvelPropertyHandler --------------------------------

    /** MVEL property handler that routes property access through this class. */
    public static class MvelPropertyHandler implements PropertyHandler {
        @Override
        public Object getProperty(String name, Object o, VariableResolverFactory variableResolverFactory) {
            return ((DynamicObject) o).getValue(name);
        }

        @Override
        public Object setProperty(String name, Object o, VariableResolverFactory variableResolverFactory, Object value) {
            ((DynamicObject) o).setValue(name, value);
            return value;
        }
    }

    // ---- data members ----------------------------------------------------

    @XmlJavaTypeAdapter(Adapter.class)
    @XmlElement(name = "properties")
    private Map<String, Object> m_properties;
}
| |
/**
*
* Copyright (c) Microsoft and contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
package com.microsoft.windowsazure.management.compute.models;
import com.microsoft.windowsazure.core.LazyArrayList;
import java.net.URI;
import java.util.ArrayList;
import java.util.Calendar;
/**
* Parameters supplied to the Update Virtual Machine Image operation.
*/
public class VirtualMachineVMImageUpdateParameters {
private ArrayList<DataDiskConfigurationUpdateParameters> dataDiskConfigurations;
/**
* Optional. Optional. The Data Disk Configurations.
* @return The DataDiskConfigurations value.
*/
public ArrayList<DataDiskConfigurationUpdateParameters> getDataDiskConfigurations() {
return this.dataDiskConfigurations;
}
/**
* Optional. Optional. The Data Disk Configurations.
* @param dataDiskConfigurationsValue The DataDiskConfigurations value.
*/
public void setDataDiskConfigurations(final ArrayList<DataDiskConfigurationUpdateParameters> dataDiskConfigurationsValue) {
this.dataDiskConfigurations = dataDiskConfigurationsValue;
}
private String description;
/**
* Optional. Specifies the description of the OS image.
* @return The Description value.
*/
public String getDescription() {
return this.description;
}
/**
* Optional. Specifies the description of the OS image.
* @param descriptionValue The Description value.
*/
public void setDescription(final String descriptionValue) {
this.description = descriptionValue;
}
private String eula;
/**
* Optional. Specifies the End User License Agreement that is associated
* with the image. The value for this element is a string, but it is
* recommended that the value be a URL that points to a EULA.
* @return The Eula value.
*/
public String getEula() {
return this.eula;
}
/**
* Optional. Specifies the End User License Agreement that is associated
* with the image. The value for this element is a string, but it is
* recommended that the value be a URL that points to a EULA.
* @param eulaValue The Eula value.
*/
public void setEula(final String eulaValue) {
this.eula = eulaValue;
}
private URI iconUri;
/**
* Optional. Specifies the URI to the icon that is displayed for the image
* in the Management Portal.
* @return The IconUri value.
*/
public URI getIconUri() {
return this.iconUri;
}
/**
* Optional. Specifies the URI to the icon that is displayed for the image
* in the Management Portal.
* @param iconUriValue The IconUri value.
*/
public void setIconUri(final URI iconUriValue) {
this.iconUri = iconUriValue;
}
private String imageFamily;
/**
* Optional. Specifies a value that can be used to group OS images.
* @return The ImageFamily value.
*/
public String getImageFamily() {
return this.imageFamily;
}
/**
* Optional. Specifies a value that can be used to group OS images.
* @param imageFamilyValue The ImageFamily value.
*/
public void setImageFamily(final String imageFamilyValue) {
this.imageFamily = imageFamilyValue;
}
private String label;
/**
* Required. Specifies the friendly name of the image to be updated. You
* cannot use this operation to update images provided by the Azure
* platform.
* @return The Label value.
*/
public String getLabel() {
return this.label;
}
/**
* Required. Specifies the friendly name of the image to be updated. You
* cannot use this operation to update images provided by the Azure
* platform.
* @param labelValue The Label value.
*/
public void setLabel(final String labelValue) {
this.label = labelValue;
}
private String language;
/**
* Optional. Specifies the language of the image.
* @return The Language value.
*/
public String getLanguage() {
return this.language;
}
/**
* Optional. Specifies the language of the image.
* @param languageValue The Language value.
*/
public void setLanguage(final String languageValue) {
this.language = languageValue;
}
private OSDiskConfigurationUpdateParameters oSDiskConfiguration;

/**
 * Optional. The OS disk configuration for the image.
 * @return the OSDiskConfiguration value, or null if not set
 */
public OSDiskConfigurationUpdateParameters getOSDiskConfiguration() {
    return oSDiskConfiguration;
}

/**
 * Optional. The OS disk configuration for the image.
 * @param oSDiskConfigurationValue the new OSDiskConfiguration value
 */
public void setOSDiskConfiguration(final OSDiskConfigurationUpdateParameters oSDiskConfigurationValue) {
    this.oSDiskConfiguration = oSDiskConfigurationValue;
}
private URI privacyUri;

/**
 * Optional. URI of a document containing the privacy policy for the OS
 * image.
 * @return the PrivacyUri value, or null if not set
 */
public URI getPrivacyUri() {
    return privacyUri;
}

/**
 * Optional. URI of a document containing the privacy policy for the OS
 * image.
 * @param privacyUriValue the new PrivacyUri value
 */
public void setPrivacyUri(final URI privacyUriValue) {
    this.privacyUri = privacyUriValue;
}
private Calendar publishedDate;

/**
 * Optional. The date on which the OS image was added to the image
 * repository.
 * @return the PublishedDate value, or null if not set
 */
public Calendar getPublishedDate() {
    return publishedDate;
}

/**
 * Optional. The date on which the OS image was added to the image
 * repository.
 * @param publishedDateValue the new PublishedDate value
 */
public void setPublishedDate(final Calendar publishedDateValue) {
    this.publishedDate = publishedDateValue;
}
private String recommendedVMSize;

/**
 * Optional. The VM size to use for virtual machines created from this OS
 * image.
 * @return the RecommendedVMSize value, or null if not set
 */
public String getRecommendedVMSize() {
    return recommendedVMSize;
}

/**
 * Optional. The VM size to use for virtual machines created from this OS
 * image.
 * @param recommendedVMSizeValue the new RecommendedVMSize value
 */
public void setRecommendedVMSize(final String recommendedVMSizeValue) {
    this.recommendedVMSize = recommendedVMSizeValue;
}
private Boolean showInGui;

/**
 * Optional. True or False.
 * @return the ShowInGui value, or null if not set
 */
public Boolean isShowInGui() {
    return showInGui;
}

/**
 * Optional. True or False.
 * @param showInGuiValue the new ShowInGui value
 */
public void setShowInGui(final Boolean showInGuiValue) {
    this.showInGui = showInGuiValue;
}
private URI smallIconUri;

/**
 * Optional. URI of the small icon shown when the image is presented in the
 * Azure Management Portal.
 * @return the SmallIconUri value, or null if not set
 */
public URI getSmallIconUri() {
    return smallIconUri;
}

/**
 * Optional. URI of the small icon shown when the image is presented in the
 * Azure Management Portal.
 * @param smallIconUriValue the new SmallIconUri value
 */
public void setSmallIconUri(final URI smallIconUriValue) {
    this.smallIconUri = smallIconUriValue;
}
/**
 * Creates an empty VirtualMachineVMImageUpdateParameters instance with an
 * empty data-disk configuration list.
 */
public VirtualMachineVMImageUpdateParameters() {
    setDataDiskConfigurations(new LazyArrayList<DataDiskConfigurationUpdateParameters>());
}
/**
 * Creates a VirtualMachineVMImageUpdateParameters instance and populates
 * the required label.
 *
 * @param label the friendly name of the image; must not be null
 * @throws NullPointerException if label is null
 */
public VirtualMachineVMImageUpdateParameters(String label) {
    this();
    if (null == label) {
        throw new NullPointerException("label");
    }
    setLabel(label);
}
}
| |
/**
Copyright [2011] [Dorian Cussen]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package couk.doridori.android.lib.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.TextView;
import couk.doridori.android.lib.R;
/**
* Copyright (C) 2011 Dorian Cussen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*
* Like a normal frameLayout apart from this will swap out its views
* depending on some data state.
*
* You should pass in layout file ids to the empty and loading resId xml
* attrbutes and an id into the content view attr
*
* If you supply your own error layout it must have a textView with the id
* 'state_error_text'
*
* If your going to use the same loading / error / empty views (with differnt text) just set them in the source below and make sure the contents ids for the textViews match
*
* Will auto hide all children on start
*
* <b>WARNING - Samsung s3 running 4.0.4 (possibly a 4.0.4 bug) cannot handle a view changing from GONE to VISIBLE with <code>animateLayoutChanges=true</code>. As this is a Framelayout you can either change to INVISIBLE instead of GONE (less efficent as will still be measured when not vis) OR implement custom show hide anims for this class. Prob best to just not use animateLayoutChanges. Custom animations solution is untested however :)<b/> Think this has something to do with view invlidation as a PTR etc will then show the view
*
* Animations can be setup but using layoutTransitions = true in the manifest (unless they have been globally disabled in the user settings)
*
* TODO: should add the child views the same way as done for AOSP views i.e https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/core/java/android/widget/DatePicker.java - and not via a post in onLayout. Causing issues like not being abel to grab child views after setContentView is called and also showing two states at once etc
*
* TODO: should add children as xml (could use include) and either use id in paretn to specify type or attributes on children - then dont have to mess around with programming view adding and just rely on normal inflate
*
*/
public class FrameLayoutWithState extends FrameLayout {

    /** Current data state; starts as NOT_INIT (everything hidden). */
    private ViewState mCurrentViewState = ViewState.NOT_INIT;
    /** Layout resource ids for the state views; content id must come from xml attrs. */
    private int mLoadingResId, mEmptyResId, mContentResId, mErrorResId;
    /** Inflated state views. mContentView is resolved lazily on the first state change. */
    private View mLoadingView, mEmptyView, mContentView, mErrorView;
    private String mErrorText = null;
    private String mEmptyText;

    public FrameLayoutWithState(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        getCustomAttrs(context, attrs);
        inflateStateViews();
    }

    public FrameLayoutWithState(Context context, AttributeSet attrs) {
        super(context, attrs);
        getCustomAttrs(context, attrs);
        inflateStateViews();
    }

    /**
     * Unsupported: the contentView attribute is mandatory, so this view must
     * be created with an AttributeSet (typically from XML).
     */
    public FrameLayoutWithState(Context context) {
        super(context);
        throw new RuntimeException("Use a constructor with attrs");
    }

    /**
     * Reads the state-view layout ids from the styleable attributes, falling
     * back to the default empty / loading / error layouts. The contentView
     * attribute is required and has no default.
     */
    private void getCustomAttrs(Context context, AttributeSet attrs) {
        TypedArray array = context.obtainStyledAttributes(attrs, R.styleable.FrameLayoutWithState);
        // get state layout res id's if present, else use default
        mEmptyResId = array.getResourceId(R.styleable.FrameLayoutWithState_emptyView,
                R.layout.element_data_state_empty);
        mLoadingResId = array.getResourceId(R.styleable.FrameLayoutWithState_loadingView,
                R.layout.element_data_state_loading);
        mErrorResId = array.getResourceId(R.styleable.FrameLayoutWithState_errorView,
                R.layout.element_data_state_error);
        if (!array.hasValue(R.styleable.FrameLayoutWithState_contentView))
            throw new RuntimeException("need to set contentView attr");
        mContentResId = array.getResourceId(R.styleable.FrameLayoutWithState_contentView, -1);
        array.recycle();
    }

    /**
     * Inflates and adds the loading / empty / error views, all initially GONE.
     * The content view is obtained on the first state manipulation instead,
     * because this method runs in the constructor, before any XML children
     * have been inflated and added by the system.
     */
    private void inflateStateViews() {
        LayoutInflater layoutInflater = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        mLoadingView = layoutInflater.inflate(mLoadingResId, this, false);
        mLoadingView.setVisibility(View.GONE);
        addView(mLoadingView);
        mEmptyView = layoutInflater.inflate(mEmptyResId, this, false);
        mEmptyView.setVisibility(View.GONE);
        addView(mEmptyView);
        mErrorView = layoutInflater.inflate(mErrorResId, this, false);
        mErrorView.setVisibility(View.GONE);
        addView(mErrorView);
    }

    /** Switches state, animating where applicable. */
    public void setViewState(ViewState newViewState) {
        setViewState(newViewState, true);
    }

    /**
     * @param newViewState the state whose view should be shown
     * @param animate true if should animate when showing content
     */
    public void setViewState(ViewState newViewState, boolean animate) {
        mCurrentViewState = newViewState;
        showViewBasedOnState(animate);
    }

    /**
     * Switches to the ERROR state and displays the given message.
     * @param msg can not be null
     */
    public void setViewStateError(String msg) {
        mCurrentViewState = ViewState.ERROR;
        mErrorText = msg;
        showViewBasedOnState(true);
        setErrorText(mErrorText);
    }

    /**
     * Switches to the EMPTY state and displays the given message.
     * @param msg can not be null
     */
    public void setViewStateEmpty(String msg) {
        mCurrentViewState = ViewState.EMPTY;
        mEmptyText = msg;
        showViewBasedOnState(true);
        setEmptyText(mEmptyText);
    }

    /** Registers a click listener on the error view (e.g. to trigger a retry). */
    public void setOnClickForError(OnClickListener onClickListener)
    {
        mErrorView.setOnClickListener(onClickListener);
    }

    /**
     * If a custom error view has been used it will have to include a textView with the ID R.id.state_error_text for
     * this method to not throw an exception!
     *
     * @param errorTxt can not be null
     */
    private void setErrorText(String errorTxt) {
        TextView errorTxtView = (TextView) findViewById(R.id.state_error_text);
        // Fix: use the parameter instead of silently re-reading mErrorText
        // (the old code ignored errorTxt entirely).
        errorTxtView.setText(errorTxt);
    }

    /**
     * If a custom empty view has been used it will have to include a textView with the ID R.id.state_empty_text for
     * this method to not throw an exception!
     *
     * @param emptyText can not be null
     */
    private void setEmptyText(String emptyText){
        TextView emptyTxtView = (TextView) findViewById(R.id.state_empty_text);
        emptyTxtView.setText(emptyText);
    }

    public ViewState getViewState() {
        return mCurrentViewState;
    }

    /**
     * Shows exactly the view matching mCurrentViewState and hides the rest.
     *
     * @param animate true if should animate when showing content
     *                NOTE(review): currently unused in this method — animations
     *                are driven by layoutTransitions instead; confirm before
     *                removing the parameter.
     */
    private void showViewBasedOnState(boolean animate) {
        // first time this is called contentView ref should/will be null - see #inflateStateViews
        if(mContentView == null)
        {
            mContentView = findViewById(mContentResId);
            if (mContentView == null) {
                throw new NullPointerException("contentView cannot be null, have you set the contentView attribute");
            }
            if(mContentView.getVisibility() != View.GONE)
                throw new RuntimeException("need to set gone in xml or will flicker");
        }
        switch (mCurrentViewState) {
        case NOT_INIT:
            // hide all
            mLoadingView.setVisibility(View.GONE);
            mEmptyView.setVisibility(View.GONE);
            mContentView.setVisibility(View.GONE);
            mErrorView.setVisibility(View.GONE);
            break;
        case CONTENT:
            // show content view
            mLoadingView.setVisibility(View.GONE);
            mEmptyView.setVisibility(View.GONE);
            mContentView.setVisibility(View.VISIBLE);
            mErrorView.setVisibility(View.GONE);
            break;
        case EMPTY:
            // show empty view
            mLoadingView.setVisibility(View.GONE);
            mEmptyView.setVisibility(View.VISIBLE);
            mContentView.setVisibility(View.GONE);
            mErrorView.setVisibility(View.GONE);
            break;
        case LOADING:
            // show loading view
            mLoadingView.setVisibility(View.VISIBLE);
            mEmptyView.setVisibility(View.GONE);
            mContentView.setVisibility(View.GONE);
            mErrorView.setVisibility(View.GONE);
            break;
        case ERROR:
            // show error view
            mLoadingView.setVisibility(View.GONE);
            mEmptyView.setVisibility(View.GONE);
            mContentView.setVisibility(View.GONE);
            mErrorView.setVisibility(View.VISIBLE);
            break;
        }
        invalidate();
    }

    public static enum ViewState{
        /**
         * Loading has not started yet
         */
        NOT_INIT,
        /**
         * Loading started
         */
        LOADING,
        /**
         * Loading finished and empty data
         */
        EMPTY,
        /**
         * Loading finished with success
         */
        CONTENT,
        /**
         * Loading finished with error
         */
        ERROR;
    }
}
| |
/*
* Copyright (c) 2009-2020 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.util;
import java.util.*;
/**
* <p>Provides a list with similar modification semantics to java.util.concurrent's
* CopyOnWriteArrayList except that it is not concurrent and also provides
* direct access to the current array. This List allows modification of the
* contents while iterating as any iterators will be looking at a snapshot of
* the list at the time they were created. Similarly, access the raw internal
* array is only presenting a snap shot and so can be safely iterated while
* the list is changing.</p>
*
* <p>All modifications, including set() operations will cause a copy of the
* data to be created that replaces the old version. Because this list is
* not designed for threading concurrency it further optimizes the "many modifications"
* case by buffering them as a normal ArrayList until the next time the contents
* are accessed.</p>
*
* <p>Normal list modification performance should be equal to ArrayList in a
* many situations and always better than CopyOnWriteArrayList. Optimum usage
* is when modifications are done infrequently or in batches... as is often the
* case in a scene graph. Read operations perform superior to all other methods
* as the array can be accessed directly.</p>
*
* <p>Important caveats over normal java.util.Lists:</p>
* <ul>
* <li>Even though this class supports modifying the list, the subList() method
* returns a read-only list. This technically breaks the List contract.</li>
* <li>The ListIterators returned by this class only support the remove()
* modification method. add() and set() are not supported on the iterator.
* Even after ListIterator.remove() or Iterator.remove() is called, this change
* is not reflected in the iterator instance as it is still refering to its
* original snapshot.
* </ul>
*
* @version $Revision$
* @author Paul Speed
*/
public class SafeArrayList<E> implements List<E>, Cloneable {

    // Implementing List directly to avoid accidentally acquiring
    // incorrect or non-optimal behavior from AbstractList. For
    // example, the default iterator() method will not work for
    // this list.

    // Note: given the particular use-cases this was intended,
    // it would make sense to nerf the public mutators and
    // make this publicly act like a read-only list.
    // SafeArrayList-specific methods could then be exposed
    // for the classes like Node and Spatial to use to manage
    // the list. This way the callers couldn't remove a child
    // without it being detached properly, for example.

    /** Runtime element type, required to create typed snapshot arrays. */
    private Class<E> elementType;
    /** Mutable edit buffer; non-null only while modifications are buffered. */
    private List<E> buffer;
    /** Read snapshot; non-null only while the list is in "read" mode. */
    private E[] backingArray;
    /** Cached element count, updated on every mutation. */
    private int size = 0;

    /**
     * Creates an empty list.
     *
     * @param elementType the runtime element type (used to build typed arrays)
     */
    public SafeArrayList(Class<E> elementType) {
        this.elementType = elementType;
    }

    /**
     * Creates an empty list with the given initial buffer capacity.
     */
    public SafeArrayList(final Class<E> elementType, final int capacity) {
        this.elementType = elementType;
        this.buffer = new ArrayList<>(capacity);
    }

    /**
     * Creates a list containing the elements of the given collection, in its
     * iteration order.
     */
    public SafeArrayList(final Class<E> elementType, final Collection<? extends E> collection) {
        this.elementType = elementType;
        this.buffer = new ArrayList<>(collection);
        this.size = buffer.size();
    }

    /**
     * Returns a shallow copy of this list; the elements themselves are not
     * cloned.
     */
    @Override
    @SuppressWarnings("unchecked")
    public SafeArrayList<E> clone() {
        try {
            SafeArrayList<E> clone = (SafeArrayList<E>)super.clone();
            // Clone whichever backing store is currently active
            if( backingArray != null ) {
                clone.backingArray = backingArray.clone();
            }
            if( buffer != null ) {
                clone.buffer = (List<E>)((ArrayList<E>)buffer).clone();
            }
            return clone;
        } catch( CloneNotSupportedException e ) {
            throw new AssertionError();
        }
    }

    @SuppressWarnings("unchecked")
    protected final <T> T[] createArray(Class<T> type, int size) {
        return (T[])java.lang.reflect.Array.newInstance(type, size);
    }

    protected final E[] createArray(int size) {
        return createArray(elementType, size);
    }

    /**
     * Returns a current snapshot of this List's backing array that
     * is guaranteed not to change through further List manipulation.
     * Changes to this array may or may not be reflected in the list and
     * should be avoided.
     */
    public final E[] getArray() {
        if( backingArray != null )
            return backingArray;
        if( buffer == null ) {
            backingArray = createArray(0);
        } else {
            // Only keep the array or the buffer but never both at
            // the same time. 1) it saves space, 2) it keeps the rest
            // of the code safer.
            backingArray = buffer.toArray( createArray(buffer.size()) );
            buffer = null;
        }
        return backingArray;
    }

    /**
     * Returns the mutable buffer, converting the current snapshot into a
     * buffer first if necessary. All mutators funnel through this.
     */
    protected final List<E> getBuffer() {
        if( buffer != null )
            return buffer;
        if( backingArray == null ) {
            buffer = new ArrayList<>();
        } else {
            // Only keep the array or the buffer but never both at
            // the same time. 1) it saves space, 2) it keeps the rest
            // of the code safer.
            buffer = new ArrayList<>( Arrays.asList(backingArray) );
            backingArray = null;
        }
        return buffer;
    }

    @Override
    public final int size() {
        return size;
    }

    @Override
    public final boolean isEmpty() {
        return size == 0;
    }

    @Override
    public boolean contains(Object o) {
        return indexOf(o) >= 0;
    }

    @Override
    public Iterator<E> iterator() {
        return listIterator();
    }

    @Override
    public Object[] toArray() {
        return getArray();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T[] toArray(T[] a) {
        E[] array = getArray();
        if (a.length < array.length) {
            return (T[])Arrays.copyOf(array, array.length, a.getClass());
        }
        System.arraycopy( array, 0, a, 0, array.length );
        // Per the List.toArray(T[]) contract, null-terminate when the
        // supplied array is longer than the list.
        if (a.length > array.length) {
            a[array.length] = null;
        }
        return a;
    }

    @Override
    public boolean add(E e) {
        boolean result = getBuffer().add(e);
        size = getBuffer().size();
        return result;
    }

    @Override
    public boolean remove(Object o) {
        boolean result = getBuffer().remove(o);
        size = getBuffer().size();
        return result;
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return Arrays.asList(getArray()).containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends E> c) {
        boolean result = getBuffer().addAll(c);
        size = getBuffer().size();
        return result;
    }

    @Override
    public boolean addAll(int index, Collection<? extends E> c) {
        boolean result = getBuffer().addAll(index, c);
        size = getBuffer().size();
        return result;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        boolean result = getBuffer().removeAll(c);
        size = getBuffer().size();
        return result;
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        boolean result = getBuffer().retainAll(c);
        size = getBuffer().size();
        return result;
    }

    @Override
    public void clear() {
        getBuffer().clear();
        size = 0;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        } else if (o instanceof SafeArrayList) {
            // Fast path: compare the raw snapshots directly.
            final Object[] targetArray = ((SafeArrayList) o).getArray();
            final E[] array = getArray();
            return Arrays.equals(targetArray, array);
        } else if (!(o instanceof List)) {//covers null too
            return false;
        }
        List other = (List)o;
        Iterator i1 = iterator();
        Iterator i2 = other.iterator();
        while( i1.hasNext() && i2.hasNext() ) {
            Object o1 = i1.next();
            Object o2 = i2.next();
            if( o1 == o2 )
                continue;
            if( o1 == null || !o1.equals(o2) )
                return false;
        }
        // Equal only when both iterators are exhausted (same length).
        return !(i1.hasNext() || i2.hasNext());
    }

    @Override
    public int hashCode() {
        // Exactly the hash code described in the List interface, basically
        E[] array = getArray();
        int result = 1;
        for( E e : array ) {
            result = 31 * result + (e == null ? 0 : e.hashCode());
        }
        return result;
    }

    @Override
    public final E get(int index) {
        if( backingArray != null )
            return backingArray[index];
        if( buffer != null )
            return buffer.get(index);
        throw new IndexOutOfBoundsException( "Index:" + index + ", Size:0" );
    }

    @Override
    public E set(int index, E element) {
        return getBuffer().set(index, element);
    }

    @Override
    public void add(int index, E element) {
        getBuffer().add(index, element);
        size = getBuffer().size();
    }

    @Override
    public E remove(int index) {
        E result = getBuffer().remove(index);
        size = getBuffer().size();
        return result;
    }

    @Override
    public int indexOf(Object o) {
        E[] array = getArray();
        for( int i = 0; i < array.length; i++ ) {
            E element = array[i];
            if( element == o ) {
                return i;
            }
            if( element != null && element.equals(o) ) {
                return i;
            }
        }
        return -1;
    }

    @Override
    public int lastIndexOf(Object o) {
        E[] array = getArray();
        for( int i = array.length - 1; i >= 0; i-- ) {
            E element = array[i];
            if( element == o ) {
                return i;
            }
            if( element != null && element.equals(o) ) {
                return i;
            }
        }
        return -1;
    }

    @Override
    public ListIterator<E> listIterator() {
        return new ArrayIterator<E>(getArray(), 0);
    }

    @Override
    public ListIterator<E> listIterator(int index) {
        return new ArrayIterator<E>(getArray(), index);
    }

    @Override
    public List<E> subList(int fromIndex, int toIndex) {
        // So far JME doesn't use subList that I can see so I'm nerfing it.
        List<E> raw = Arrays.asList(getArray()).subList(fromIndex, toIndex);
        return Collections.unmodifiableList(raw);
    }

    @Override
    public String toString() {
        E[] array = getArray();
        if( array.length == 0 ) {
            return "[]";
        }
        StringBuilder sb = new StringBuilder();
        sb.append('[');
        for( int i = 0; i < array.length; i++ ) {
            if( i > 0 )
                sb.append( ", " );
            E e = array[i];
            sb.append( e == this ? "(this Collection)" : e );
        }
        sb.append(']');
        return sb.toString();
    }

    /**
     * Snapshot-based iterator. It walks the array captured at creation time,
     * so later list modifications are never reflected in it. Only remove()
     * is supported among the mutating operations.
     */
    protected class ArrayIterator<E> implements ListIterator<E> {
        private E[] array;
        private int next;
        private int lastReturned;

        protected ArrayIterator( E[] array, int index ) {
            this.array = array;
            this.next = index;
            this.lastReturned = -1;
        }

        @Override
        public boolean hasNext() {
            return next != array.length;
        }

        @Override
        public E next() {
            if( !hasNext() )
                throw new NoSuchElementException();
            lastReturned = next++;
            return array[lastReturned];
        }

        @Override
        public boolean hasPrevious() {
            return next != 0;
        }

        @Override
        public E previous() {
            if( !hasPrevious() )
                throw new NoSuchElementException();
            lastReturned = --next;
            return array[lastReturned];
        }

        @Override
        public int nextIndex() {
            return next;
        }

        @Override
        public int previousIndex() {
            return next - 1;
        }

        @Override
        public void remove() {
            // Enforce the ListIterator contract: remove() is only legal after
            // a call to next()/previous(), and only once per such call.
            // Previously this indexed array[-1] (ArrayIndexOutOfBoundsException)
            // when misused, and repeated calls removed extra equal elements.
            if( lastReturned < 0 )
                throw new IllegalStateException("remove() called without a prior next()/previous()");
            // This operation is not so easy to do but we will fake it.
            // The issue is that the backing list could be completely
            // different than the one this iterator is a snapshot of.
            // We'll just remove(element) which in most cases will be
            // correct. If the list had earlier .equals() equivalent
            // elements then we'll remove one of those instead. Either
            // way, none of those changes are reflected in this iterator.
            SafeArrayList.this.remove( array[lastReturned] );
            lastReturned = -1;
        }

        @Override
        public void set(E e) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void add(E e) {
            throw new UnsupportedOperationException();
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.search;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageParserDefinitions;
import com.intellij.lang.ParserDefinition;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.fileTypes.SyntaxHighlighterFactory;
import com.intellij.openapi.fileTypes.impl.CustomSyntaxTableFileType;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.cache.CacheUtil;
import com.intellij.psi.impl.cache.TodoCacheManager;
import com.intellij.psi.search.IndexPattern;
import com.intellij.psi.search.IndexPatternOccurrence;
import com.intellij.psi.search.IndexPatternProvider;
import com.intellij.psi.search.searches.IndexPatternSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.Processor;
import com.intellij.util.QueryExecutor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CharSequenceSubSequence;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author yole
*/
public class IndexPatternSearcher implements QueryExecutor<IndexPatternOccurrence, IndexPatternSearch.SearchParameters> {
@Override
public boolean execute(@NotNull final IndexPatternSearch.SearchParameters queryParameters,
@NotNull final Processor<IndexPatternOccurrence> consumer) {
final PsiFile file = queryParameters.getFile();
VirtualFile virtualFile = file.getVirtualFile();
if (file instanceof PsiBinaryFile || file instanceof PsiCompiledElement || virtualFile == null) {
return true;
}
final TodoCacheManager cacheManager = TodoCacheManager.SERVICE.getInstance(file.getProject());
final IndexPatternProvider patternProvider = queryParameters.getPatternProvider();
int count = patternProvider != null
? cacheManager.getTodoCount(virtualFile, patternProvider)
: cacheManager.getTodoCount(virtualFile, queryParameters.getPattern());
return count == 0 || executeImpl(queryParameters, consumer);
}
protected static boolean executeImpl(IndexPatternSearch.SearchParameters queryParameters,
Processor<IndexPatternOccurrence> consumer) {
final IndexPatternProvider patternProvider = queryParameters.getPatternProvider();
final PsiFile file = queryParameters.getFile();
TIntArrayList commentStarts = new TIntArrayList();
TIntArrayList commentEnds = new TIntArrayList();
final CharSequence chars = file.getViewProvider().getContents();
findCommentTokenRanges(file, chars, queryParameters.getRange(), commentStarts, commentEnds);
TIntArrayList occurrences = new TIntArrayList(1);
IndexPattern[] patterns = patternProvider != null ? patternProvider.getIndexPatterns() : null;
for (int i = 0; i < commentStarts.size(); i++) {
int commentStart = commentStarts.get(i);
int commentEnd = commentEnds.get(i);
occurrences.resetQuick();
if (patternProvider != null) {
for (int j = patterns.length - 1; j >=0; --j) {
if (!collectPatternMatches(patterns[j], chars, commentStart, commentEnd, file, queryParameters.getRange(), consumer, occurrences)) {
return false;
}
}
}
else {
if (!collectPatternMatches(queryParameters.getPattern(), chars, commentStart, commentEnd, file, queryParameters.getRange(),
consumer, occurrences)) {
return false;
}
}
}
return true;
}
private static final TokenSet COMMENT_TOKENS =
TokenSet.create(CustomHighlighterTokenType.LINE_COMMENT, CustomHighlighterTokenType.MULTI_LINE_COMMENT);
private static void findCommentTokenRanges(final PsiFile file,
final CharSequence chars,
final TextRange range,
final TIntArrayList commentStarts,
final TIntArrayList commentEnds) {
if (file instanceof PsiPlainTextFile) {
FileType fType = file.getFileType();
if (fType instanceof CustomSyntaxTableFileType) {
Lexer lexer = SyntaxHighlighterFactory.getSyntaxHighlighter(fType, file.getProject(), file.getVirtualFile()).getHighlightingLexer();
findComments(lexer, chars, range, COMMENT_TOKENS, commentStarts, commentEnds, null);
}
else {
commentStarts.add(0);
commentEnds.add(file.getTextLength());
}
}
else {
final FileViewProvider viewProvider = file.getViewProvider();
final Set<Language> relevantLanguages = viewProvider.getLanguages();
for (Language lang : relevantLanguages) {
final TIntArrayList commentStartsList = new TIntArrayList();
final TIntArrayList commentEndsList = new TIntArrayList();
final SyntaxHighlighter syntaxHighlighter =
SyntaxHighlighterFactory.getSyntaxHighlighter(lang, file.getProject(), file.getVirtualFile());
Lexer lexer = syntaxHighlighter.getHighlightingLexer();
TokenSet commentTokens = null;
IndexPatternBuilder builderForFile = null;
for (IndexPatternBuilder builder : Extensions.getExtensions(IndexPatternBuilder.EP_NAME)) {
Lexer lexerFromBuilder = builder.getIndexingLexer(file);
if (lexerFromBuilder != null) {
lexer = lexerFromBuilder;
commentTokens = builder.getCommentTokenSet(file);
builderForFile = builder;
}
}
if (builderForFile == null) {
final ParserDefinition parserDefinition = LanguageParserDefinitions.INSTANCE.forLanguage(lang);
if (parserDefinition != null) {
commentTokens = parserDefinition.getCommentTokens();
}
}
if (commentTokens != null) {
findComments(lexer, chars, range, commentTokens, commentStartsList, commentEndsList, builderForFile);
mergeCommentLists(commentStarts, commentEnds, commentStartsList, commentEndsList);
}
}
}
}
private static void mergeCommentLists(TIntArrayList commentStarts,
TIntArrayList commentEnds,
TIntArrayList commentStartsList,
TIntArrayList commentEndsList) {
if (commentStarts.isEmpty() && commentEnds.isEmpty()) {
commentStarts.add(commentStartsList.toNativeArray());
commentEnds.add(commentEndsList.toNativeArray());
return;
}
ContainerUtil.mergeSortedArrays(commentStarts, commentEnds, commentStartsList, commentEndsList);
}
private static void findComments(final Lexer lexer,
final CharSequence chars,
final TextRange range,
final TokenSet commentTokens,
final TIntArrayList commentStarts,
final TIntArrayList commentEnds,
final IndexPatternBuilder builderForFile) {
for (lexer.start(chars); ; lexer.advance()) {
IElementType tokenType = lexer.getTokenType();
if (tokenType == null) break;
if (range != null) {
if (lexer.getTokenEnd() <= range.getStartOffset()) continue;
if (lexer.getTokenStart() >= range.getEndOffset()) break;
}
boolean isComment = commentTokens.contains(tokenType) || CacheUtil.isInComments(tokenType);
if (isComment) {
final int startDelta = builderForFile != null ? builderForFile.getCommentStartDelta(lexer.getTokenType()) : 0;
final int endDelta = builderForFile != null ? builderForFile.getCommentEndDelta(lexer.getTokenType()) : 0;
int start = lexer.getTokenStart() + startDelta;
int end = lexer.getTokenEnd() - endDelta;
assert start <= end : "Invalid comment range: " +
new TextRange(start, end) +
"; lexer token range=" +
new TextRange(lexer.getTokenStart(), lexer.getTokenEnd()) +
"; delta=" +
new TextRange(startDelta, endDelta) +
"; lexer=" +
lexer +
"; builder=" +
builderForFile +
"; chars length:" +
chars.length();
assert end <= chars.length() : "Invalid comment end: " +
new TextRange(start, end) +
"; lexer token range=" +
new TextRange(lexer.getTokenStart(), lexer.getTokenEnd()) +
"; delta=" +
new TextRange(startDelta, endDelta) +
"; lexer=" +
lexer +
"; builder=" +
builderForFile +
"; chars length:" +
chars.length();
commentStarts.add(start);
commentEnds.add(end);
}
}
}
/**
 * Runs {@code indexPattern} over the comment text of {@code chars} between
 * {@code commentStart} and {@code commentEnd}, feeding each non-empty, in-range,
 * not-yet-seen occurrence to {@code consumer}. Seen start offsets are tracked
 * in {@code matches} to de-duplicate.
 *
 * @return {@code false} as soon as the consumer rejects an occurrence,
 *         {@code true} otherwise (including when the pattern is {@code null})
 */
private static boolean collectPatternMatches(IndexPattern indexPattern,
                                             CharSequence chars,
                                             int commentStart,
                                             int commentEnd,
                                             PsiFile file,
                                             TextRange range,
                                             Processor<IndexPatternOccurrence> consumer,
                                             TIntArrayList matches
) {
  final Pattern pattern = indexPattern.getPattern();
  if (pattern == null) {
    return true;
  }
  ProgressManager.checkCanceled();
  // Match against a lightweight view of the comment text; offsets from the
  // matcher are relative to commentStart and must be shifted back.
  final CharSequence commentText = new CharSequenceSubSequence(chars, commentStart, commentEnd);
  final Matcher matcher = pattern.matcher(commentText);
  while (matcher.find()) {
    final int start = matcher.start() + commentStart;
    final int end = matcher.end() + commentStart;
    if (start != end) {
      final boolean insideRange =
          range == null || range.getStartOffset() <= start && end <= range.getEndOffset();
      if (insideRange && matches.indexOf(start) == -1) {
        matches.add(start);
        if (!consumer.process(new IndexPatternOccurrenceImpl(file, start, end, indexPattern))) {
          return false;
        }
      }
    }
    // Keep long scans cancellable.
    ProgressManager.checkCanceled();
  }
  return true;
}
}
| |
package com.s4game.oa.common.entity;
import java.math.BigDecimal;
import java.util.Date;
/**
 * Yearly asset plan row: one asset's planned total, unit price, and twelve
 * monthly amounts for a given year. Plain JavaBean, presumably mapped to a
 * database table by MyBatis — TODO confirm against the mapper XML.
 *
 * String setters trim their argument; {@code null} stays {@code null}.
 */
public class PlanAssertYear {
    private Long id;                 // surrogate primary key
    private Integer assertId;        // referenced asset id
    private String assertName;       // asset name (stored trimmed)
    private Integer yearTotalNum;    // planned total quantity for the year
    private BigDecimal unitPrice;    // price per unit
    private Integer year;            // plan year
    private BigDecimal month1;       // monthly planned amounts, Jan..Dec
    private BigDecimal month2;
    private BigDecimal month3;
    private BigDecimal month4;
    private BigDecimal month5;
    private BigDecimal month6;
    private BigDecimal month7;
    private BigDecimal month8;
    private BigDecimal month9;
    private BigDecimal month10;
    private BigDecimal month11;
    private BigDecimal month12;
    private String remark;           // free-form note (stored trimmed)
    private Date createTime;         // row creation timestamp
    private Date updateTime;         // last modification timestamp

    public Long getId() { return id; }

    public void setId(Long id) { this.id = id; }

    public Integer getAssertId() { return assertId; }

    public void setAssertId(Integer assertId) { this.assertId = assertId; }

    public String getAssertName() { return assertName; }

    /** Stores the asset name trimmed; a {@code null} argument stays {@code null}. */
    public void setAssertName(String assertName) {
        this.assertName = assertName == null ? null : assertName.trim();
    }

    public Integer getYearTotalNum() { return yearTotalNum; }

    public void setYearTotalNum(Integer yearTotalNum) { this.yearTotalNum = yearTotalNum; }

    public BigDecimal getUnitPrice() { return unitPrice; }

    public void setUnitPrice(BigDecimal unitPrice) { this.unitPrice = unitPrice; }

    public Integer getYear() { return year; }

    public void setYear(Integer year) { this.year = year; }

    public BigDecimal getMonth1() { return month1; }

    public void setMonth1(BigDecimal month1) { this.month1 = month1; }

    public BigDecimal getMonth2() { return month2; }

    public void setMonth2(BigDecimal month2) { this.month2 = month2; }

    public BigDecimal getMonth3() { return month3; }

    public void setMonth3(BigDecimal month3) { this.month3 = month3; }

    public BigDecimal getMonth4() { return month4; }

    public void setMonth4(BigDecimal month4) { this.month4 = month4; }

    public BigDecimal getMonth5() { return month5; }

    public void setMonth5(BigDecimal month5) { this.month5 = month5; }

    public BigDecimal getMonth6() { return month6; }

    public void setMonth6(BigDecimal month6) { this.month6 = month6; }

    public BigDecimal getMonth7() { return month7; }

    public void setMonth7(BigDecimal month7) { this.month7 = month7; }

    public BigDecimal getMonth8() { return month8; }

    public void setMonth8(BigDecimal month8) { this.month8 = month8; }

    public BigDecimal getMonth9() { return month9; }

    public void setMonth9(BigDecimal month9) { this.month9 = month9; }

    public BigDecimal getMonth10() { return month10; }

    public void setMonth10(BigDecimal month10) { this.month10 = month10; }

    public BigDecimal getMonth11() { return month11; }

    public void setMonth11(BigDecimal month11) { this.month11 = month11; }

    public BigDecimal getMonth12() { return month12; }

    public void setMonth12(BigDecimal month12) { this.month12 = month12; }

    public String getRemark() { return remark; }

    /** Stores the remark trimmed; a {@code null} argument stays {@code null}. */
    public void setRemark(String remark) {
        this.remark = remark == null ? null : remark.trim();
    }

    public Date getCreateTime() { return createTime; }

    public void setCreateTime(Date createTime) { this.createTime = createTime; }

    public Date getUpdateTime() { return updateTime; }

    public void setUpdateTime(Date updateTime) { this.updateTime = updateTime; }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.overlord;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.metamx.common.Granularity;
import com.metamx.common.ISE;
import com.metamx.common.Pair;
import com.metamx.common.guava.Comparators;
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.core.Event;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceEventBuilder;
import com.metamx.metrics.Monitor;
import com.metamx.metrics.MonitorScheduler;
import io.druid.client.cache.MapCache;
import io.druid.data.input.Firehose;
import io.druid.data.input.FirehoseFactory;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.data.input.impl.InputRowParser;
import io.druid.granularity.QueryGranularity;
import io.druid.indexing.common.SegmentLoaderFactory;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.TaskToolbox;
import io.druid.indexing.common.TaskToolboxFactory;
import io.druid.indexing.common.TestUtils;
import io.druid.indexing.common.actions.LocalTaskActionClientFactory;
import io.druid.indexing.common.actions.LockListAction;
import io.druid.indexing.common.actions.SegmentInsertAction;
import io.druid.indexing.common.actions.TaskActionClient;
import io.druid.indexing.common.actions.TaskActionClientFactory;
import io.druid.indexing.common.actions.TaskActionToolbox;
import io.druid.indexing.common.config.TaskConfig;
import io.druid.indexing.common.config.TaskStorageConfig;
import io.druid.indexing.common.task.AbstractFixedIntervalTask;
import io.druid.indexing.common.task.IndexTask;
import io.druid.indexing.common.task.KillTask;
import io.druid.indexing.common.task.RealtimeIndexTask;
import io.druid.indexing.common.task.Task;
import io.druid.indexing.common.task.TaskResource;
import io.druid.indexing.overlord.config.TaskQueueConfig;
import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.metadata.SQLMetadataStorageActionHandlerFactory;
import io.druid.metadata.TestDerbyConnector;
import io.druid.query.QueryRunnerFactoryConglomerate;
import io.druid.query.SegmentDescriptor;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.segment.IndexIO;
import io.druid.segment.IndexMerger;
import io.druid.segment.IndexSpec;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.RealtimeIOConfig;
import io.druid.segment.indexing.RealtimeTuningConfig;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.segment.loading.DataSegmentArchiver;
import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.loading.LocalDataSegmentKiller;
import io.druid.segment.loading.SegmentLoaderConfig;
import io.druid.segment.loading.SegmentLoaderLocalCacheManager;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.segment.loading.StorageLocationConfig;
import io.druid.segment.realtime.FireDepartment;
import io.druid.segment.realtime.FireDepartmentTest;
import io.druid.segment.realtime.plumber.SegmentHandoffNotifier;
import io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory;
import io.druid.server.coordination.DataSegmentAnnouncer;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NoneShardSpec;
import org.easymock.EasyMock;
import org.joda.time.DateTime;
import org.joda.time.Hours;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
@RunWith(Parameterized.class)
public class TaskLifecycleTest
{
private static final ObjectMapper MAPPER;
private static final IndexMerger INDEX_MERGER;
private static final IndexIO INDEX_IO;
// Build the shared (expensive) fixtures once for the whole suite via TestUtils.
static {
TestUtils testUtils = new TestUtils();
MAPPER = testUtils.getTestObjectMapper();
INDEX_MERGER = testUtils.getTestIndexMerger();
INDEX_IO = testUtils.getTestIndexIO();
}
// Parameterizes the suite over both TaskStorage implementations; each array
// becomes the constructor's taskStorageType argument.
@Parameterized.Parameters(name = "taskStorageType={0}")
public static Collection<String[]> constructFeed()
{
return Arrays.asList(new String[][]{{"HeapMemoryTaskStorage"}, {"MetadataTaskStorage"}});
}
// taskStorageType is injected by the Parameterized runner (see constructFeed()).
public TaskLifecycleTest(String taskStorageType)
{
this.taskStorageType = taskStorageType;
}
// JUnit-managed temp directory for task working files and segments.
public final
@Rule
TemporaryFolder temporaryFolder = new TemporaryFolder();
// Sorts segments by interval start, then end, for stable assertions.
private static final Ordering<DataSegment> byIntervalOrdering = new Ordering<DataSegment>()
{
@Override
public int compare(DataSegment dataSegment, DataSegment dataSegment2)
{
return Comparators.intervalsByStartThenEnd().compare(dataSegment.getInterval(), dataSegment2.getInterval());
}
};
// Captured once at class load so realtime rows fall in a known window.
private static DateTime now = new DateTime();
// Rows fed to the realtime index task: "now", now+1h, now+2h.
private static final Iterable<InputRow> realtimeIdxTaskInputRows = ImmutableList.of(
IR(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f),
IR(now.plus(new Period(Hours.ONE)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 2.0f),
IR(now.plus(new Period(Hours.TWO)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 3.0f)
);
// Rows fed to the batch index task: two days of data -> two daily segments.
private static final Iterable<InputRow> IdxTaskInputRows = ImmutableList.of(
IR("2010-01-01T01", "x", "y", 1),
IR("2010-01-01T01", "x", "z", 1),
IR("2010-01-02T01", "a", "b", 2),
IR("2010-01-02T01", "a", "c", 1)
);
// Per-test Derby instance, used only for the MetadataTaskStorage parameter.
@Rule
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
private final String taskStorageType;
// Per-test wiring, (re)built in setUp()/setUpAndStartTaskQueue().
private ObjectMapper mapper;
private TaskStorageQueryAdapter tsqa = null;
private File tmpDir = null;
private TaskStorage ts = null;
private TaskLockbox tl = null;
private TaskQueue tq = null;
private TaskRunner tr = null;
private TestIndexerMetadataStorageCoordinator mdc = null;
private TaskActionClientFactory tac = null;
private TaskToolboxFactory tb = null;
private IndexSpec indexSpec;
private QueryRunnerFactoryConglomerate queryRunnerFactoryConglomerate;
private MonitorScheduler monitorScheduler;
private ServiceEmitter emitter;
private TaskQueueConfig tqc;
// Counters bumped by the stub pusher/announcer so tests can assert on them.
private int pushedSegments;
private int announcedSinks;
// Static because it is read from static newMockMDC(); reset in setUp().
private static CountDownLatch publishCountDown;
private TestDerbyConnector testDerbyConnector;
private SegmentHandoffNotifierFactory handoffNotifierFactory;
// Handoff callbacks registered by realtime tasks; tests drain this to simulate handoff.
private Map<SegmentDescriptor, Pair<Executor, Runnable>> handOffCallbacks;
// Metadata coordinator stub that counts down publishCountDown whenever segments
// are announced, letting tests wait deterministically for the publish step.
private static TestIndexerMetadataStorageCoordinator newMockMDC()
{
return new TestIndexerMetadataStorageCoordinator()
{
@Override
public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments)
{
Set<DataSegment> retVal = super.announceHistoricalSegments(segments);
publishCountDown.countDown();
return retVal;
}
};
}
// Emitter that swallows all events; keeps task plumbing quiet during tests.
private static ServiceEmitter newMockEmitter()
{
return new ServiceEmitter(null, null, null)
{
@Override
public void emit(Event event)
{
}
@Override
public void emit(ServiceEventBuilder builder)
{
}
};
}
// Shorthand row factory: timestamp string plus two dimension values and one metric.
private static InputRow IR(String dt, String dim1, String dim2, float met)
{
return new MapBasedInputRow(
new DateTime(dt).getMillis(),
ImmutableList.of("dim1", "dim2"),
ImmutableMap.<String, Object>of(
"dim1", dim1,
"dim2", dim2,
"met", met
)
);
}
// Firehose whose nextRow() always throws, used to drive index tasks to FAILED.
private static class MockExceptionalFirehoseFactory implements FirehoseFactory
{
@Override
public Firehose connect(InputRowParser parser) throws IOException
{
return new Firehose()
{
@Override
public boolean hasMore()
{
// Always claims more data so the task is forced to call nextRow().
return true;
}
@Override
public InputRow nextRow()
{
throw new RuntimeException("HA HA HA");
}
@Override
public Runnable commit()
{
return new Runnable()
{
@Override
public void run()
{
}
};
}
@Override
public void close() throws IOException
{
}
};
}
}
// Firehose backed by one of the two canned row sets; the flag picks the
// realtime rows (time-relative) versus the batch index rows (fixed 2010 dates).
// Jackson-serializable so it can round-trip through MetadataTaskStorage.
private static class MockFirehoseFactory implements FirehoseFactory
{
@JsonProperty
private boolean usedByRealtimeIdxTask;
@JsonCreator
public MockFirehoseFactory(@JsonProperty("usedByRealtimeIdxTask") boolean usedByRealtimeIdxTask)
{
this.usedByRealtimeIdxTask = usedByRealtimeIdxTask;
}
@Override
public Firehose connect(InputRowParser parser) throws IOException
{
final Iterator<InputRow> inputRowIterator = usedByRealtimeIdxTask
? realtimeIdxTaskInputRows.iterator()
: IdxTaskInputRows.iterator();
return new Firehose()
{
@Override
public boolean hasMore()
{
return inputRowIterator.hasNext();
}
@Override
public InputRow nextRow()
{
return inputRowIterator.next();
}
@Override
public Runnable commit()
{
return new Runnable()
{
@Override
public void run()
{
}
};
}
@Override
public void close() throws IOException
{
}
};
}
}
// Builds the full per-test harness: mocks, TaskStorage (heap or Derby-backed
// metadata storage, per the class parameter), handoff plumbing, and finally a
// running TaskQueue with a counting segment pusher.
@Before
public void setUp() throws Exception
{
emitter = EasyMock.createMock(ServiceEmitter.class);
EmittingLogger.registerEmitter(emitter);
queryRunnerFactoryConglomerate = EasyMock.createStrictMock(QueryRunnerFactoryConglomerate.class);
monitorScheduler = EasyMock.createStrictMock(MonitorScheduler.class);
publishCountDown = new CountDownLatch(1);
announcedSinks = 0;
pushedSegments = 0;
tmpDir = temporaryFolder.newFolder();
TestUtils testUtils = new TestUtils();
mapper = testUtils.getTestObjectMapper();
// Aggressive sync/restart timings so queue state changes are visible quickly.
tqc = mapper.readValue(
"{\"startDelay\":\"PT0S\", \"restartDelay\":\"PT1S\", \"storageSyncRate\":\"PT0.5S\"}",
TaskQueueConfig.class
);
indexSpec = new IndexSpec();
if (taskStorageType.equals("HeapMemoryTaskStorage")) {
ts = new HeapMemoryTaskStorage(
new TaskStorageConfig(null)
{
}
);
} else if (taskStorageType.equals("MetadataTaskStorage")) {
testDerbyConnector = derbyConnectorRule.getConnector();
// Register the mock firehoses so task specs survive JSON round-trips.
mapper.registerSubtypes(
new NamedType(MockExceptionalFirehoseFactory.class, "mockExcepFirehoseFactory"),
new NamedType(MockFirehoseFactory.class, "mockFirehoseFactory")
);
testDerbyConnector.createTaskTables();
testDerbyConnector.createSegmentTable();
ts = new MetadataTaskStorage(
testDerbyConnector,
new TaskStorageConfig(null),
new SQLMetadataStorageActionHandlerFactory(
testDerbyConnector,
derbyConnectorRule.metadataTablesConfigSupplier().get(),
mapper
)
);
} else {
throw new RuntimeException(String.format("Unknown task storage type [%s]", taskStorageType));
}
handOffCallbacks = Maps.newConcurrentMap();
// Notifier that records handoff callbacks instead of performing real handoff;
// tests later run the recorded callback to simulate a historical loading the segment.
handoffNotifierFactory = new SegmentHandoffNotifierFactory()
{
@Override
public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource)
{
return new SegmentHandoffNotifier()
{
@Override
public boolean registerSegmentHandoffCallback(
SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable
)
{
handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
return true;
}
@Override
public void start()
{
//Noop
}
@Override
public void stop()
{
//Noop
}
Map<SegmentDescriptor, Pair<Executor, Runnable>> getHandOffCallbacks()
{
return handOffCallbacks;
}
};
}
};
setUpAndStartTaskQueue(
new DataSegmentPusher()
{
@Override
public String getPathForHadoop(String dataSource)
{
throw new UnsupportedOperationException();
}
@Override
public DataSegment push(File file, DataSegment segment) throws IOException
{
// Count pushes instead of writing anywhere.
pushedSegments++;
return segment;
}
}
);
}
// Wires the TaskToolboxFactory with the given pusher plus stub movers/archivers/
// announcers, then creates and starts the TaskQueue. Extracted so individual
// tests (e.g. testRealtimeIndexTaskFailure) can swap in a failing pusher.
private void setUpAndStartTaskQueue(DataSegmentPusher dataSegmentPusher)
{
final TaskConfig taskConfig = new TaskConfig(tmpDir.toString(), null, null, 50000, null, null, null);
tsqa = new TaskStorageQueryAdapter(ts);
tl = new TaskLockbox(ts);
mdc = newMockMDC();
tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter()));
tb = new TaskToolboxFactory(
taskConfig,
tac,
newMockEmitter(),
dataSegmentPusher,
new LocalDataSegmentKiller(),
// Mover/archiver stubs: return the segment unchanged.
new DataSegmentMover()
{
@Override
public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec)
throws SegmentLoadingException
{
return dataSegment;
}
},
new DataSegmentArchiver()
{
@Override
public DataSegment archive(DataSegment segment) throws SegmentLoadingException
{
return segment;
}
@Override
public DataSegment restore(DataSegment segment) throws SegmentLoadingException
{
return segment;
}
},
// Announcer stub: only counts single-segment announcements (announcedSinks).
new DataSegmentAnnouncer()
{
@Override
public void announceSegment(DataSegment segment) throws IOException
{
announcedSinks++;
}
@Override
public void unannounceSegment(DataSegment segment) throws IOException
{
}
@Override
public void announceSegments(Iterable<DataSegment> segments) throws IOException
{
}
@Override
public void unannounceSegments(Iterable<DataSegment> segments) throws IOException
{
}
}, // segment announcer
handoffNotifierFactory,
queryRunnerFactoryConglomerate, // query runner factory conglomerate corporation unionized collective
null, // query executor service
monitorScheduler, // monitor scheduler
new SegmentLoaderFactory(
new SegmentLoaderLocalCacheManager(
null,
new SegmentLoaderConfig()
{
@Override
public List<StorageLocationConfig> getLocations()
{
return Lists.newArrayList();
}
}, new DefaultObjectMapper()
)
),
MAPPER,
INDEX_MERGER,
INDEX_IO,
MapCache.create(0),
FireDepartmentTest.NO_CACHE_CONFIG
);
tr = new ThreadPoolTaskRunner(tb, taskConfig, emitter);
tq = new TaskQueue(tqc, ts, tr, tac, tl, emitter);
tq.start();
}
// Stops the queue started in setUp()/setUpAndStartTaskQueue().
@After
public void tearDown()
{
tq.stop();
}
// Happy-path batch indexing: four rows over two days should produce two daily
// segments with the expected dimensions and metric, logged and published alike.
@Test
public void testIndexTask() throws Exception
{
final Task indexTask = new IndexTask(
null,
null,
new IndexTask.IndexIngestionSpec(
new DataSchema(
"foo",
null,
new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")},
new UniformGranularitySpec(
Granularity.DAY,
null,
ImmutableList.of(new Interval("2010-01-01/P2D"))
),
mapper
),
new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)
),
mapper,
null
);
// No status should exist before the task is ever submitted.
final Optional<TaskStatus> preRunTaskStatus = tsqa.getStatus(indexTask.getId());
Assert.assertTrue("pre run task status not present", !preRunTaskStatus.isPresent());
final TaskStatus mergedStatus = runTask(indexTask);
final TaskStatus status = ts.getStatus(indexTask.getId()).get();
final List<DataSegment> publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished());
final List<DataSegment> loggedSegments = byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));
Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
Assert.assertEquals("merged statusCode", TaskStatus.Status.SUCCESS, mergedStatus.getStatusCode());
Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval());
Assert.assertEquals(
"segment1 dimensions",
ImmutableList.of("dim1", "dim2"),
publishedSegments.get(0).getDimensions()
);
Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());
Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval());
Assert.assertEquals(
"segment2 dimensions",
ImmutableList.of("dim1", "dim2"),
publishedSegments.get(1).getDimensions()
);
Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
}
// A firehose that throws on nextRow() must fail the index task and publish nothing.
@Test
public void testIndexTaskFailure() throws Exception
{
final Task indexTask = new IndexTask(
null,
null,
new IndexTask.IndexIngestionSpec(
new DataSchema(
"foo",
null,
new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")},
new UniformGranularitySpec(
Granularity.DAY,
null,
ImmutableList.of(new Interval("2010-01-01/P1D"))
),
mapper
),
new IndexTask.IndexIOConfig(new MockExceptionalFirehoseFactory()),
new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)
),
mapper,
null
);
final TaskStatus status = runTask(indexTask);
Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
// KillTask over a 4-day window: three unused segments (one interval deliberately
// left out of the window's gaps) must be nuked from metadata and their local
// files deleted.
@Test
public void testKillTask() throws Exception
{
final File tmpSegmentDir = temporaryFolder.newFolder();
List<DataSegment> expectedUnusedSegments = Lists.transform(
ImmutableList.<String>of(
"2011-04-01/2011-04-02",
"2011-04-02/2011-04-03",
"2011-04-04/2011-04-05"
), new Function<String, DataSegment>()
{
@Override
public DataSegment apply(String input)
{
final Interval interval = new Interval(input);
try {
return DataSegment.builder()
.dataSource("test_kill_task")
.interval(interval)
.loadSpec(
ImmutableMap.<String, Object>of(
"type",
"local",
"path",
tmpSegmentDir.getCanonicalPath()
+ "/druid/localStorage/wikipedia/"
+ interval.getStart()
+ "-"
+ interval.getEnd()
+ "/"
+ "2011-04-6T16:52:46.119-05:00"
+ "/0/index.zip"
)
)
.version("2011-04-6T16:52:46.119-05:00")
.dimensions(ImmutableList.<String>of())
.metrics(ImmutableList.<String>of())
.shardSpec(new NoneShardSpec())
.binaryVersion(9)
.size(0)
.build();
}
catch (IOException e) {
throw new ISE(e, "Error creating segments");
}
}
}
);
mdc.setUnusedSegments(expectedUnusedSegments);
// manually create local segments files
List<File> segmentFiles = Lists.newArrayList();
for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", new Interval("2011-04-01/P4D"))) {
File file = new File((String) segment.getLoadSpec().get("path"));
file.mkdirs();
segmentFiles.add(file);
}
final Task killTask = new KillTask(null, "test_kill_task", new Interval("2011-04-01/P4D"), null);
final TaskStatus status = runTask(killTask);
Assert.assertEquals("merged statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
Assert.assertEquals("num segments nuked", 3, mdc.getNuked().size());
// Set-equality check (containsAll both ways) since nuke order is unspecified.
Assert.assertTrue(
"expected unused segments get killed",
expectedUnusedSegments.containsAll(mdc.getNuked()) && mdc.getNuked().containsAll(
expectedUnusedSegments
)
);
for (File file : segmentFiles) {
Assert.assertFalse("unused segments files get deleted", file.exists());
}
}
// Runs the RealtimeishTask fixture (defined elsewhere); expects success and two
// published segments — presumably one per interval the task announces; see RealtimeishTask.
@Test
public void testRealtimeishTask() throws Exception
{
final Task rtishTask = new RealtimeishTask();
final TaskStatus status = runTask(rtishTask);
Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
/**
 * A noop task deserialized from JSON should run to SUCCESS without touching
 * segment metadata.
 */
@Test
public void testNoopTask() throws Exception
{
  // The literal used to carry a stray trailing quote after the closing brace
  // ("...}\"" -> trailing garbage Jackson silently ignored pre-2.9); removed
  // so the spec is valid JSON.
  final Task noopTask = new DefaultObjectMapper().readValue(
      "{\"type\":\"noop\", \"runTime\":\"100\"}",
      Task.class
  );
  final TaskStatus status = runTask(noopTask);
  Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
  Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
  Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
/**
 * A noop task whose isReady() throws must end FAILED and publish nothing.
 */
@Test
public void testNeverReadyTask() throws Exception
{
  // Stray trailing quote after the closing brace removed from the JSON literal
  // (it only parsed because Jackson ignored trailing tokens by default).
  final Task neverReadyTask = new DefaultObjectMapper().readValue(
      "{\"type\":\"noop\", \"isReadyResult\":\"exception\"}",
      Task.class
  );
  final TaskStatus status = runTask(neverReadyTask);
  Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
  Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
  Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
// Minimal custom task: acquires its interval lock, inserts one segment at the
// lock's version, and succeeds — the segment must appear in the MDC.
@Test
public void testSimple() throws Exception
{
final Task task = new AbstractFixedIntervalTask(
"id1",
"id1",
new TaskResource("id1", 1),
"ds",
new Interval("2012-01-01/P1D"),
null
)
{
@Override
public String getType()
{
return "test";
}
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception
{
final TaskLock myLock = Iterables.getOnlyElement(
toolbox.getTaskActionClient()
.submit(new LockListAction())
);
final DataSegment segment = DataSegment.builder()
.dataSource("ds")
.interval(new Interval("2012-01-01/P1D"))
.version(myLock.getVersion())
.build();
toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(segment)));
return TaskStatus.success(getId());
}
};
final TaskStatus status = runTask(task);
Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
Assert.assertEquals("segments published", 1, mdc.getPublished().size());
Assert.assertEquals("segments nuked", 0, mdc.getNuked().size());
}
// Inserting a segment whose interval (P2D) exceeds the task's lock interval (P1D)
// must be rejected, failing the task and publishing nothing.
@Test
public void testBadInterval() throws Exception
{
final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null)
{
@Override
public String getType()
{
return "test";
}
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception
{
final TaskLock myLock = Iterables.getOnlyElement(toolbox.getTaskActionClient().submit(new LockListAction()));
final DataSegment segment = DataSegment.builder()
.dataSource("ds")
.interval(new Interval("2012-01-01/P2D"))
.version(myLock.getVersion())
.build();
toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(segment)));
return TaskStatus.success(getId());
}
};
final TaskStatus status = runTask(task);
Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
Assert.assertEquals("segments published", 0, mdc.getPublished().size());
Assert.assertEquals("segments nuked", 0, mdc.getNuked().size());
}
// Inserting a segment whose version does not match the held lock's version must
// be rejected, failing the task and publishing nothing.
@Test
public void testBadVersion() throws Exception
{
final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null)
{
@Override
public String getType()
{
return "test";
}
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception
{
final TaskLock myLock = Iterables.getOnlyElement(toolbox.getTaskActionClient().submit(new LockListAction()));
final DataSegment segment = DataSegment.builder()
.dataSource("ds")
.interval(new Interval("2012-01-01/P1D"))
.version(myLock.getVersion() + "1!!!1!!")
.build();
toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(segment)));
return TaskStatus.success(getId());
}
};
final TaskStatus status = runTask(task);
Assert.assertEquals("statusCode", TaskStatus.Status.FAILED, status.getStatusCode());
Assert.assertEquals("segments published", 0, mdc.getPublished().size());
Assert.assertEquals("segments nuked", 0, mdc.getNuked().size());
}
// End-to-end realtime task: waits for the publish latch, then manually runs the
// recorded handoff callback to simulate a historical loading the segment, and
// verifies the task finishes SUCCESS with one announced/pushed/published segment.
@Test(timeout = 4000L)
public void testRealtimeIndexTask() throws Exception
{
monitorScheduler.addMonitor(EasyMock.anyObject(Monitor.class));
EasyMock.expectLastCall().atLeastOnce();
monitorScheduler.removeMonitor(EasyMock.anyObject(Monitor.class));
EasyMock.expectLastCall().anyTimes();
EasyMock.replay(monitorScheduler, queryRunnerFactoryConglomerate);
RealtimeIndexTask realtimeIndexTask = giveMeARealtimeIndexTask();
final String taskId = realtimeIndexTask.getId();
tq.add(realtimeIndexTask);
//wait for task to process events and publish segment
Assert.assertTrue(publishCountDown.await(1000, TimeUnit.MILLISECONDS));
// Realtime Task has published the segment, simulate loading of segment to a historical node so that task finishes with SUCCESS status
Assert.assertEquals(1, handOffCallbacks.size());
Pair<Executor, Runnable> executorRunnablePair = Iterables.getOnlyElement(handOffCallbacks.values());
executorRunnablePair.lhs.execute(executorRunnablePair.rhs);
handOffCallbacks.clear();
// Wait for realtime index task to handle callback in plumber and succeed
while (tsqa.getStatus(taskId).get().isRunnable()) {
Thread.sleep(10);
}
Assert.assertTrue("Task should be in Success state", tsqa.getStatus(taskId).get().isSuccess());
Assert.assertEquals(1, announcedSinks);
Assert.assertEquals(1, pushedSegments);
Assert.assertEquals(1, mdc.getPublished().size());
DataSegment segment = mdc.getPublished().iterator().next();
Assert.assertEquals("test_ds", segment.getDataSource());
Assert.assertEquals(ImmutableList.of("dim1", "dim2"), segment.getDimensions());
Assert.assertEquals(
new Interval(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")),
segment.getInterval()
);
Assert.assertEquals(ImmutableList.of("count"), segment.getMetrics());
EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
}
// Rebuilds the toolbox with a pusher that always throws, so the realtime task's
// publish attempt fails and the task ends in the Failure state.
@Test(timeout = 4000L)
public void testRealtimeIndexTaskFailure() throws Exception
{
setUpAndStartTaskQueue(
new DataSegmentPusher()
{
@Override
public String getPathForHadoop(String s)
{
throw new UnsupportedOperationException();
}
@Override
public DataSegment push(File file, DataSegment dataSegment) throws IOException
{
throw new RuntimeException("FAILURE");
}
}
);
monitorScheduler.addMonitor(EasyMock.anyObject(Monitor.class));
EasyMock.expectLastCall().atLeastOnce();
monitorScheduler.removeMonitor(EasyMock.anyObject(Monitor.class));
EasyMock.expectLastCall().anyTimes();
EasyMock.replay(monitorScheduler, queryRunnerFactoryConglomerate);
RealtimeIndexTask realtimeIndexTask = giveMeARealtimeIndexTask();
final String taskId = realtimeIndexTask.getId();
tq.add(realtimeIndexTask);
// Wait for realtime index task to fail
while (tsqa.getStatus(taskId).get().isRunnable()) {
Thread.sleep(10);
}
Assert.assertTrue("Task should be in Failure state", tsqa.getStatus(taskId).get().isFailure());
EasyMock.verify(monitorScheduler, queryRunnerFactoryConglomerate);
}
// Inserts a RUNNING task directly into TaskStorage (bypassing TaskQueue.add) and
// verifies the queue's storage sync picks it up and runs it to completion with
// the same results as testIndexTask. Bounded by a 10s watchdog.
@Test
public void testResumeTasks() throws Exception
{
final Task indexTask = new IndexTask(
null,
null,
new IndexTask.IndexIngestionSpec(
new DataSchema(
"foo",
null,
new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")},
new UniformGranularitySpec(
Granularity.DAY,
null,
ImmutableList.of(new Interval("2010-01-01/P2D"))
),
mapper
),
new IndexTask.IndexIOConfig(new MockFirehoseFactory(false)),
new IndexTask.IndexTuningConfig(10000, 10, -1, indexSpec)
),
mapper,
null
);
final long startTime = System.currentTimeMillis();
// manually insert the task into TaskStorage, waiting for TaskQueue to sync from storage
ts.insert(indexTask, TaskStatus.running(indexTask.getId()));
while (tsqa.getStatus(indexTask.getId()).get().isRunnable()) {
if (System.currentTimeMillis() > startTime + 10 * 1000) {
throw new ISE("Where did the task go?!: %s", indexTask.getId());
}
Thread.sleep(100);
}
final TaskStatus status = ts.getStatus(indexTask.getId()).get();
final List<DataSegment> publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished());
final List<DataSegment> loggedSegments = byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId()));
Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
Assert.assertEquals("segments logged vs published", loggedSegments, publishedSegments);
Assert.assertEquals("num segments published", 2, mdc.getPublished().size());
Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource());
Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval());
Assert.assertEquals(
"segment1 dimensions",
ImmutableList.of("dim1", "dim2"),
publishedSegments.get(0).getDimensions()
);
Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics());
Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource());
Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval());
Assert.assertEquals(
"segment2 dimensions",
ImmutableList.of("dim1", "dim2"),
publishedSegments.get(1).getDimensions()
);
Assert.assertEquals("segment2 metrics", ImmutableList.of("met"), publishedSegments.get(1).getMetrics());
}
/**
 * Runs {@code task} alongside a no-op "dummy" task, waits for both to leave the
 * runnable state (10 second budget shared across both), and returns the terminal
 * status of {@code task}.
 *
 * NOTE(review): the dummy-task JSON literal below carries a stray trailing quote
 * after the closing brace; Jackson tolerates trailing content here, but confirm
 * this is intentional before cleaning it up.
 */
private TaskStatus runTask(final Task task) throws Exception
{
  final Task dummyTask = new DefaultObjectMapper().readValue(
      "{\"type\":\"noop\", \"isReadyResult\":\"exception\"}\"",
      Task.class
  );
  final long pollStart = System.currentTimeMillis();

  // The two tasks must be distinguishable by id for the result bookkeeping below.
  Preconditions.checkArgument(!task.getId().equals(dummyTask.getId()));

  tq.add(dummyTask);
  tq.add(task);

  TaskStatus result = null;
  for (final String taskId : ImmutableList.of(dummyTask.getId(), task.getId())) {
    try {
      TaskStatus status;
      while ((status = tsqa.getStatus(taskId).get()).isRunnable()) {
        if (System.currentTimeMillis() > pollStart + 10 * 1000) {
          throw new ISE("Where did the task go?!: %s", task.getId());
        }
        Thread.sleep(100);
      }
      // Only the caller's task contributes to the return value; the dummy is discarded.
      if (taskId.equals(task.getId())) {
        result = status;
      }
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }
  return result;
}
/**
 * Builds a RealtimeIndexTask over the "test_ds" datasource backed by a mock
 * firehose. The task id embeds the current time so repeated calls yield
 * distinct ids.
 */
private RealtimeIndexTask giveMeARealtimeIndexTask()
{
  final String taskId = String.format("rt_task_%s", System.currentTimeMillis());

  final DataSchema schema = new DataSchema(
      "test_ds",
      null,
      new AggregatorFactory[]{new LongSumAggregatorFactory("count", "rows")},
      new UniformGranularitySpec(Granularity.DAY, QueryGranularity.NONE, null),
      mapper
  );

  final RealtimeIOConfig ioConfig = new RealtimeIOConfig(
      new MockFirehoseFactory(true),
      null,
      // PlumberSchool - Realtime Index Task always uses RealtimePlumber which is hardcoded in RealtimeIndexTask class
      null
  );

  final RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
      1000,
      new Period("P1Y"),
      null, //default window period of 10 minutes
      null, // base persist dir ignored by Realtime Index task
      null,
      null,
      null,
      null,
      null
  );

  return new RealtimeIndexTask(
      taskId,
      new TaskResource(taskId, 1),
      new FireDepartment(schema, ioConfig, tuningConfig),
      null
  );
}
}
| |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.policy;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import android.content.Context;
import android.content.SharedPreferences;
import android.util.Pair;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.chromium.base.CollectionUtil;
import org.chromium.base.ContextUtils;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.build.BuildConfig;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/** Robolectric test for PolicyCache. */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public final class PolicyCacheTest {
private static final String POLICY_NAME = "policy-name";
private static final String POLICY_NAME_2 = "policy-name-2";
private static final String POLICY_NAME_3 = "policy-name-3";
private static final String POLICY_NAME_4 = "policy-name-4";
private static final String POLICY_NAME_5 = "policy-name-5";
private SharedPreferences mSharedPreferences;
private PolicyCache mPolicyCache;
@Mock
private PolicyMap mPolicyMap;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
PolicyCache.resetForTesting();
mPolicyCache = PolicyCache.get();
mSharedPreferences = ContextUtils.getApplicationContext().getSharedPreferences(
PolicyCache.POLICY_PREF, Context.MODE_PRIVATE);
initPolicyMap();
}
private void initPolicyMap() {
when(mPolicyMap.getIntValue(anyString())).thenReturn(null);
when(mPolicyMap.getBooleanValue(anyString())).thenReturn(null);
when(mPolicyMap.getStringValue(anyString())).thenReturn(null);
when(mPolicyMap.getListValueAsString(anyString())).thenReturn(null);
when(mPolicyMap.getDictValueAsString(anyString())).thenReturn(null);
}
@After
public void tearDown() {}
@Test
public void testGetInt() {
Assert.assertNull(mPolicyCache.getIntValue(POLICY_NAME));
int expectedPolicyValue = 42;
mSharedPreferences.edit().putInt(POLICY_NAME, expectedPolicyValue).apply();
Assert.assertEquals(expectedPolicyValue, mPolicyCache.getIntValue(POLICY_NAME).intValue());
}
@Test
public void testGetBoolean() {
Assert.assertNull(mPolicyCache.getBooleanValue(POLICY_NAME));
boolean expectedPolicyValue = true;
mSharedPreferences.edit().putBoolean(POLICY_NAME, expectedPolicyValue).apply();
Assert.assertEquals(
expectedPolicyValue, mPolicyCache.getBooleanValue(POLICY_NAME).booleanValue());
}
@Test
public void testGetString() {
Assert.assertNull(mPolicyCache.getStringValue(POLICY_NAME));
String expectedPolicyValue = "test-value";
mSharedPreferences.edit().putString(POLICY_NAME, expectedPolicyValue).apply();
Assert.assertEquals(expectedPolicyValue, mPolicyCache.getStringValue(POLICY_NAME));
}
@Test
public void testGetList() throws JSONException {
Assert.assertNull(mPolicyCache.getListValue(POLICY_NAME));
String policyValue = "[42, \"test\", true]";
mSharedPreferences.edit().putString(POLICY_NAME, policyValue).apply();
JSONArray actualPolicyValue = mPolicyCache.getListValue(POLICY_NAME);
Assert.assertNotNull(actualPolicyValue);
Assert.assertEquals(3, actualPolicyValue.length());
Assert.assertEquals(42, actualPolicyValue.getInt(0));
Assert.assertEquals("test", actualPolicyValue.getString(1));
Assert.assertEquals(true, actualPolicyValue.getBoolean(2));
}
@Test
public void testGetInvalidList() throws JSONException {
String policyValue = "[42, \"test\"";
mSharedPreferences.edit().putString(POLICY_NAME, policyValue).apply();
Assert.assertNull(mPolicyCache.getListValue(POLICY_NAME));
}
@Test
public void testGetDict() throws JSONException {
Assert.assertNull(mPolicyCache.getDictValue(POLICY_NAME));
String policyValue = "{\"key1\":\"value1\", \"key2\":{\"a\":1, \"b\":2}}";
mSharedPreferences.edit().putString(POLICY_NAME, policyValue).apply();
JSONObject actualPolicyValue = mPolicyCache.getDictValue(POLICY_NAME);
Assert.assertNotNull(actualPolicyValue);
Assert.assertEquals(2, actualPolicyValue.length());
Assert.assertEquals("value1", actualPolicyValue.getString("key1"));
Assert.assertEquals(1, actualPolicyValue.getJSONObject("key2").getInt("a"));
Assert.assertEquals(2, actualPolicyValue.getJSONObject("key2").getInt("b"));
}
@Test
public void testGetInvalidDict() throws JSONException {
Assert.assertNull(mPolicyCache.getDictValue(POLICY_NAME));
String policyValue = "{\"key1\":\"value1\", \"key2\":{\"a\":1, \"b\":2}";
mSharedPreferences.edit().putString(POLICY_NAME, policyValue).apply();
Assert.assertNull(mPolicyCache.getListValue(POLICY_NAME));
}
@Test
public void testCachePolicies() {
cachePolicies(CollectionUtil.newHashMap(
Pair.create(POLICY_NAME, Pair.create(PolicyCache.Type.Integer, 1)),
Pair.create(POLICY_NAME_2, Pair.create(PolicyCache.Type.Boolean, true)),
Pair.create(POLICY_NAME_3, Pair.create(PolicyCache.Type.String, "2")),
Pair.create(POLICY_NAME_4, Pair.create(PolicyCache.Type.List, "[1]")),
Pair.create(POLICY_NAME_5, Pair.create(PolicyCache.Type.Dict, "{1:2}"))));
Assert.assertEquals(1, mSharedPreferences.getInt(POLICY_NAME, 0));
Assert.assertEquals(true, mSharedPreferences.getBoolean(POLICY_NAME_2, false));
Assert.assertEquals("2", mSharedPreferences.getString(POLICY_NAME_3, null));
Assert.assertEquals("[1]", mSharedPreferences.getString(POLICY_NAME_4, null));
Assert.assertEquals("{1:2}", mSharedPreferences.getString(POLICY_NAME_5, null));
}
@Test
public void testCacheUpdated() {
cachePolicies(CollectionUtil.newHashMap(
Pair.create(POLICY_NAME, Pair.create(PolicyCache.Type.Integer, 1))));
cachePolicies(CollectionUtil.newHashMap(
Pair.create(POLICY_NAME_2, Pair.create(PolicyCache.Type.Boolean, true))));
Assert.assertFalse(mSharedPreferences.contains(POLICY_NAME));
Assert.assertEquals(true, mSharedPreferences.getBoolean(POLICY_NAME_2, false));
}
@Test
public void testNotCachingUnnecessaryPolicy() {
when(mPolicyMap.getIntValue(eq(POLICY_NAME))).thenReturn(1);
when(mPolicyMap.getBooleanValue(eq(POLICY_NAME_2))).thenReturn(true);
mPolicyCache.cachePolicies(
mPolicyMap, Arrays.asList(Pair.create(POLICY_NAME_2, PolicyCache.Type.Boolean)));
Assert.assertFalse(mSharedPreferences.contains(POLICY_NAME));
Assert.assertEquals(true, mSharedPreferences.getBoolean(POLICY_NAME_2, false));
}
@Test
public void testNotCachingUnavailablePolicy() {
when(mPolicyMap.getBooleanValue(eq(POLICY_NAME_2))).thenReturn(true);
mPolicyCache.cachePolicies(mPolicyMap,
Arrays.asList(Pair.create(POLICY_NAME, PolicyCache.Type.Integer),
Pair.create(POLICY_NAME_2, PolicyCache.Type.Boolean)));
Assert.assertFalse(mSharedPreferences.contains(POLICY_NAME));
Assert.assertEquals(true, mSharedPreferences.getBoolean(POLICY_NAME_2, false));
}
@Test
public void testWriteOnlyAfterCacheUpdate() {
mSharedPreferences.edit()
.putInt(POLICY_NAME, 1)
.putBoolean(POLICY_NAME_2, true)
.putString(POLICY_NAME_3, "a")
.putString(POLICY_NAME_4, "[1]")
.putString(POLICY_NAME_5, "{1:2}")
.apply();
Assert.assertTrue(mPolicyCache.isReadable());
cachePolicies(CollectionUtil.newHashMap(
Pair.create(POLICY_NAME, Pair.create(PolicyCache.Type.Integer, 1)),
Pair.create(POLICY_NAME_2, Pair.create(PolicyCache.Type.Boolean, true)),
Pair.create(POLICY_NAME_3, Pair.create(PolicyCache.Type.String, "2")),
Pair.create(POLICY_NAME_4, Pair.create(PolicyCache.Type.List, "[1]")),
Pair.create(POLICY_NAME_5, Pair.create(PolicyCache.Type.Dict, "{1:2}"))));
Assert.assertFalse(mPolicyCache.isReadable());
if (BuildConfig.ENABLE_ASSERTS) {
assertAssertionError(() -> mPolicyCache.getIntValue(POLICY_NAME));
assertAssertionError(() -> mPolicyCache.getBooleanValue(POLICY_NAME_2));
assertAssertionError(() -> mPolicyCache.getStringValue(POLICY_NAME_3));
assertAssertionError(() -> mPolicyCache.getListValue(POLICY_NAME_4));
assertAssertionError(() -> mPolicyCache.getDictValue(POLICY_NAME_5));
}
}
/**
* @param policies A Map for policies that needs to be cached. Each policy
* name is mapped to a pair of policy type and policy value.
* Setting up {@link #mPolicyCache} mock and call {@link PolicyCache#cachePolicies}.
*/
private void cachePolicies(Map<String, Pair<PolicyCache.Type, Object>> policies) {
List<Pair<String, PolicyCache.Type>> cachedPolicies = new ArrayList();
CollectionUtil.forEach(policies, entry -> {
String policyName = entry.getKey();
PolicyCache.Type policyType = entry.getValue().first;
Object policyValue = entry.getValue().second;
switch (policyType) {
case Integer:
when(mPolicyMap.getIntValue(eq(policyName))).thenReturn((Integer) policyValue);
break;
case Boolean:
when(mPolicyMap.getBooleanValue(eq(policyName)))
.thenReturn((Boolean) policyValue);
break;
case String:
when(mPolicyMap.getStringValue(eq(policyName)))
.thenReturn((String) policyValue);
break;
case List:
when(mPolicyMap.getListValueAsString(eq(policyName)))
.thenReturn((String) policyValue);
break;
case Dict:
when(mPolicyMap.getDictValueAsString(eq(policyName)))
.thenReturn((String) policyValue);
break;
}
cachedPolicies.add(Pair.create(policyName, policyType));
});
mPolicyCache.cachePolicies(mPolicyMap, cachedPolicies);
}
private void assertAssertionError(Runnable runnable) {
AssertionError assertionError = null;
try {
runnable.run();
} catch (AssertionError e) {
assertionError = e;
}
Assert.assertNotNull("AssertionError not thrown", assertionError);
}
}
| |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.media;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.RestrictionsManager;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.net.Uri;
import android.os.Build;
import android.provider.Browser;
import android.text.TextUtils;
import androidx.browser.customtabs.CustomTabsIntent;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.ContextUtils;
import org.chromium.base.SysUtils;
import org.chromium.base.task.PostTask;
import org.chromium.base.task.TaskTraits;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.IntentHandler;
import org.chromium.chrome.browser.browserservices.BrowserServicesIntentDataProvider.CustomTabsUiType;
import org.chromium.chrome.browser.customtabs.CustomTabIntentDataProvider;
import org.chromium.chrome.browser.document.ChromeLauncherActivity;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import java.util.Locale;
/**
 * A class containing some utility static methods.
 */
public class MediaViewerUtils {
    private static final String DEFAULT_MIME_TYPE = "*/*";
    private static final String MIMETYPE_AUDIO = "audio";
    private static final String MIMETYPE_IMAGE = "image";
    private static final String MIMETYPE_VIDEO = "video";

    private static boolean sIsMediaLauncherActivityForceEnabledForTest;

    /**
     * Creates an Intent that allows viewing the given file in an internal media viewer.
     * @param displayUri URI to display to the user, ideally in file:// form.
     * @param contentUri content:// URI pointing at the file.
     * @param mimeType MIME type of the file.
     * @param allowExternalAppHandlers Whether the viewer should allow the user to open with another
     *                                 app.
     * @return Intent that can be fired to open the file.
     */
    public static Intent getMediaViewerIntent(
            Uri displayUri, Uri contentUri, String mimeType, boolean allowExternalAppHandlers) {
        Context context = ContextUtils.getApplicationContext();
        Bitmap closeIcon = BitmapFactory.decodeResource(
                context.getResources(), R.drawable.ic_arrow_back_white_24dp);
        Bitmap shareIcon = BitmapFactory.decodeResource(
                context.getResources(), R.drawable.ic_share_white_24dp);
        CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
        builder.setToolbarColor(Color.BLACK);
        builder.setCloseButtonIcon(closeIcon);
        builder.setShowTitle(true);
        if (allowExternalAppHandlers && !willExposeFileUri(contentUri)) {
            // Create a PendingIntent that can be used to view the file externally.
            // TODO(https://crbug.com/795968): Check if this is problematic in multi-window mode,
            //                                 where two different viewers could be visible at the
            //                                 same time.
            Intent viewIntent = createViewIntentForUri(contentUri, mimeType, null, null);
            Intent chooserIntent = Intent.createChooser(viewIntent, null);
            chooserIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            String openWithStr = context.getString(R.string.download_manager_open_with);
            PendingIntent pendingViewIntent = PendingIntent.getActivity(
                    context, 0, chooserIntent, PendingIntent.FLAG_CANCEL_CURRENT);
            builder.addMenuItem(openWithStr, pendingViewIntent);
        }
        // Create a PendingIntent that shares the file with external apps.
        // If the URI is a file URI and the Android version is N or later, this will throw a
        // FileUriExposedException. In this case, we just don't add the share button.
        if (!willExposeFileUri(contentUri)) {
            PendingIntent pendingShareIntent = PendingIntent.getActivity(context, 0,
                    createShareIntent(contentUri, mimeType), PendingIntent.FLAG_CANCEL_CURRENT);
            builder.setActionButton(
                    shareIcon, context.getString(R.string.share), pendingShareIntent, true);
        }
        // The color of the media viewer is dependent on the file type.
        int backgroundRes;
        if (isImageType(mimeType)) {
            backgroundRes = R.color.image_viewer_bg;
        } else {
            backgroundRes = R.color.media_viewer_bg;
        }
        int mediaColor = ApiCompatibilityUtils.getColor(context.getResources(), backgroundRes);
        // Build up the Intent further.
        Intent intent = builder.build().intent;
        intent.setPackage(context.getPackageName());
        intent.setData(contentUri);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_UI_TYPE, CustomTabsUiType.MEDIA_VIEWER);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_MEDIA_VIEWER_URL, displayUri.toString());
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_ENABLE_EMBEDDED_MEDIA_EXPERIENCE, true);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_INITIAL_BACKGROUND_COLOR, mediaColor);
        intent.putExtra(CustomTabsIntent.EXTRA_TOOLBAR_COLOR, mediaColor);
        intent.putExtra(Browser.EXTRA_APPLICATION_ID, context.getPackageName());
        IntentHandler.addTrustedIntentExtras(intent);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.setClass(context, ChromeLauncherActivity.class);
        return intent;
    }

    /**
     * Creates an Intent to open the file in another app by firing an Intent to Android.
     * @param fileUri Uri pointing to the file.
     * @param mimeType MIME type for the file.
     * @param originalUrl The original url of the downloaded file.
     * @param referrer Referrer of the downloaded file.
     * @return Intent that can be used to start an Activity for the file.
     */
    public static Intent createViewIntentForUri(
            Uri fileUri, String mimeType, String originalUrl, String referrer) {
        Intent fileIntent = new Intent(Intent.ACTION_VIEW);
        String normalizedMimeType = Intent.normalizeMimeType(mimeType);
        if (TextUtils.isEmpty(normalizedMimeType)) {
            fileIntent.setData(fileUri);
        } else {
            fileIntent.setDataAndType(fileUri, normalizedMimeType);
        }
        fileIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        fileIntent.addFlags(Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        fileIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        setOriginalUrlAndReferralExtraToIntent(fileIntent, originalUrl, referrer);
        return fileIntent;
    }

    /**
     * Adds the originating Uri and referrer extras to an intent if they are not null.
     * @param intent Intent for adding extras.
     * @param originalUrl The original url of the downloaded file.
     * @param referrer Referrer of the downloaded file.
     */
    public static void setOriginalUrlAndReferralExtraToIntent(
            Intent intent, String originalUrl, String referrer) {
        if (originalUrl != null) {
            intent.putExtra(Intent.EXTRA_ORIGINATING_URI, Uri.parse(originalUrl));
        }
        // Bug fix: the referrer extra previously parsed originalUrl instead of referrer,
        // so EXTRA_REFERRER never carried the actual referrer value.
        if (referrer != null) intent.putExtra(Intent.EXTRA_REFERRER, Uri.parse(referrer));
    }

    /**
     * Determines the media type from the given MIME type.
     * @param mimeType The MIME type to check.
     * @return MediaLauncherActivity.MediaType enum value for determined media type.
     */
    static int getMediaTypeFromMIMEType(String mimeType) {
        if (TextUtils.isEmpty(mimeType)) return MediaLauncherActivity.MediaType.UNKNOWN;
        String[] pieces = mimeType.toLowerCase(Locale.getDefault()).split("/");
        if (pieces.length != 2) return MediaLauncherActivity.MediaType.UNKNOWN;
        switch (pieces[0]) {
            case MIMETYPE_AUDIO:
                return MediaLauncherActivity.MediaType.AUDIO;
            case MIMETYPE_IMAGE:
                return MediaLauncherActivity.MediaType.IMAGE;
            case MIMETYPE_VIDEO:
                return MediaLauncherActivity.MediaType.VIDEO;
            default:
                return MediaLauncherActivity.MediaType.UNKNOWN;
        }
    }

    /**
     * Selectively enables or disables the MediaLauncherActivity.
     */
    public static void updateMediaLauncherActivityEnabled() {
        PostTask.postTask(TaskTraits.BEST_EFFORT_MAY_BLOCK,
                () -> { synchronousUpdateMediaLauncherActivityEnabled(); });
    }

    /** Applies the enabled/disabled component state for the media and audio launchers. */
    static void synchronousUpdateMediaLauncherActivityEnabled() {
        Context context = ContextUtils.getApplicationContext();
        PackageManager packageManager = context.getPackageManager();
        ComponentName mediaComponentName = new ComponentName(context, MediaLauncherActivity.class);
        ComponentName audioComponentName = new ComponentName(
                context, "org.chromium.chrome.browser.media.AudioLauncherActivity");
        int newMediaState = shouldEnableMediaLauncherActivity()
                ? PackageManager.COMPONENT_ENABLED_STATE_ENABLED
                : PackageManager.COMPONENT_ENABLED_STATE_DISABLED;
        int newAudioState = shouldEnableAudioLauncherActivity()
                ? PackageManager.COMPONENT_ENABLED_STATE_ENABLED
                : PackageManager.COMPONENT_ENABLED_STATE_DISABLED;
        // This indicates that we don't want to kill Chrome when changing component enabled
        // state.
        int flags = PackageManager.DONT_KILL_APP;
        // Only touch PackageManager when the state actually changes.
        if (packageManager.getComponentEnabledSetting(mediaComponentName) != newMediaState) {
            packageManager.setComponentEnabledSetting(mediaComponentName, newMediaState, flags);
        }
        if (packageManager.getComponentEnabledSetting(audioComponentName) != newAudioState) {
            packageManager.setComponentEnabledSetting(audioComponentName, newAudioState, flags);
        }
    }

    /**
     * Force MediaLauncherActivity to be enabled for testing.
     */
    public static void forceEnableMediaLauncherActivityForTest() {
        sIsMediaLauncherActivityForceEnabledForTest = true;
        // Synchronously update to avoid race conditions in tests.
        synchronousUpdateMediaLauncherActivityEnabled();
    }

    /**
     * Stops forcing MediaLauncherActivity to be enabled for testing.
     */
    public static void stopForcingEnableMediaLauncherActivityForTest() {
        sIsMediaLauncherActivityForceEnabledForTest = false;
        // Synchronously update to avoid race conditions in tests.
        synchronousUpdateMediaLauncherActivityEnabled();
    }

    private static boolean shouldEnableMediaLauncherActivity() {
        return sIsMediaLauncherActivityForceEnabledForTest
                || ((SysUtils.isAndroidGo() || isEnterpriseManaged())
                        && ChromeFeatureList.isEnabled(ChromeFeatureList.HANDLE_MEDIA_INTENTS));
    }

    private static boolean shouldEnableAudioLauncherActivity() {
        return shouldEnableMediaLauncherActivity() && !SysUtils.isAndroidGo();
    }

    /** Returns whether the device has a restrictions provider or app restrictions set. */
    private static boolean isEnterpriseManaged() {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) return false;
        RestrictionsManager restrictionsManager =
                (RestrictionsManager) ContextUtils.getApplicationContext().getSystemService(
                        Context.RESTRICTIONS_SERVICE);
        return restrictionsManager.hasRestrictionsProvider()
                || !restrictionsManager.getApplicationRestrictions().isEmpty();
    }

    /** Builds an ACTION_SEND intent sharing {@code fileUri} with the given MIME type. */
    private static Intent createShareIntent(Uri fileUri, String mimeType) {
        if (TextUtils.isEmpty(mimeType)) mimeType = DEFAULT_MIME_TYPE;
        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        intent.putExtra(Intent.EXTRA_STREAM, fileUri);
        intent.setType(mimeType);
        return intent;
    }

    /** Returns true when the MIME type is well-formed and its top-level type is "image". */
    private static boolean isImageType(String mimeType) {
        if (TextUtils.isEmpty(mimeType)) return false;
        String[] pieces = mimeType.toLowerCase(Locale.getDefault()).split("/");
        if (pieces.length != 2) return false;
        return MIMETYPE_IMAGE.equals(pieces[0]);
    }

    private static boolean willExposeFileUri(Uri uri) {
        // On Android N and later, an Exception is thrown if we try to expose a file:// URI.
        return uri.getScheme().equals(ContentResolver.SCHEME_FILE)
                && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
    }
}
| |
/*
* Copyright 2013-2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.sbe.generation.java;
import composite.elements.EnumOne;
import composite.elements.MessageHeaderEncoder;
import composite.elements.MsgDecoder;
import composite.elements.MsgEncoder;
import org.junit.Test;
import org.mockito.InOrder;
import org.agrona.BitUtil;
import org.agrona.DirectBuffer;
import org.agrona.concurrent.UnsafeBuffer;
import uk.co.real_logic.sbe.ir.Ir;
import uk.co.real_logic.sbe.ir.IrDecoder;
import uk.co.real_logic.sbe.ir.IrEncoder;
import uk.co.real_logic.sbe.ir.generated.MessageHeaderDecoder;
import uk.co.real_logic.sbe.otf.OtfHeaderDecoder;
import uk.co.real_logic.sbe.otf.OtfMessageDecoder;
import uk.co.real_logic.sbe.otf.TokenListener;
import uk.co.real_logic.sbe.xml.IrGenerator;
import uk.co.real_logic.sbe.xml.MessageSchema;
import uk.co.real_logic.sbe.xml.ParserOptions;
import uk.co.real_logic.sbe.xml.XmlSchemaParser;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
public class CompositeElementsGenerationTest
{
private static final MessageHeaderEncoder MESSAGE_HEADER = new MessageHeaderEncoder();
private static final MsgEncoder MSG_ENCODER = new MsgEncoder();
private static final int MSG_BUFFER_CAPACITY = 4 * 1024;
private static final int SCHEMA_BUFFER_CAPACITY = 16 * 1024;
@Test
public void shouldEncodeCorrectly()
{
final ByteBuffer encodedMsgBuffer = ByteBuffer.allocate(MSG_BUFFER_CAPACITY);
encodeTestMessage(encodedMsgBuffer);
final DirectBuffer decodeBuffer = new UnsafeBuffer(encodedMsgBuffer);
int offset = 0;
assertThat(decodeBuffer.getShort(offset), is((short)22));
offset += BitUtil.SIZE_OF_SHORT;
assertThat(decodeBuffer.getShort(offset), is((short)1));
offset += BitUtil.SIZE_OF_SHORT;
assertThat(decodeBuffer.getShort(offset), is((short)3));
offset += BitUtil.SIZE_OF_SHORT;
assertThat(decodeBuffer.getShort(offset), is((short)0));
offset += BitUtil.SIZE_OF_SHORT;
assertThat(decodeBuffer.getByte(offset), is((byte)10));
offset += BitUtil.SIZE_OF_BYTE;
assertThat(decodeBuffer.getByte(offset), is((byte)42));
offset += BitUtil.SIZE_OF_BYTE;
assertThat(decodeBuffer.getInt(offset), is(0x00_01_00_00));
offset += BitUtil.SIZE_OF_INT;
assertThat(decodeBuffer.getLong(offset), is(101L));
offset += BitUtil.SIZE_OF_LONG;
assertThat(decodeBuffer.getLong(offset), is(202L));
}
@Test
public void shouldDecodeCorrectly()
{
final ByteBuffer encodedMsgBuffer = ByteBuffer.allocate(MSG_BUFFER_CAPACITY);
encodeTestMessage(encodedMsgBuffer);
final DirectBuffer decodeBuffer = new UnsafeBuffer(encodedMsgBuffer);
final MessageHeaderDecoder hdrDecoder = new MessageHeaderDecoder();
final MsgDecoder msgDecoder = new MsgDecoder();
hdrDecoder.wrap(decodeBuffer, 0);
msgDecoder.wrap(
decodeBuffer, hdrDecoder.encodedLength(), MSG_ENCODER.sbeBlockLength(), MSG_ENCODER.sbeSchemaVersion());
assertThat(hdrDecoder.blockLength(), is(22));
assertThat(hdrDecoder.templateId(), is(1));
assertThat(hdrDecoder.schemaId(), is(3));
assertThat(hdrDecoder.version(), is(0));
assertThat(msgDecoder.structure().enumOne(), is(EnumOne.Value10));
assertThat(msgDecoder.structure().zeroth(), is((short)42));
assertThat(msgDecoder.structure().setOne().bit0(), is(false));
assertThat(msgDecoder.structure().setOne().bit16(), is(true));
assertThat(msgDecoder.structure().setOne().bit26(), is(false));
assertThat(msgDecoder.structure().inner().first(), is(101L));
assertThat(msgDecoder.structure().inner().second(), is(202L));
assertThat(msgDecoder.encodedLength(), is(22));
}
@Test
public void shouldDisplayCorrectly()
{
final ByteBuffer encodedMsgBuffer = ByteBuffer.allocate(MSG_BUFFER_CAPACITY);
encodeTestMessage(encodedMsgBuffer);
final String compositeString = MSG_ENCODER.structure().toString();
assertThat(compositeString, containsString("enumOne="));
assertThat(compositeString, not(containsString("enumOne=|")));
assertThat(compositeString, containsString("setOne="));
assertThat(compositeString, not(containsString("setOne=|")));
}
@Test
public void shouldOtfDecodeCorrectly() throws Exception
{
final ByteBuffer encodedSchemaBuffer = ByteBuffer.allocate(SCHEMA_BUFFER_CAPACITY);
encodeSchema(encodedSchemaBuffer);
final ByteBuffer encodedMsgBuffer = ByteBuffer.allocate(MSG_BUFFER_CAPACITY);
encodeTestMessage(encodedMsgBuffer);
encodedSchemaBuffer.flip();
final Ir ir = decodeIr(encodedSchemaBuffer);
final DirectBuffer decodeBuffer = new UnsafeBuffer(encodedMsgBuffer);
final OtfHeaderDecoder otfHeaderDecoder = new OtfHeaderDecoder(ir.headerStructure());
assertThat(otfHeaderDecoder.getBlockLength(decodeBuffer, 0), is(22));
assertThat(otfHeaderDecoder.getSchemaId(decodeBuffer, 0), is(3));
assertThat(otfHeaderDecoder.getTemplateId(decodeBuffer, 0), is(1));
assertThat(otfHeaderDecoder.getSchemaVersion(decodeBuffer, 0), is(0));
final TokenListener mockTokenListener = mock(TokenListener.class);
OtfMessageDecoder.decode(
decodeBuffer,
otfHeaderDecoder.encodedLength(),
MSG_ENCODER.sbeSchemaVersion(),
MSG_ENCODER.sbeBlockLength(),
ir.getMessage(MSG_ENCODER.sbeTemplateId()),
mockTokenListener);
final InOrder inOrder = inOrder(mockTokenListener);
inOrder.verify(mockTokenListener).onBeginComposite(any(), any(), eq(2), eq(17));
inOrder.verify(mockTokenListener).onEnum(any(), eq(decodeBuffer), eq(8), any(), eq(3), eq(6), eq(0));
inOrder.verify(mockTokenListener).onEncoding(any(), eq(decodeBuffer), eq(9), any(), eq(0));
inOrder.verify(mockTokenListener).onBitSet(any(), eq(decodeBuffer), eq(10), any(), eq(8), eq(12), eq(0));
inOrder.verify(mockTokenListener).onBeginComposite(any(), any(), eq(13), eq(16));
inOrder.verify(mockTokenListener).onEncoding(any(), eq(decodeBuffer), eq(14), any(), eq(0));
inOrder.verify(mockTokenListener).onEncoding(any(), eq(decodeBuffer), eq(22), any(), eq(0));
inOrder.verify(mockTokenListener).onEndComposite(any(), any(), eq(13), eq(16));
inOrder.verify(mockTokenListener).onEndComposite(any(), any(), eq(2), eq(17));
}
private static int encodeTestMessage(final ByteBuffer buffer)
{
final UnsafeBuffer directBuffer = new UnsafeBuffer(buffer);
int bufferOffset = 0;
MESSAGE_HEADER
.wrap(directBuffer, bufferOffset)
.blockLength(MSG_ENCODER.sbeBlockLength())
.templateId(MSG_ENCODER.sbeTemplateId())
.schemaId(MSG_ENCODER.sbeSchemaId())
.version(MSG_ENCODER.sbeSchemaVersion());
bufferOffset += MESSAGE_HEADER.encodedLength();
MSG_ENCODER.wrap(directBuffer, bufferOffset).structure()
.enumOne(EnumOne.Value10)
.zeroth((byte)42);
MSG_ENCODER.structure()
.setOne().clear().bit0(false).bit16(true).bit26(false);
MSG_ENCODER.structure().inner()
.first(101)
.second(202);
return MSG_ENCODER.encodedLength();
}
/**
 * Parse the composite-elements test schema from disk, generate its intermediate
 * representation and serialise that IR into the supplied buffer.
 *
 * @param buffer destination for the encoded IR
 * @throws Exception if the schema cannot be read, parsed or encoded
 */
private static void encodeSchema(final ByteBuffer buffer) throws Exception
{
    final Path schemaFile = Paths.get("src/test/resources/composite-elements-schema.xml");
    try (InputStream schemaStream = new BufferedInputStream(Files.newInputStream(schemaFile)))
    {
        final MessageSchema parsedSchema = XmlSchemaParser.parse(schemaStream, ParserOptions.DEFAULT);
        final Ir intermediateRepresentation = new IrGenerator().generate(parsedSchema);
        try (IrEncoder encoder = new IrEncoder(buffer, intermediateRepresentation))
        {
            encoder.encode();
        }
    }
}
/**
 * Deserialise an SBE intermediate representation from the given buffer.
 *
 * @param buffer buffer holding a previously encoded IR
 * @return the decoded {@code Ir}
 * @throws IOException if decoding fails
 */
private static Ir decodeIr(final ByteBuffer buffer) throws IOException
{
    // The decoder is AutoCloseable; try-with-resources guarantees release.
    try (IrDecoder irDecoder = new IrDecoder(buffer))
    {
        return irDecoder.decode();
    }
}
}
| |
package com.cordys.coe.tools.useradmin.cordys;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import com.cordys.coe.tools.useradmin.cordys.exception.CordysException;
import com.cordys.coe.tools.useradmin.cordys.exception.CordysValidationException;
import com.cordys.coe.tools.useradmin.util.Util;
import com.cordys.cpc.bsf.busobject.BSF;
import com.cordys.cpc.bsf.soap.SOAPRequestObject;
import com.eibus.directory.soap.LDAPUtil;
import com.eibus.xml.nom.Document;
import com.eibus.xml.nom.Node;
import com.eibus.xml.nom.XMLException;
import com.eibus.xml.xpath.XPath;
import com.eibus.xml.xpath.XPathMetaInfo;
import com.novell.ldap.LDAPAttribute;
import com.novell.ldap.LDAPAttributeSet;
import com.novell.ldap.LDAPEntry;
/**
* Class to support role related functionality in the Cordys platform.
* Getting/updating user roles, getting all defined roles, etc.
*
* @author kekema
*
*/
public class Role
{
    private String roleDN;

    /**
     * Wrap an existing role identified by its LDAP DN.
     *
     * @param roleDN distinguished name of the role
     */
    public Role(String roleDN)
    {
        this.roleDN = roleDN;
    }

    /**
     * @return the LDAP DN of this role
     */
    public String getRoleDN()
    {
        return this.roleDN;
    }

    /**
     * Get list of user assigned roles (sorted)
     *
     * @param orgUserDN DN of the organizational user
     * @return sorted list of role DNs; empty when the user has no roles
     */
    public static ArrayList<String> getAssignedRoles(String orgUserDN)
    {
        ArrayList<String> result = new ArrayList<String>();
        try
        {
            LDAPEntry oldEntry = LDAP.getEntry(orgUserDN);
            if (oldEntry != null)
            {
                LDAPAttributeSet attrs = oldEntry.getAttributeSet();
                LDAPAttribute role = attrs.getAttribute("role");
                if (role != null)
                {
                    String[] oldRolesArray = role.getStringValueArray();
                    result = new ArrayList<String>(Arrays.asList(oldRolesArray));
                    Collections.sort(result);
                }
            }
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to read role assignments for " + orgUserDN, e);
        }
        return result;
    }

    /**
     * Get list of roles which are not assigned to the user (sorted)
     *
     * @param orgUserDN DN of the organizational user
     * @return sorted list of role DNs not yet assigned to the user
     */
    public static ArrayList<String> getUnassignedRoles(String orgUserDN)
    {
        ArrayList<String> unassignedRoles = new ArrayList<String>();
        ArrayList<String> userRoles = null;
        String ldapRoot = LDAP.getRoot();
        try
        {
            userRoles = getAssignedRoles(orgUserDN);
            // get all roles as defined in LDAP (excl internal roles)
            LDAPEntry[] ldapresult = LDAP.searchLDAPEntries(ldapRoot, 2, "(&(objectclass=busorganizationalrole)(|(busorganizationalroletype=Application)(busorganizationalroletype=Functional)))", null, false);
            for (LDAPEntry ldapEntry:ldapresult)
            {
                String dn = ldapEntry.getDN();
                boolean include = true;
                // if organizational role, only include if defined in current organization
                if (dn.indexOf("organizational roles") > 0)
                {
                    include = (dn.indexOf(BSF.getOrganization()) > 0);
                }
                if (include)
                {
                    int pIndex = dn.indexOf("cn=packages,o=");
                    if (pIndex > 0)
                    {
                        // role deployed in an organizational space
                        include = (dn.indexOf(BSF.getOrganization()) > 0);
                        // normalize the DN back to the shared (root) form
                        dn = dn.substring(0, pIndex) + ldapRoot;
                    }
                }
                if (include)
                {
                    // only include if not assigned yet to user
                    include = ((userRoles != null) && !(userRoles.contains(dn)));
                }
                if (include)
                {
                    // a role deployed in both shared and org space may show up twice
                    if (!unassignedRoles.contains(dn))
                    {
                        unassignedRoles.add(dn);
                    }
                }
            }
            Collections.sort(unassignedRoles);
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to determine unassigned roles for " + orgUserDN, e);
        }
        return unassignedRoles;
    }

    /**
     * Get all user assigned and unassigned roles, where for each role, the whole tree is given, so including
     * the subroles, it's subroles, etc.
     * The result comes as a NOM xml.
     *
     * @param orgUserDN DN of the organizational user
     * @return NOM node of the form &lt;tree&gt;&lt;assigned&gt;...&lt;/assigned&gt;&lt;unassigned&gt;...&lt;/unassigned&gt;&lt;/tree&gt;;
     *         caller is responsible for deleting the returned node
     */
    public static int getUserRoleTrees(String orgUserDN)
    {
        Document xmlDoc = BSF.getXMLDocument();
        int treeNode = xmlDoc.createElement("tree");
        int assignedNode = xmlDoc.createElement("assigned", treeNode);
        int userNode = getAssignedRolesTree(orgUserDN);
        Node.appendToChildren(userNode, assignedNode);
        int unassignedNode = xmlDoc.createElement("unassigned", treeNode);
        int rolesNode = getUnassignedRolesTree(orgUserDN);
        Node.appendToChildren(rolesNode, unassignedNode);
        return treeNode;
    }

    /**
     * Build the NOM tree of roles currently assigned to the user.
     *
     * @param orgUserDN DN of the organizational user
     * @return unlinked NOM "user" node, or 0 when not found
     */
    private static int getAssignedRolesTree(String orgUserDN)
    {
        int result = 0;
        int response = 0;
        try
        {
            response = getRoleTree(orgUserDN, null);
            if (response > 0)
            {
                result = XPath.getFirstMatch(".//user", new XPathMetaInfo(), response);
                if (result > 0)
                {
                    // detach from the response so it survives the delete below
                    result = Node.unlink(result);
                }
            }
        }
        finally
        {
            if (response > 0)
            {
                Node.delete(response);
                response = 0;
            }
        }
        return result;
    }

    /**
     * Build the NOM tree of roles not assigned to the user.
     *
     * @param orgUserDN DN of the organizational user
     * @return NOM "roles" node containing one role tree per unassigned role
     */
    private static int getUnassignedRolesTree(String orgUserDN)
    {
        int result = 0;
        try
        {
            result = BSF.getXMLDocument().parseString("<roles/>");
            // get all unassigned roles;
            // per role, get the role tree
            ArrayList<String> unassignedRoles = getUnassignedRoles(orgUserDN);
            for (String roleDN : unassignedRoles)
            {
                int response = 0;
                try
                {
                    response = getRoleTree(roleDN, null);
                    if (response > 0)
                    {
                        int roleNode = XPath.getFirstMatch(".//role", new XPathMetaInfo(), response);
                        if (roleNode > 0)
                        {
                            roleNode = Node.unlink(roleNode);
                            Node.appendToChildren(roleNode, result);
                        }
                    }
                }
                finally
                {
                    if (response > 0)
                    {
                        Node.delete(response);
                        response = 0;
                    }
                }
            }
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to compose unassigned roles tree for user " + orgUserDN, e);
        }
        return result;
    }

    /**
     * Get roles tree for either a user (assigned role tree) or a role (role tree)
     *
     * @param dn DN of a user or a role
     * @param depth maximum tree depth; defaults to "10" when not set
     * @return NOM node of the SOAP response; caller must delete it
     */
    public static int getRoleTree(String dn, String depth)
    {
        if (!Util.isSet(depth))
        {
            depth = "10";
        }
        String namespace = "http://schemas.cordys.com/1.0/ldap";
        String methodName = "GetRoles";
        String[] paramNames = new String[] { "dn", "depth" };
        Object[] paramValues = new Object[] { dn, depth };
        SOAPRequestObject sro = new SOAPRequestObject(namespace, methodName, paramNames, paramValues);
        int response = 0;
        try
        {
            response = sro.execute();
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to read roles from LDAP for " + dn, e);
        }
        return response;
    }

    /**
     * Update the user roles as per the given list.
     * The update is only executed when there is a change compared to the current roles in LDAP.
     *
     * @param orgUserDN DN of the organizational user
     * @param assignedRoleDNs the complete new set of role DNs for the user
     */
    public static void updateAssignedRoles(String orgUserDN, ArrayList<String> assignedRoleDNs)
    {
        Collections.sort(assignedRoleDNs);
        try
        {
            // read user from LDAP and get the role attributes
            LDAPEntry oldEntry = LDAP.getEntry(orgUserDN);
            boolean updateUser = false;
            if (oldEntry != null)
            {
                LDAPEntry newEntry = LDAPUtil.cloneEntry(oldEntry);
                LDAPAttributeSet attrs = newEntry.getAttributeSet();
                LDAPAttribute role = attrs.getAttribute("role");
                ArrayList<String> oldRoles = null;
                if (role != null)
                {
                    String[] oldRolesArray = role.getStringValueArray();
                    oldRoles = new ArrayList<String>(Arrays.asList(oldRolesArray));
                    Collections.sort(oldRoles);
                    // check if user roles has changed
                    // (equal size + full containment implies equal sets)
                    if (assignedRoleDNs.size() == oldRoles.size())
                    {
                        for (String roleDN : assignedRoleDNs)
                        {
                            if (!oldRoles.contains(roleDN))
                            {
                                updateUser = true;
                                break;
                            }
                        }
                    }
                    else
                    {
                        updateUser = true;
                    }
                    if (updateUser)
                    {
                        attrs.remove(role);
                    }
                }
                else
                {
                    updateUser = (assignedRoleDNs.size() > 0);
                }
                if (updateUser)
                {
                    String validationMessage = validateUpdate(oldRoles, assignedRoleDNs);
                    if (!Util.isSet(validationMessage))
                    {
                        if (assignedRoleDNs.size() > 0)
                        {
                            role = new LDAPAttribute("role");
                            for (String roleDN : assignedRoleDNs)
                            {
                                role.addValue(roleDN);
                            }
                            attrs.add(role);
                        }
                        LDAP.updateEntry(oldEntry, newEntry);
                    }
                    else
                    {
                        throw new CordysValidationException(validationMessage);
                    }
                }
            }
        }
        catch (CordysValidationException e)
        {
            // validation errors pass through unchanged
            throw e;
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to update roles for user " + orgUserDN, e);
        }
    }

    /**
     * Check for any validation errors on roles update
     *
     * @param oldRoles roles before the update (may be null)
     * @param assignedRoles roles after the update
     * @return validation message, or null when the update is allowed
     */
    private static String validateUpdate(ArrayList<String> oldRoles, ArrayList<String> assignedRoles)
    {
        String validationMessage = null;
        // check for condition that at least one user has administrator role
        if ((oldRoles != null) && (oldRoles.size() > 0))
        {
            String ldaproot = LDAP.getRoot();
            String adminRole = "cn=Administrator,cn=Cordys@Work," + ldaproot;
            String orgAdminRole = "cn=organizationalAdmin,cn=Cordys ESBServer," + ldaproot;
            // block removal of the last admin role unless another admin user exists
            if ((oldRoles.contains(adminRole) && (!assignedRoles.contains(adminRole) && (!assignedRoles.contains(orgAdminRole)))) ||
                (oldRoles.contains(orgAdminRole) && (!assignedRoles.contains(adminRole) && (!assignedRoles.contains(orgAdminRole)))))
            {
                if (!Organization.hasMultipleAdmins(BSF.getOrganization()))
                {
                    validationMessage = "Atleast one user must have organizationalAdmin role or Administrator role.";
                }
            }
        }
        return validationMessage;
    }

    /**
     * Assign additional roles and/or unassign user roles
     *
     * @param orgUserDN DN of the organizational user
     * @param addedRoleDNs assign these roles in addition to existing roles
     * @param removedRoleDNs unassign these roles from existing roles
     */
    public static void maintainUserRoles(String orgUserDN, ArrayList<String> addedRoleDNs, ArrayList<String> removedRoleDNs)
    {
        try
        {
            LDAPEntry oldEntry = LDAP.getEntry(orgUserDN);
            if (oldEntry != null)
            {
                ArrayList<String> roleDNs = null;
                LDAPEntry newEntry = LDAPUtil.cloneEntry(oldEntry);
                LDAPAttributeSet attrs = newEntry.getAttributeSet();
                LDAPAttribute role = attrs.getAttribute("role");
                if (role != null)
                {
                    String[] currentRolesArray = role.getStringValueArray();
                    roleDNs = new ArrayList<String>(Arrays.asList(currentRolesArray));
                    attrs.remove(role);
                }
                else
                {
                    roleDNs = new ArrayList<String>();
                }
                ArrayList<String> oldRoleDNs = new ArrayList<String>(roleDNs);
                if ((removedRoleDNs != null) && (removedRoleDNs.size() > 0))
                {
                    roleDNs.removeAll(removedRoleDNs);
                }
                if ((addedRoleDNs != null) && (addedRoleDNs.size() > 0))
                {
                    // remove duplicates
                    roleDNs.removeAll(addedRoleDNs);
                    // add new roles
                    roleDNs.addAll(addedRoleDNs);
                }
                Collections.sort(roleDNs);
                // only hit LDAP when the effective role set actually changed
                if (!roleDNs.equals(oldRoleDNs))
                {
                    String validationMessage = validateUpdate(oldRoleDNs, roleDNs);
                    if (!Util.isSet(validationMessage))
                    {
                        if (roleDNs.size() > 0)
                        {
                            role = new LDAPAttribute("role");
                            for (String roleDN : roleDNs)
                            {
                                role.addValue(roleDN);
                            }
                            attrs.add(role);
                        }
                        LDAP.updateEntry(oldEntry, newEntry);
                    }
                    else
                    {
                        throw new CordysValidationException(validationMessage);
                    }
                }
            }
        }
        catch (CordysValidationException e)
        {
            throw e;
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to update roles for user " + orgUserDN, e);
        }
    }

    /**
     * Get all isv roles plus roles of current organization.
     *
     * @param inclInternal whether to include roles of type Internal
     * @return sorted list of role DNs
     */
    public static ArrayList<String> getAllRoles(boolean inclInternal)
    {
        return (getRoles(inclInternal, false));
    }

    /**
     * Get org roles of current organization.
     *
     * @return sorted list of organizational role DNs
     */
    public static ArrayList<String> getOrganizationalRoles()
    {
        return (getRoles(false, true));
    }

    /**
     * Get isv and/or organizational roles
     *
     * @param inclInternal whether to include roles of type Internal
     * @param onlyOrganizational restrict to roles of the current organization
     * @return sorted (case-insensitive) list of role DNs
     */
    private static ArrayList<String> getRoles(boolean inclInternal, boolean onlyOrganizational)
    {
        ArrayList<String> result = new ArrayList<String>();
        String ldapRoot = LDAP.getRoot();
        try
        {
            String filter = "(&(objectclass=busorganizationalrole)(|(busorganizationalroletype=Application)(busorganizationalroletype=Functional)";
            if (inclInternal)
            {
                filter = filter + "(busorganizationalroletype=Internal)";
            }
            filter = filter + "))";
            LDAPEntry[] ldapresult = LDAP.searchLDAPEntries(ldapRoot, 2, filter, null, false);
            for (LDAPEntry ldapEntry:ldapresult)
            {
                String dn = ldapEntry.getDN();
                // String description = ldapEntry.getAttribute("description").getStringValue(); // not used for now; role description rarely different from role name
                boolean include = false;
                if (dn.indexOf("organizational roles") > 0)
                {
                    include = (dn.indexOf(BSF.getOrganization()) > 0);
                }
                else
                {
                    include = (!onlyOrganizational);
                }
                if (!onlyOrganizational)
                {
                    int pIndex = dn.indexOf("cn=packages,o=");
                    if (pIndex > 0)
                    {
                        // role deployed in an organizational space
                        include = (dn.indexOf(BSF.getOrganization()) > 0);
                        dn = dn.substring(0, pIndex) + ldapRoot;
                    }
                }
                if (include)
                {
                    // in case the role was also deployed in shared space, the result array might already contain the roleDN
                    if (!result.contains(dn))
                    {
                        result.add(dn);
                    }
                }
            }
            Collections.sort(result, String.CASE_INSENSITIVE_ORDER);
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to read all roles", e);
        }
        return result;
    }

    /**
     * Gives a HashMap of &lt;role name, role DN&gt;
     *
     * @return map from role CN to role DN (internal roles excluded)
     */
    public static HashMap<String, String> getAllRolesByName()
    {
        HashMap<String, String> result = new HashMap<String, String>();
        ArrayList<String> allRoleDNs = getAllRoles(false);
        for (String roleDN : allRoleDNs)
        {
            String roleName = Util.getNameFromDN(roleDN);
            result.put(roleName, roleDN);
        }
        return result;
    }

    /**
     * Get (first level) subroles for a role
     *
     * @param roleDN DN of the role
     * @return sorted (case-insensitive) list of direct subrole DNs
     */
    public static ArrayList<String> getSubroles(String roleDN)
    {
        ArrayList<String> result = new ArrayList<String>();
        try
        {
            LDAPEntry le = getLDAPRoleEntry(roleDN);
            LDAPAttributeSet attrs = le.getAttributeSet();
            LDAPAttribute role = attrs.getAttribute("role");
            if (role != null)
            {
                String[] roleRoles = role.getStringValueArray();
                result = new ArrayList<String>(Arrays.asList(roleRoles));
            }
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to read role from LDAP.", e);
        }
        Collections.sort(result, String.CASE_INSENSITIVE_ORDER);
        return result;
    }

    /**
     * Get role entry from LDAP
     *
     * @param roleDN DN of the role
     * @return the LDAP entry, preferring the org-space deployment when present
     */
    public static LDAPEntry getLDAPRoleEntry(String roleDN)
    {
        LDAPEntry result = null;
        if (roleDN.indexOf("cn=organizational roles") == -1)
        {
            // try role in org space first
            // NOTE(review): assumes roleDN ends with the LDAP root; indexOf would
            // return -1 otherwise and the substring below would throw — confirm callers
            String ldapRoot = LDAP.getRoot();
            int lrIndex = roleDN.indexOf(ldapRoot);
            String orgSpaceRoleDN = roleDN.substring(0, lrIndex) + "cn=packages," + BSF.getOrganization();
            result = LDAP.getEntry(orgSpaceRoleDN);
        }
        if (result == null)
        {
            result = LDAP.getEntry(roleDN);
        }
        return result;
    }

    /**
     * Assign additional subroles and/or unassign subroles.
     * Checks for circular assignments to be done in calling method.
     *
     * @param mainRoleDN DN of the role to update (must be an organizational role)
     * @param addedSubroleDNs assign these subroles in addition to existing roles
     * @param removedSubroleDNs unassign these subroles from existing roles
     */
    public static void maintainSubroles(String mainRoleDN, ArrayList<String> addedSubroleDNs, ArrayList<String> removedSubroleDNs)
    {
        // only organizational roles can be updated
        if (mainRoleDN.indexOf("cn=organizational roles") != -1)
        {
            try
            {
                LDAPEntry oldEntry = LDAP.getEntry(mainRoleDN);
                if (oldEntry != null)
                {
                    ArrayList<String> roleDNs = null;
                    LDAPEntry newEntry = LDAPUtil.cloneEntry(oldEntry);
                    LDAPAttributeSet attrs = newEntry.getAttributeSet();
                    LDAPAttribute role = attrs.getAttribute("role");
                    if (role != null)
                    {
                        String[] currentRolesArray = role.getStringValueArray();
                        roleDNs = new ArrayList<String>(Arrays.asList(currentRolesArray));
                        attrs.remove(role);
                    }
                    else
                    {
                        roleDNs = new ArrayList<String>();
                    }
                    if ((removedSubroleDNs != null) && (removedSubroleDNs.size() > 0))
                    {
                        roleDNs.removeAll(removedSubroleDNs);
                    }
                    if ((addedSubroleDNs != null) && (addedSubroleDNs.size() > 0))
                    {
                        // remove duplicates
                        roleDNs.removeAll(addedSubroleDNs);
                        // add new roles
                        roleDNs.addAll(addedSubroleDNs);
                    }
                    if (roleDNs.size() > 0)
                    {
                        Collections.sort(roleDNs);
                        role = new LDAPAttribute("role");
                        for (String roleDN : roleDNs)
                        {
                            role.addValue(roleDN);
                        }
                        attrs.add(role);
                    }
                    LDAP.updateEntry(oldEntry, newEntry);
                }
            }
            catch (CordysValidationException e)
            {
                throw e;
            }
            catch (Exception e)
            {
                throw new CordysException("Not able to update subroles for role " + mainRoleDN, e);
            }
        }
    }

    /**
     * Add subrole to mainrole
     *
     * @param mainRoleDN DN of the main role
     * @param subroleDN DN of the subrole to add
     */
    public static void addRole(String mainRoleDN, String subroleDN)
    {
        ArrayList<String> addedSubroleDNs = new ArrayList<String>();
        addedSubroleDNs.add(subroleDN);
        maintainSubroles(mainRoleDN, addedSubroleDNs, null);
    }

    /**
     * Remove subrole from mainrole
     *
     * @param mainRoleDN DN of the main role
     * @param subroleDN DN of the subrole to remove
     */
    public static void removeRole(String mainRoleDN, String subroleDN)
    {
        ArrayList<String> removedSubroleDNs = new ArrayList<String>();
        removedSubroleDNs.add(subroleDN);
        maintainSubroles(mainRoleDN, null, removedSubroleDNs);
    }

    /**
     * Check if the role has a certain subrole
     *
     * @param subroleDN DN of the subrole to look for
     * @param multilevel if false, only first level is checked; else the subroletree (10 levels)
     * @return true when the subrole is found
     */
    public boolean hasSubrole(String subroleDN, boolean multilevel)
    {
        boolean result = false;
        if (!multilevel)
        {
            ArrayList<String> subroleDNs = getSubroles(this.getRoleDN());
            // BUGFIX: was contains(subroleDNs) — the list can never contain itself,
            // so the single-level check always returned false.
            result = (subroleDNs.contains(subroleDN));
        }
        else
        {
            int subroletree = 0;
            try
            {
                subroletree = getRoleTree(this.getRoleDN(), "10");
                if (subroletree > 0)
                {
                    // textual match on the serialized tree is sufficient since DNs are unique
                    String treeString = Node.writeToString(subroletree, false);
                    result = (treeString.indexOf(subroleDN) != -1);
                }
            }
            finally
            {
                if (subroletree > 0)
                {
                    Node.delete(subroletree);
                }
            }
        }
        return result;
    }

    /**
     * Add a (functional) role to current organization
     *
     * @param roleName CN of the new role; creation is skipped when it already exists
     */
    public static void addRole(String roleName)
    {
        LDAPAttributeSet attrs = null;
        LDAPEntry newEntry = null;
        String roleDN = "cn=" + roleName + ",cn=organizational roles," + BSF.getOrganization();
        if (!LDAP.entryExists(roleDN))
        {
            // not existing yet, so create
            newEntry = new LDAPEntry(roleDN);
            attrs = newEntry.getAttributeSet();
            LDAPAttribute attr = new LDAPAttribute("objectclass", "top");
            attr.addValue("busorganizationalrole");
            attr.addValue("busorganizationalobject");
            attrs.add(attr);
            attrs.add(new LDAPAttribute("cn", roleName));
            attrs.add(new LDAPAttribute("description", roleName));
            attrs.add(new LDAPAttribute("busorganizationalroletype", "Functional"));
            LDAPAttribute role = new LDAPAttribute("role");
            // every new role gets the organization's "everyone" role as subrole
            String defaultRole = "cn=everyoneIn" + Util.getCurrentOrgName() + ",cn=organizational roles," + BSF.getOrganization();
            role.addValue(defaultRole);
            attrs.add(role);
            LDAP.insertEntry(newEntry);
        }
    }

    /**
     * Delete org role from LDAP
     *
     * @param roleDN DN of the role; only organizational roles are deleted
     */
    public static void deleteRole(String roleDN)
    {
        if (Util.isSet(roleDN) && (roleDN.indexOf("cn=organizational roles") != -1))
        {
            try
            {
                if (LDAP.entryExists(roleDN))
                {
                    LDAP.deleteEntriesRecursive(roleDN);
                }
            }
            catch (Exception e)
            {
                throw new CordysException("Not able to remove role from LDAP: " + roleDN, e);
            }
        }
    }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.api.score.buildin.hardmediumsoft;
import java.util.Objects;
import org.optaplanner.core.api.domain.solution.PlanningSolution;
import org.optaplanner.core.api.score.AbstractScore;
import org.optaplanner.core.api.score.FeasibilityScore;
import org.optaplanner.core.api.score.Score;
/**
* This {@link Score} is based on 3 levels of int constraints: hard, medium and soft.
* Hard constraints have priority over medium constraints.
* Medium constraints have priority over soft constraints.
* Hard constraints determine feasibility.
* <p>
* This class is immutable.
*
* @see Score
*/
public final class HardMediumSoftScore extends AbstractScore<HardMediumSoftScore>
        implements FeasibilityScore<HardMediumSoftScore> {

    public static final HardMediumSoftScore ZERO = new HardMediumSoftScore(0, 0, 0, 0);
    public static final HardMediumSoftScore ONE_HARD = new HardMediumSoftScore(0, 1, 0, 0);
    public static final HardMediumSoftScore ONE_MEDIUM = new HardMediumSoftScore(0, 0, 1, 0);
    public static final HardMediumSoftScore ONE_SOFT = new HardMediumSoftScore(0, 0, 0, 1);

    private static final String HARD_LABEL = "hard";
    private static final String MEDIUM_LABEL = "medium";
    private static final String SOFT_LABEL = "soft";

    /**
     * Parses a score string such as {@code "0hard/-7medium/-3soft"}
     * (optionally prefixed with an init part) into a score instance.
     */
    public static HardMediumSoftScore parseScore(String scoreString) {
        String[] scoreTokens = parseScoreTokens(HardMediumSoftScore.class, scoreString,
                HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
        int initScore = parseInitScore(HardMediumSoftScore.class, scoreString, scoreTokens[0]);
        int hardScore = parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[1]);
        int mediumScore = parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[2]);
        int softScore = parseLevelAsInt(HardMediumSoftScore.class, scoreString, scoreTokens[3]);
        return ofUninitialized(initScore, hardScore, mediumScore, softScore);
    }

    /**
     * Creates a score that may still have uninitialized planning variables
     * (negative {@code initScore}).
     */
    public static HardMediumSoftScore ofUninitialized(int initScore, int hardScore, int mediumScore, int softScore) {
        return new HardMediumSoftScore(initScore, hardScore, mediumScore, softScore);
    }

    /**
     * @deprecated in favor of {@link #ofUninitialized(int, int, int, int)}
     */
    @Deprecated
    public static HardMediumSoftScore valueOfUninitialized(int initScore, int hardScore, int mediumScore, int softScore) {
        return new HardMediumSoftScore(initScore, hardScore, mediumScore, softScore);
    }

    /**
     * Creates a fully initialized score ({@code initScore} of 0).
     */
    public static HardMediumSoftScore of(int hardScore, int mediumScore, int softScore) {
        return new HardMediumSoftScore(0, hardScore, mediumScore, softScore);
    }

    /**
     * @deprecated in favor of {@link #of(int, int, int)}
     */
    @Deprecated
    public static HardMediumSoftScore valueOf(int hardScore, int mediumScore, int softScore) {
        return new HardMediumSoftScore(0, hardScore, mediumScore, softScore);
    }

    /** Creates a score with only a hard level; medium and soft are 0. */
    public static HardMediumSoftScore ofHard(int hardScore) {
        return new HardMediumSoftScore(0, hardScore, 0, 0);
    }

    /** Creates a score with only a medium level; hard and soft are 0. */
    public static HardMediumSoftScore ofMedium(int mediumScore) {
        return new HardMediumSoftScore(0, 0, mediumScore, 0);
    }

    /** Creates a score with only a soft level; hard and medium are 0. */
    public static HardMediumSoftScore ofSoft(int softScore) {
        return new HardMediumSoftScore(0, 0, 0, softScore);
    }

    // ************************************************************************
    // Fields
    // ************************************************************************

    private final int hardScore;
    private final int mediumScore;
    private final int softScore;

    /**
     * Private default constructor for default marshalling/unmarshalling of unknown frameworks that use reflection.
     * Such integration is always inferior to the specialized integration modules, such as
     * optaplanner-persistence-jpa, optaplanner-persistence-xstream, optaplanner-persistence-jaxb, ...
     */
    @SuppressWarnings("unused")
    private HardMediumSoftScore() {
        // Sentinel values; a score built this way is never used for arithmetic.
        super(Integer.MIN_VALUE);
        hardScore = Integer.MIN_VALUE;
        mediumScore = Integer.MIN_VALUE;
        softScore = Integer.MIN_VALUE;
    }

    private HardMediumSoftScore(int initScore, int hardScore, int mediumScore, int softScore) {
        super(initScore);
        this.hardScore = hardScore;
        this.mediumScore = mediumScore;
        this.softScore = softScore;
    }

    /**
     * The total of the broken negative hard constraints and fulfilled positive hard constraints.
     * Their weight is included in the total.
     * The hard score is usually a negative number because most use cases only have negative constraints.
     *
     * @return higher is better, usually negative, 0 if no hard constraints are broken/fulfilled
     */
    public int getHardScore() {
        return hardScore;
    }

    /**
     * The total of the broken negative medium constraints and fulfilled positive medium constraints.
     * Their weight is included in the total.
     * The medium score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the medium score is irrelevant if the 2 scores don't have the same hard score.
     *
     * @return higher is better, usually negative, 0 if no medium constraints are broken/fulfilled
     */
    public int getMediumScore() {
        return mediumScore;
    }

    /**
     * The total of the broken negative soft constraints and fulfilled positive soft constraints.
     * Their weight is included in the total.
     * The soft score is usually a negative number because most use cases only have negative constraints.
     * <p>
     * In a normal score comparison, the soft score is irrelevant if the 2 scores don't have the same hard and medium score.
     *
     * @return higher is better, usually negative, 0 if no soft constraints are broken/fulfilled
     */
    public int getSoftScore() {
        return softScore;
    }

    // ************************************************************************
    // Worker methods
    // ************************************************************************

    @Override
    public HardMediumSoftScore withInitScore(int newInitScore) {
        return new HardMediumSoftScore(newInitScore, hardScore, mediumScore, softScore);
    }

    /**
     * A {@link PlanningSolution} is feasible if it has no broken hard constraints.
     *
     * @return true if the {@link #getHardScore()} is 0 or higher
     */
    @Override
    public boolean isFeasible() {
        return initScore >= 0 && hardScore >= 0;
    }

    @Override
    public HardMediumSoftScore add(HardMediumSoftScore addend) {
        return new HardMediumSoftScore(
                initScore + addend.getInitScore(),
                hardScore + addend.getHardScore(),
                mediumScore + addend.getMediumScore(),
                softScore + addend.getSoftScore());
    }

    @Override
    public HardMediumSoftScore subtract(HardMediumSoftScore subtrahend) {
        return new HardMediumSoftScore(
                initScore - subtrahend.getInitScore(),
                hardScore - subtrahend.getHardScore(),
                mediumScore - subtrahend.getMediumScore(),
                softScore - subtrahend.getSoftScore());
    }

    @Override
    public HardMediumSoftScore multiply(double multiplicand) {
        // Each level is floored (not rounded) after multiplication, per the Score contract.
        return new HardMediumSoftScore(
                (int) Math.floor(initScore * multiplicand),
                (int) Math.floor(hardScore * multiplicand),
                (int) Math.floor(mediumScore * multiplicand),
                (int) Math.floor(softScore * multiplicand));
    }

    @Override
    public HardMediumSoftScore divide(double divisor) {
        // Floating-point division, floored per level.
        return new HardMediumSoftScore(
                (int) Math.floor(initScore / divisor),
                (int) Math.floor(hardScore / divisor),
                (int) Math.floor(mediumScore / divisor),
                (int) Math.floor(softScore / divisor));
    }

    @Override
    public HardMediumSoftScore power(double exponent) {
        return new HardMediumSoftScore(
                (int) Math.floor(Math.pow(initScore, exponent)),
                (int) Math.floor(Math.pow(hardScore, exponent)),
                (int) Math.floor(Math.pow(mediumScore, exponent)),
                (int) Math.floor(Math.pow(softScore, exponent)));
    }

    @Override
    public HardMediumSoftScore negate() {
        return new HardMediumSoftScore(-initScore, -hardScore, -mediumScore, -softScore);
    }

    /**
     * Returns the constraint levels only; the init score is deliberately excluded.
     */
    @Override
    public Number[] toLevelNumbers() {
        return new Number[] { hardScore, mediumScore, softScore };
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        } else if (o instanceof HardMediumSoftScore) {
            HardMediumSoftScore other = (HardMediumSoftScore) o;
            return initScore == other.getInitScore()
                    && hardScore == other.getHardScore()
                    && mediumScore == other.getMediumScore()
                    && softScore == other.getSoftScore();
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return Objects.hash(initScore, hardScore, mediumScore, softScore);
    }

    /**
     * Lexicographic comparison: init, then hard, then medium, then soft.
     */
    @Override
    public int compareTo(HardMediumSoftScore other) {
        if (initScore != other.getInitScore()) {
            return Integer.compare(initScore, other.getInitScore());
        } else if (hardScore != other.getHardScore()) {
            return Integer.compare(hardScore, other.getHardScore());
        } else if (mediumScore != other.getMediumScore()) {
            return Integer.compare(mediumScore, other.getMediumScore());
        } else {
            return Integer.compare(softScore, other.getSoftScore());
        }
    }

    @Override
    public String toShortString() {
        // Omits levels that are 0, e.g. "-3medium" instead of "0hard/-3medium/0soft".
        return buildShortString((n) -> ((Integer) n).intValue() != 0, HARD_LABEL, MEDIUM_LABEL, SOFT_LABEL);
    }

    @Override
    public String toString() {
        return getInitPrefix() + hardScore + HARD_LABEL + "/" + mediumScore + MEDIUM_LABEL + "/" + softScore + SOFT_LABEL;
    }

    @Override
    public boolean isCompatibleArithmeticArgument(Score otherScore) {
        return otherScore instanceof HardMediumSoftScore;
    }

}
| |
package utils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.Map.Entry;
public class Props {
private final static Logger log = LogManager.getLogger(Props.class);
private static final String CONFIG_PROPERTIES = "config.properties";
private static final String GLOBAL_PROPERTIES = "global.properties";
static {
    try {
        // Remember whether the global file existed BEFORE we touch it, so the
        // defaults below are seeded exactly once, on first run only.
        boolean exists = new File(GLOBAL_PROPERTIES).exists();
        // Touch both files (append mode creates them when absent) so later
        // reads never fail with FileNotFoundException.
        new FileOutputStream(Props.CONFIG_PROPERTIES, true).close();
        new FileOutputStream(Props.GLOBAL_PROPERTIES, true).close();
        // Out right write the file if it doesnt exist.
        if (!exists) {
            Props.setGlobalProperty(GlobalProperties.SORT_BY_NAME, GlobalDefaults.SORT_BY_NAME_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.APP_NAME, GlobalDefaults.APP_NAME_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.SHOW_COUNTER_OPTIONS,
                    GlobalDefaults.SHOW_COUNTER_OPTIONS_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.SHOW_COUTER_POPUP_EACH_START,
                    GlobalDefaults.SHOW_COUTER_POPUP_EACH_START_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.VERSION, GlobalDefaults.VERSION_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.CALL_FORWARDS, GlobalDefaults.CALL_FORWARDS_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.CALL_FORWARDS_SERVICE,
                    GlobalDefaults.CALL_FORWARDS_SERVICE_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.NOTIFICATIONS, GlobalDefaults.NOTIFICATIONS_DEFAULT);
            Props.setGlobalProperty(GlobalProperties.RECEPTION1WORKSTATION2BOTH0,
                    GlobalDefaults.RECEPTION1WORKSTATION2BOTH0_DEFAULT);
        }
        // field name -> property key, e.g. "SORT_BY_NAME" -> its key string
        Map<String, String> valueMap = new HashMap<>();
        // property key -> {field name -> default value}
        Map<String, Map<String, String>> valuePair = new HashMap<>();
        // Check each one props exist
        /*
         * Use reflection to find all variables in GlobalProperties and
         * GlobalDefaults, pairing each property key with its default value
         * by the "<FIELD>_DEFAULT" naming convention.
         *
         * TODO migrate to an object list at some point
         */
        Field[] values = GlobalProperties.class.getDeclaredFields();
        for (Field field : values) {
            field.setAccessible(true);
            if (field.getType().equals(String.class)) {
                String keyNameValue = field.get(GlobalProperties.class).toString();
                valueMap.put(field.getName(), keyNameValue);
            }
        }
        Field[] defaults = GlobalDefaults.class.getDeclaredFields();
        for (Field field : defaults) {
            field.setAccessible(true);
            if (field.getType().equals(String.class)) {
                Map<String, String> defaultPair = new HashMap<>();
                String fieldStr = field.getName();
                Object value = field.get(GlobalDefaults.class);
                defaultPair.put(fieldStr, value.toString());
                for (Entry<String, String> key : valueMap.entrySet()) {
                    if (fieldStr.equals(key.getKey() + "_DEFAULT"))
                        valuePair.put(key.getValue(), defaultPair);
                }
            }
        }
        // Backfill any property that is missing from the file with its default.
        for (Entry<String, Map<String, String>> valuePairObj : valuePair.entrySet()) {
            Set<Entry<String, String>> entrySet = valuePairObj.getValue().entrySet();
            for (Entry<String, String> entry : entrySet) {
                String propKey = valuePairObj.getKey();
                String propValue = entry.getValue();
                String globalProperty = Props.getGlobalProperty(propKey);
                if (globalProperty == null) {
                    // BUGFIX: message previously read "...of key<key>, setting default of + <value>"
                    // (missing space, stray "+ " baked into the literal).
                    log.info("found null value of key " + propKey + ", setting default of " + propValue);
                    Props.setGlobalProperty(propKey, propValue);
                }
            }
        }
    } catch (IOException | IllegalArgumentException | IllegalAccessException e) {
        log.error(e);
    }
}
/**
 * Store a key/value pair in the global (application-wide) properties file.
 */
private static void setGlobalProperty(String propKey, String propVal) {
    setProperty(propKey, propVal, false);
}
/**
 * Store a key/value pair in the per-user config properties file.
 */
public static void setUserProperty(String propKey, String propVal) {
    setProperty(propKey, propVal, true);
}
/**
 * Read a value from the global properties file.
 *
 * @return the trimmed value, "" if present but empty, or null if absent
 */
public static String getGlobalProperty(String propKey) {
    return getProperty(propKey, false);
}
/**
 * Read a localized string from the fixed language bundle file.
 *
 * @param propKey message key to look up
 * @return the trimmed value, "" if present but empty, or null if absent
 */
public static String getLangProperty(String propKey) {
    // Language strings always come from the same bundle, never from config/global.
    return getPropertyInternal(propKey, "messages.properties");
}
/**
 * Read a value from the per-user config properties file.
 *
 * @return the trimmed value, "" if present but empty, or null if absent
 */
public static String getUserProperty(String propKey) {
    return getProperty(propKey, true);
}
/**
 * Resolves the property file for the requested scope and delegates the
 * actual lookup to {@link #getPropertyInternal(String, String)}.
 *
 * @param propKey   property name to look up
 * @param userProps {@code true} for the user config file, {@code false}
 *                  for the global property file
 * @return the stored value (trimmed when non-empty)
 */
private static String getProperty(String propKey, boolean userProps) {
    final String propFile;
    if (userProps) {
        propFile = CONFIG_PROPERTIES;
    } else {
        propFile = GLOBAL_PROPERTIES;
    }
    return getPropertyInternal(propKey, propFile);
}
/**
 * Writes {@code propKey=propVal} into one of the two property files,
 * creating an empty file first when it does not exist yet.
 *
 * @param propKey   property name to set
 * @param propVal   value to store
 * @param userProps {@code true} to write the user config file,
 *                  {@code false} for the global property file
 * @throws RuntimeException if the file cannot be read or written back
 */
private static void setProperty(String propKey, String propVal, boolean userProps) {
    String propFile = userProps ? CONFIG_PROPERTIES : GLOBAL_PROPERTIES;
    File f = new File(propFile);
    // Create an empty properties file on first use so the load below succeeds.
    // (The previous "!f.exists() && !f.isDirectory()" reduced to exactly this:
    // a non-existing path is never a directory.)
    if (!f.exists()) {
        Path file = Paths.get(propFile);
        try {
            Files.write(file, new ArrayList<String>(), Charset.forName("UTF-8"));
        } catch (IOException e) {
            // Best effort: the load below will fail and report if creation failed.
            log.error(e);
        }
    }
    try (InputStream input = new FileInputStream(propFile)) {
        Properties prop = new Properties();
        prop.load(input);
        // set the properties value
        prop.put(propKey, propVal);
        // save properties to project root folder
        try (OutputStream output = new FileOutputStream(propFile)) {
            prop.store(output, null);
        }
    } catch (IOException io) {
        log.error(io);
        // Preserve the IOException as the cause instead of dropping it.
        throw new RuntimeException(
                "value not set for key: " + propKey + " - value: " + propVal, io);
    }
}
/**
 * Loads {@code propFile} and returns the value stored under
 * {@code propKey}.
 *
 * @param propKey  property name to look up
 * @param propFile path of the properties file to read
 * @return the trimmed value when present and non-empty; otherwise the raw
 *         value ({@code null} or the empty string)
 * @throws RuntimeException if the file cannot be read
 */
private static String getPropertyInternal(String propKey, String propFile) {
    try (InputStream input = new FileInputStream(propFile)) {
        Properties prop = new Properties();
        // load a properties file
        prop.load(input);
        String property = prop.getProperty(propKey);
        if (property != null && !property.isEmpty()) {
            return property.trim();
        }
        return property;
    } catch (IOException ex) {
        log.error(ex);
        // Preserve the IOException as the cause instead of dropping it.
        throw new RuntimeException("value not found for key: " + propKey, ex);
    }
}
/**
 * Removes a key (and its value) from one of the two property files and
 * writes the file back.
 *
 * @param string    property name to remove
 * @param userProps {@code true} to edit the user config file,
 *                  {@code false} for the global property file
 * @throws RuntimeException if the file cannot be read or written back
 */
public static void deleteProperty(String string, boolean userProps) {
    String propFile = userProps ? CONFIG_PROPERTIES : GLOBAL_PROPERTIES;
    try (InputStream input = new FileInputStream(propFile)) {
        Properties prop = new Properties();
        prop.load(input);
        // set the properties value
        prop.remove(string);
        // save properties to project root folder
        try (OutputStream output = new FileOutputStream(propFile)) {
            prop.store(output, null);
        }
    } catch (IOException io) {
        log.error(io);
        // Preserve the IOException as the cause instead of dropping it.
        throw new RuntimeException("value not deleted for key: " + string, io);
    }
}
/**
 * Keys used in the global properties file. Each constant name is expected
 * to have a matching {@code <NAME>_DEFAULT} constant in
 * {@link GlobalDefaults} (the defaults loader pairs them by appending
 * "_DEFAULT" to the field name).
 */
public static class GlobalProperties {
    // NOTE(review): "soryByName" is a typo for "sortByName", but it is the
    // persisted key in existing property files — renaming it would orphan
    // stored settings. Migrate deliberately if it is ever fixed.
    public final static String SORT_BY_NAME = "soryByName";
    public final static String APP_NAME = "appName";
    public final static String SHOW_COUNTER_OPTIONS = "showCounterOptions";
    // NOTE(review): "COUTER" typo is also baked into the persisted key.
    public final static String SHOW_COUTER_POPUP_EACH_START = "showCounterPopUPOnEachStart";
    // Package-private: the version key is managed internally, not by callers.
    static final String VERSION = "Version";
    public final static String CALL_FORWARDS = "callForwards";
    public final static String CALL_FORWARDS_SERVICE = "callForwardsServiceId";
    public final static String NOTIFICATIONS = "notifications";
    // Encodes the target device mode: 1=reception, 2=workstation, 0=both.
    public final static String RECEPTION1WORKSTATION2BOTH0 = "reception1Workstation2Both0";
}
/**
 * Default values for the keys declared in {@link GlobalProperties}.
 * <p>
 * Each field name must be the corresponding {@code GlobalProperties} field
 * name with the suffix {@code _DEFAULT}; the defaults loader matches the
 * pairs reflectively by that naming convention.
 *
 * @author adamea
 */
public static class GlobalDefaults {
    static final String SORT_BY_NAME_DEFAULT = "true";
    static final String APP_NAME_DEFAULT = "Build";
    static final String SHOW_COUNTER_OPTIONS_DEFAULT = "true";
    static final String SHOW_COUTER_POPUP_EACH_START_DEFAULT = "false";
    // Public so the application can display its own version number.
    public final static String VERSION_DEFAULT = "1.0.0.6";
    static final String CALL_FORWARDS_DEFAULT = "false";
    static final String CALL_FORWARDS_SERVICE_DEFAULT = "1";
    static final String NOTIFICATIONS_DEFAULT = "true";
    // 0 = both reception and workstation (see GlobalProperties key name).
    static final String RECEPTION1WORKSTATION2BOTH0_DEFAULT = "0";
}
}
| |
/*
Copyright (c) 2011,2012,
Saswat Anand (saswat@gatech.edu)
Mayur Naik (naik@cc.gatech.edu)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
package acteve.instrumentor;
import java.util.ArrayList;
import java.util.List;
import soot.ArrayType;
import soot.BooleanType;
import soot.ByteType;
import soot.CharType;
import soot.DoubleType;
import soot.FloatType;
import soot.IntType;
import soot.Local;
import soot.LongType;
import soot.Modifier;
import soot.PrimType;
import soot.RefType;
import soot.Scene;
import soot.ShortType;
import soot.SootClass;
import soot.SootMethod;
import soot.SootMethodRef;
import soot.Type;
import soot.jimple.InstanceInvokeExpr;
import soot.jimple.Jimple;
import soot.jimple.NullConstant;
import soot.jimple.Stmt;
import soot.jimple.StringConstant;
/**
 * Generates a synthetic Soot class ({@code G.SYMOPS_CLASS_NAME}) containing
 * one public static bridge method per (operator, operand-type) combination.
 * Each bridge method takes the symbolic expressions of the operands followed
 * by their concrete values and dispatches to the matching method of the
 * acteve.symbolic operator classes (integer.*, array.*, string.*).
 *
 * NOTE(review): the helper class G appears to be a stateful Jimple body
 * builder (addBody/iff/ret/assign/insertStmt append statements to the body
 * of the most recently created method) — TODO confirm. The statement order
 * in addUnaryMethod/addMethod is therefore load-bearing; do not reorder.
 */
public class SymOpsClassGenerator
{
    // The class currently under construction; assigned in generate() and
    // used by every add* helper below.
    private static SootClass klass;
    // Every generated bridge method is public static final.
    private static final int mod = Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL;
    // Package prefix of the symbolic expression/operator classes.
    private static final String PACKAGE_NAME = "acteve.symbolic.";
    // Relative class names (within PACKAGE_NAME) of the operator sets.
    private static final String Equality = "integer.Equality";
    private static final String Algebraic = "integer.Algebraic";
    private static final String CMP = "integer.CMP";
    private static final String Bitwise = "integer.Bitwise";
    private static final String LongCMP = "integer.LongExpression";
    private static final String IntCMP = "integer.IntegerExpression";
    private static final String Array = "array.Array";
    private static final String Strings = "string.String";

    /**
     * Builds the symbolic-operations class, registers it as an application
     * class in the current Soot Scene, and returns it.
     */
    static SootClass generate()
    {
        klass = new SootClass(G.SYMOPS_CLASS_NAME, Modifier.PUBLIC);
        klass.setSuperclass(Scene.v().getSootClass("java.lang.Object"));
        klass.setApplicationClass();
        addAlgebraicMethods();
        addEqualityMethods();
        addCMPMethods();
        addBitwiseMethods();
        addLongCMPMethod();
        addIntCMPMethods();
        addArrayMethods();
        addStringMethods();
        return klass;
    }

    /** Arithmetic (+ - * / %) over int/long/float/double, plus negation and int cast. */
    static void addAlgebraicMethods()
    {
        SootClass algebraicInterface = Scene.v().getSootClass(PACKAGE_NAME+Algebraic);
        String[] binops = new String[]{"+", "-", "*", "/", "%"};
        PrimType[] primTypes = new PrimType[]{IntType.v(), LongType.v(), FloatType.v(), DoubleType.v()};
        addBinaryMethods(binops, primTypes, algebraicInterface);
        // Unary operations carry only the symbolic receiver (no extra concrete operand)...
        addUnaryMethod(G.negMethodName, new Type[0], algebraicInterface);
        // ...except the cast, which also takes the target int value.
        addUnaryMethod(G.castMethodName, new Type[]{IntType.v()}, algebraicInterface);
    }

    /** Equality (== !=) over int and Object operands. */
    static void addEqualityMethods()
    {
        SootClass equalityInterface = Scene.v().getSootClass(PACKAGE_NAME+Equality);
        addBinaryMethods(new String[]{"==", "!="}, new Type[]{IntType.v(), RefType.v("java.lang.Object")}, equalityInterface);
    }

    /** String operations; currently only String.contains is modeled. */
    static void addStringMethods()
    {
        SootClass stringsInterface = Scene.v().getSootClass(PACKAGE_NAME+Strings);
        // The "operator" key is the full Soot method signature of String.contains.
        addBinaryMethods(new String[]{"<java.lang.String: boolean contains(java.lang.CharSequence)>"}, new Type[]{RefType.v("java.lang.String")}, stringsInterface);
    }

    /** Floating-point comparisons (cmpl/cmpg bytecodes) over float/double. */
    static void addCMPMethods()
    {
        SootClass cmpInterface = Scene.v().getSootClass(PACKAGE_NAME+CMP);
        String[] binops = new String[]{Jimple.CMPL, Jimple.CMPG};
        PrimType[] primTypes = new PrimType[]{FloatType.v(), DoubleType.v()};
        addBinaryMethods(binops, primTypes, cmpInterface);
    }

    /** Bitwise ops (& | ^) and shifts (>> << >>>) over int/long. */
    static void addBitwiseMethods()
    {
        SootClass bitwiseInterface = Scene.v().getSootClass(PACKAGE_NAME+Bitwise);
        String[] binops = new String[]{"&", "|", "^"};
        PrimType[] primTypes = new PrimType[]{IntType.v(), LongType.v()};
        addBinaryMethods(binops, primTypes, bitwiseInterface);
        String[] shiftops = new String[]{">>", "<<", ">>>"};
        for (String op : shiftops) {
            String opMethodName = G.binopSymbolToMethodName.get(op);
            for (Type type : primTypes) {
                // Shift amount is always int, regardless of the value type.
                addMethod(opMethodName, new Type[]{type, IntType.v()}, bitwiseInterface);
            }
        }
    }

    /** The long cmp bytecode. */
    static void addLongCMPMethod()
    {
        SootClass longCMPClass = Scene.v().getSootClass(PACKAGE_NAME+LongCMP);
        String[] binops = new String[]{Jimple.CMP};
        PrimType[] primTypes = new PrimType[]{LongType.v()};
        addBinaryMethods(binops, primTypes, longCMPClass);
    }

    /** Relational comparisons (> < >= <=) over int. */
    static void addIntCMPMethods()
    {
        SootClass intCMPClass = Scene.v().getSootClass(PACKAGE_NAME+IntCMP);
        String[] binops = new String[]{">", "<", ">=", "<="};
        PrimType[] primTypes = new PrimType[]{IntType.v()};
        addBinaryMethods(binops, primTypes, intCMPClass);
    }

    /** Array get/set (per element type) and array length. */
    static void addArrayMethods()
    {
        SootClass arrayClass = Scene.v().getSootClass(PACKAGE_NAME+Array);
        Type[] types = new Type[]{IntType.v(),
                ShortType.v(),
                CharType.v(),
                ByteType.v(),
                BooleanType.v(),
                LongType.v(),
                FloatType.v(),
                DoubleType.v()};
        String getMethodName = G.arrayGetMethodName;
        String setMethodName = G.arraySetMethodName;
        String lenMethodName = G.arrayLenMethodName;
        for (Type type : types) {
            // One-dimensional array of the element type.
            ArrayType arrType = ArrayType.v(type,1);
            addMethod(setMethodName, new Type[]{arrType, IntType.v(), type}, arrayClass);
            addMethod(getMethodName, new Type[]{arrType, IntType.v()}, arrayClass);
        }
        addUnaryMethod(lenMethodName, new Type[0], arrayClass);
    }

    /**
     * Adds one bridge method per (binop, type) pair; binops are mapped to
     * method names via G.binopSymbolToMethodName.
     */
    static void addBinaryMethods(String[] binops, Type[] types, SootClass operatorClass)
    {
        for (String op : binops) {
            String opMethodName = G.binopSymbolToMethodName.get(op);
            System.out.println("Adding binop method " + opMethodName);
            for (Type type : types) {
                // Binary bridges take the two concrete operands of the same type.
                addMethod(opMethodName, new Type[]{type, type}, operatorClass);
            }
        }
    }

    /**
     * Adds a bridge for a unary operation: (Expression op1, paramTypes...) ->
     * Expression. Returns null if op1 is null (operand not symbolic),
     * otherwise casts op1 to the operator class and invokes opMethodName on it.
     */
    static void addUnaryMethod(String opMethodName, Type[] paramTypes, SootClass operatorClass)
    {
        // NOTE(review): raw List is used throughout because SootMethod's ctor
        // predates generics in this codebase.
        List paramTypesList = new ArrayList();
        paramTypesList.add(G.EXPRESSION_TYPE);
        for(int i = 0; i < paramTypes.length; i++){
            paramTypesList.add(paramTypes[i]);
        }
        SootMethod method = new SootMethod(opMethodName, paramTypesList, G.EXPRESSION_TYPE, mod);
        klass.addMethod(method);
        G.addBody(method);
        List<Local> paramLocals = G.paramLocals(method);
        // First parameter is the symbolic expression; the rest are concrete operands.
        Local op1 = paramLocals.remove(0);
        Local result = G.newLocal(G.EXPRESSION_TYPE);
        Local op1Cast = G.newLocal(operatorClass.getType());
        Stmt op1CastAssignment = G.jimple.newAssignStmt(op1Cast, G.jimple.newCastExpr(op1, op1Cast.getType()));
        // Emits: if (op1 != null) goto cast; return null; cast: op1Cast = (T) op1;
        G.iff(G.neExpr(op1, NullConstant.v()), op1CastAssignment);
        G.ret(NullConstant.v());
        G.insertStmt(op1CastAssignment);
        SootMethodRef opMethod = operatorClass.getMethodByName(opMethodName).makeRef();
        // Interface vs. virtual dispatch depending on the operator class kind.
        InstanceInvokeExpr ie = operatorClass.isInterface() ?
                G.jimple.newInterfaceInvokeExpr(op1Cast, opMethod, paramLocals) :
                G.jimple.newVirtualInvokeExpr(op1Cast, opMethod, paramLocals);
        G.assign(result, ie);
        G.ret(result);
        G.debug(method, Main.DEBUG);
    }

    /**
     * Adds a bridge for an n-ary operation. Signature is
     * (Expression e1..en, concrete c1..cn) -> Expression. If e1 is null it is
     * reconstructed from c1 via the matching *Constant.get(...) factory when
     * any later operand is symbolic; otherwise null is returned. Null
     * secondary operands ei are likewise replaced by constants built from ci.
     */
    static void addMethod(String opMethodName, Type[] paramTypes, SootClass operatorClass)
    {
        int numOperands = paramTypes.length;
        // NOTE(review): raw List kept for the SootMethod ctor, as above.
        List paramTypesList = new ArrayList();
        // First half of the parameters: one symbolic Expression per operand...
        for (int i = 0; i < numOperands; i++) {
            paramTypesList.add(G.EXPRESSION_TYPE);
        }
        // ...second half: the concrete operand values.
        for (int i = 0; i < paramTypes.length; i++) {
            paramTypesList.add(paramTypes[i]);
        }
        SootMethod method = new SootMethod(opMethodName, paramTypesList, G.EXPRESSION_TYPE, mod);
        System.out.println("Adding " + method.getName());
        klass.addMethod(method);
        G.addBody(method);
        List<Local> paramLocals = G.paramLocals(method);
        Local op1 = paramLocals.get(0);
        Local result = G.newLocal(G.EXPRESSION_TYPE);
        Local op1Cast = G.newLocal(operatorClass.getType());
        Local op1Concrete = paramLocals.get(numOperands);
        SootClass constClass = exprConstClassFor(op1Concrete.getType());
        // makeExpr1: rebuild the symbolic op1 from its concrete value.
        Stmt makeExpr1 = G.jimple.newAssignStmt(op1Cast,
                G.staticInvokeExpr(constClass.getMethodByName("get").makeRef(), op1Concrete));
        Stmt op1CastAssignment = G.jimple.newAssignStmt(op1Cast, G.jimple.newCastExpr(op1, op1Cast.getType()));
        // NOTE(review): locString is never used below — dead local in the
        // generator (harmless: unused Jimple locals are dropped), kept as-is.
        Local locString = G.newLocal( RefType.v("java.lang.String"));
        // Debug logging emitted into the generated method (android.util.Log.d).
        G.invoke(G.staticInvokeExpr(Scene.v().getMethod("<android.util.Log: int d(java.lang.String,java.lang.String)>").makeRef(), StringConstant.v("CONDROID"),StringConstant.v(opMethodName + " called")));
        // if (op1 != null) goto op1CastAssignment;
        G.iff(G.neExpr(op1, NullConstant.v()), op1CastAssignment);
        // op1 is null: if any other symbolic operand is non-null, jump to
        // makeExpr1 to synthesize op1 from its concrete value.
        for(int i = 1; i < numOperands; i++){
            Local operand = paramLocals.get(i);
            G.iff(G.neExpr(operand, NullConstant.v()), makeExpr1);
        }
        // All operands concrete: log and return null (nothing symbolic to track).
        G.invoke(G.staticInvokeExpr(Scene.v().getMethod("<android.util.Log: int d(java.lang.String,java.lang.String)>").makeRef(), StringConstant.v("CONDROID"),StringConstant.v(opMethodName + " returns null")));
        G.ret(NullConstant.v());
        // op1 != null path: cast it, then skip over makeExpr1.
        G.insertStmt(op1CastAssignment);
        Stmt nop = G.jimple.newNopStmt();
        G.gotoo(nop);
        G.insertStmt(makeExpr1);
        G.insertStmt(nop);
        List<Local> args = new ArrayList();
        // Replace each null secondary operand with a constant expression
        // built from its concrete counterpart.
        for(int i = 1; i < numOperands; i++){
            Local operand = paramLocals.get(i);
            Local operandConcrete = paramLocals.get(i+numOperands);
            nop = G.jimple.newNopStmt();
            G.iff(G.neExpr(operand, NullConstant.v()), nop);
            constClass = exprConstClassFor(operandConcrete.getType());
            G.assign(operand, G.staticInvokeExpr(constClass.getMethodByName("get").makeRef(), operandConcrete));
            G.insertStmt(nop);
            args.add(operand);
        }
        SootMethodRef opMethod = operatorClass.getMethodByName(opMethodName).makeRef();
        InstanceInvokeExpr ie = operatorClass.isInterface() ?
                G.jimple.newInterfaceInvokeExpr(op1Cast, opMethod, args) :
                G.jimple.newVirtualInvokeExpr(op1Cast, opMethod, args);
        G.assign(result, ie);
        G.invoke(G.staticInvokeExpr(Scene.v().getMethod("<android.util.Log: int d(java.lang.String,java.lang.String)>").makeRef(), StringConstant.v("CONDROID"),StringConstant.v(opMethodName + " returns")));
        G.ret(result);
        G.debug(method, Main.DEBUG);
    }

    /**
     * Maps a concrete operand type to the acteve *Constant class whose static
     * get(...) factory wraps a concrete value as a symbolic constant.
     * Asserts (when -ea is on) on unsupported types.
     */
    private static SootClass exprConstClassFor(Type type)
    {
        String name = null;
        if (type instanceof PrimType) {
            // Normalize sub-int primitives (byte/short/char/boolean) to int.
            type = Type.toMachineType(type);
            if (type.equals(IntType.v()))
                name = "IntegerConstant";
            else if (type.equals(LongType.v()))
                name = "LongConstant";
            else if (type.equals(FloatType.v()))
                name = "FloatConstant";
            else if (type.equals(DoubleType.v()))
                name = "DoubleConstant";
            else
                assert false : type;
            name = "integer."+name;
        }
        else if (type instanceof RefType) {
            if (((RefType) type).getSootClass().getName().equals("java.lang.String")) {
                name = "string.StringConstant";
            } else {
                name = "integer.RefConstant";
            }
        }
        else if (type instanceof ArrayType) {
            if (type.equals(ArrayType.v(BooleanType.v(),1)))
                name = "BooleanArrayConstant";
            else if (type.equals(ArrayType.v(CharType.v(),1)))
                name = "CharArrayConstant";
            else if (type.equals(ArrayType.v(ByteType.v(),1)))
                name = "ByteArrayConstant";
            else if (type.equals(ArrayType.v(ShortType.v(),1)))
                name = "ShortArrayConstant";
            else if (type.equals(ArrayType.v(IntType.v(),1)))
                name = "IntegerArrayConstant";
            else if (type.equals(ArrayType.v(LongType.v(),1)))
                name = "LongArrayConstant";
            else if (type.equals(ArrayType.v(FloatType.v(),1)))
                name = "FloatArrayConstant";
            else if (type.equals(ArrayType.v(DoubleType.v(),1)))
                name = "DoubleArrayConstant";
            else
                assert false : type;
            name = "array."+name;
        }
        else
            assert false : type;
        return Scene.v().getSootClass(PACKAGE_NAME+name);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.clerezza.sparql.query.impl;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.IRI;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.sparql.StringQuerySerializer;
import org.apache.clerezza.sparql.query.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
/**
* This class implements abstract methods of {@link StringQuerySerializer}
* to serialize specific {@link Query} types.
*
* @author hasan
*/
public class SimpleStringQuerySerializer extends StringQuerySerializer {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    /**
     * Serializes a SELECT query: projection list, data set clauses, WHERE
     * clause and solution modifiers.
     */
    @Override
    public String serialize(SelectQuery selectQuery) {
        StringBuffer s = new StringBuffer("SELECT ");
        if (selectQuery.isDistinct()) {
            s.append("DISTINCT\n");
        }
        if (selectQuery.isReduced()) {
            s.append("REDUCED\n");
        }
        if (selectQuery.isSelectAll()) {
            s.append("*");
        } else {
            for (Variable v : selectQuery.getSelection()) {
                appendVariable(s, v);
                s.append(" ");
            }
        }
        s.append("\n");
        appendDataSet(s, (SimpleQuery) selectQuery);
        appendWhere(s, (SimpleQuery) selectQuery);
        appendModifier(s, (SimpleQueryWithSolutionModifier) selectQuery);
        return s.toString();
    }

    /** Appends a variable in SPARQL syntax: {@code ?name}. */
    private void appendVariable(StringBuffer s, Variable v) {
        s.append("?").append(v.getName());
    }

    /** Appends FROM / FROM NAMED clauses for the query's data set, if any. */
    private void appendDataSet(StringBuffer s, SimpleQuery q) {
        DataSet dataSet = q.getDataSet();
        if (dataSet != null) {
            for (IRI dg : dataSet.getDefaultGraphs()) {
                s.append("FROM ").append(dg.toString()).append("\n");
            }
            for (IRI ng : dataSet.getNamedGraphs()) {
                s.append("FROM NAMED ").append(ng.toString()).append("\n");
            }
        }
    }

    /** Appends the WHERE clause; omitted entirely when there is no pattern. */
    private void appendWhere(StringBuffer s, SimpleQuery q) {
        GroupGraphPattern queryPattern = q.getQueryPattern();
        if (queryPattern == null) {
            return;
        }
        s.append("WHERE\n");
        appendGroupGraphPattern(s, q.getQueryPattern());
    }

    /** Appends a group graph pattern: {@code { patterns... FILTER ... } }. */
    private void appendGroupGraphPattern(StringBuffer s,
            GroupGraphPattern groupGraphPattern) {
        s.append("{ ");
        for (GraphPattern graphPattern : groupGraphPattern.getGraphPatterns()) {
            appendGraphPattern(s, graphPattern);
        }
        for (Expression e : groupGraphPattern.getFilter()) {
            // Built-in and function calls bring their own parentheses; any
            // other expression must be bracketted per the SPARQL grammar.
            boolean brackettedExpr = !((e instanceof BuiltInCall)
                    || (e instanceof FunctionCall));
            s.append("FILTER ");
            if (brackettedExpr) {
                s.append("(");
            }
            appendExpression(s, e);
            if (brackettedExpr) {
                s.append(")");
            }
            s.append("\n");
        }
        s.append("} ");
    }

    /** Dispatches on the concrete graph-pattern type; warns on unknown ones. */
    private void appendGraphPattern(StringBuffer s, GraphPattern graphPattern) {
        if (graphPattern instanceof BasicGraphPattern) {
            appendTriplePatterns(s,
                    ((BasicGraphPattern) graphPattern).getTriplePatterns());
        } else if (graphPattern instanceof GroupGraphPattern) {
            appendGroupGraphPattern(s, (GroupGraphPattern) graphPattern);
        } else if (graphPattern instanceof OptionalGraphPattern) {
            appendGraphPattern(s,
                    ((OptionalGraphPattern) graphPattern).getMainGraphPattern());
            s.append(" OPTIONAL ");
            appendGroupGraphPattern(s,
                    ((OptionalGraphPattern) graphPattern).getOptionalGraphPattern());
        } else if (graphPattern instanceof AlternativeGraphPattern) {
            List<GroupGraphPattern> alternativeGraphPatterns =
                    ((AlternativeGraphPattern) graphPattern).getAlternativeGraphPatterns();
            if ((alternativeGraphPatterns != null) &&
                    (!alternativeGraphPatterns.isEmpty())) {
                appendGroupGraphPattern(s, alternativeGraphPatterns.get(0));
                int size = alternativeGraphPatterns.size();
                int i = 1;
                while (i < size) {
                    s.append(" UNION ");
                    appendGroupGraphPattern(s, alternativeGraphPatterns.get(i));
                    i++;
                }
            }
        } else if (graphPattern instanceof GraphGraphPattern) {
            // BUG FIX: this previously emitted "ImmutableGraph " — an
            // over-eager class rename that also hit the string literal. The
            // SPARQL keyword for a named-graph pattern is GRAPH.
            s.append("GRAPH ");
            appendResourceOrVariable(s, ((GraphGraphPattern) graphPattern).getGraph());
            s.append(" ");
            appendGroupGraphPattern(s, ((GraphGraphPattern) graphPattern).getGroupGraphPattern());
        } else {
            logger.warn("Unsupported GraphPattern {}", graphPattern.getClass());
        }
    }

    /** Appends each triple pattern as {@code subject predicate object .}. */
    private void appendTriplePatterns(StringBuffer s,
            Set<TriplePattern> triplePatterns) {
        for (TriplePattern p : triplePatterns) {
            appendResourceOrVariable(s, p.getSubject());
            s.append(" ");
            appendResourceOrVariable(s, p.getPredicate());
            s.append(" ");
            appendResourceOrVariable(s, p.getObject());
            s.append(" .\n");
        }
    }

    /** Appends a variable, a blank node ({@code _:label}) or a resource. */
    private void appendResourceOrVariable(StringBuffer s, ResourceOrVariable n) {
        if (n.isVariable()) {
            appendVariable(s, n.getVariable());
        } else {
            RDFTerm r = n.getResource();
            if (r instanceof BlankNode) {
                // '@' is not legal in a blank node label; replace it.
                s.append("_:").append(r.toString().replace("@", "."));
            } else {
                s.append(r.toString());
            }
        }
    }

    /** Recursively serializes an expression tree. */
    private void appendExpression(StringBuffer s, Expression e) {
        if (e instanceof Variable) {
            appendVariable(s, (Variable) e);
        } else if (e instanceof BinaryOperation) {
            BinaryOperation bo = (BinaryOperation) e;
            s.append("(");
            appendExpression(s, bo.getLhsOperand());
            s.append(") ").append(bo.getOperatorString()).append(" (");
            appendExpression(s, bo.getRhsOperand());
            s.append(")");
        } else if (e instanceof UnaryOperation) {
            UnaryOperation uo = (UnaryOperation) e;
            s.append(uo.getOperatorString()).append(" (");
            appendExpression(s, uo.getOperand());
            s.append(")");
        } else if (e instanceof BuiltInCall) {
            BuiltInCall b = (BuiltInCall) e;
            appendCall(s, b.getName(), b.getArguements());
        } else if (e instanceof FunctionCall) {
            FunctionCall f = (FunctionCall) e;
            appendCall(s, f.getName().getUnicodeString(), f.getArguements());
        } else if (e instanceof LiteralExpression) {
            appendLiteralExpression(s, (LiteralExpression) e);
        } else if (e instanceof UriRefExpression) {
            s.append(((UriRefExpression) e).getUriRef().toString());
        }
    }

    /** Appends {@code name(arg1,arg2,...)}; handles the empty-argument case. */
    private void appendCall(StringBuffer s, String name, List<Expression> expr) {
        s.append(name).append("(");
        for (Expression e : expr) {
            appendExpression(s, e);
            s.append(",");
        }
        if (expr.isEmpty()) {
            s.append(")");
        } else {
            // Replace the trailing comma with the closing parenthesis.
            s.setCharAt(s.length()-1, ')');
        }
    }

    /** Appends a literal using its native serialization. */
    private void appendLiteralExpression(StringBuffer s, LiteralExpression le) {
        s.append(le.getLiteral().toString());
    }

    /** Appends ORDER BY / OFFSET / LIMIT solution modifiers, if present. */
    private void appendModifier(StringBuffer s, SimpleQueryWithSolutionModifier q) {
        List<OrderCondition> orderConditions = q.getOrderConditions();
        if ((orderConditions != null) && (!orderConditions.isEmpty())) {
            s.append("ORDER BY ");
            for (OrderCondition oc : orderConditions) {
                appendOrderCondition(s, oc);
                s.append("\n");
            }
        }
        if (q.getOffset() > 0) {
            s.append("OFFSET ").append(q.getOffset()).append("\n");
        }
        if (q.getLimit() >= 0) {
            s.append("LIMIT ").append(q.getLimit()).append("\n");
        }
    }

    /** Appends one order condition, wrapping descending ones in DESC(...). */
    private void appendOrderCondition(StringBuffer s, OrderCondition oc) {
        if (!oc.isAscending()) {
            s.append("DESC(");
        }
        appendExpression(s, oc.getExpression());
        if (!oc.isAscending()) {
            s.append(")");
        }
        s.append(" ");
    }

    /**
     * Serializes a CONSTRUCT query: template, data set, WHERE clause and
     * solution modifiers.
     */
    @Override
    public String serialize(ConstructQuery constructQuery) {
        StringBuffer s = new StringBuffer("CONSTRUCT\n");
        Set<TriplePattern> triplePatterns = constructQuery.getConstructTemplate();
        s.append("{ ");
        if (triplePatterns != null && !triplePatterns.isEmpty()) {
            appendTriplePatterns(s, triplePatterns);
        }
        s.append("}\n");
        appendDataSet(s, (SimpleQuery) constructQuery);
        appendWhere(s, (SimpleQuery) constructQuery);
        appendModifier(s, (SimpleQueryWithSolutionModifier) constructQuery);
        return s.toString();
    }

    /** Serializes a DESCRIBE query (either {@code *} or explicit resources). */
    @Override
    public String serialize(DescribeQuery describeQuery) {
        StringBuffer s = new StringBuffer("DESCRIBE\n");
        if (describeQuery.isDescribeAll()) {
            s.append("*");
        } else {
            for (ResourceOrVariable n : describeQuery.getResourcesToDescribe()) {
                appendResourceOrVariable(s, n);
                s.append(" ");
            }
        }
        appendDataSet(s, (SimpleQuery) describeQuery);
        appendWhere(s, (SimpleQuery) describeQuery);
        appendModifier(s, (SimpleQueryWithSolutionModifier) describeQuery);
        return s.toString();
    }

    /** Serializes an ASK query (no solution modifiers apply). */
    @Override
    public String serialize(AskQuery askQuery) {
        StringBuffer s = new StringBuffer("ASK\n");
        appendDataSet(s, (SimpleQuery) askQuery);
        appendWhere(s, (SimpleQuery) askQuery);
        return s.toString();
    }
}
| |
/*
* Phone.com API
* This is a Phone.com api Swagger definition
*
* OpenAPI spec version: 1.0.0
* Contact: apisupport@phone.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.swagger.client.model;
import java.util.Objects;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.client.model.Recipient;
import java.util.ArrayList;
import java.util.List;
import org.joda.time.DateTime;
/**
* The Full SMS Object is identical to the SMS Summary Object. See above for details.
*/
@ApiModel(description = "The Full SMS Object is identical to the SMS Summary Object. See above for details.")
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2017-03-21T10:43:19.319+01:00")
public class SmsFull {
  // NOTE: swagger-codegen generated model — keep manual edits minimal so the
  // class can be regenerated without losing changes.

  // Unique message id; read-only on the API side.
  @SerializedName("id")
  private String id = null;

  // Caller-ID number the message is sent from.
  @SerializedName("from")
  private String from = null;

  // Recipients; initialized to an empty list so addToItem() never NPEs.
  @SerializedName("to")
  private List<Recipient> to = new ArrayList<Recipient>();

  // "in" for incoming, "out" for outgoing (see getDirection javadoc).
  @SerializedName("direction")
  private String direction = null;

  // Creation time as a Unix epoch timestamp (seconds, UTC).
  @SerializedName("created_epoch")
  private Integer createdEpoch = null;

  // Creation time as a date object (UTC).
  @SerializedName("created_at")
  private DateTime createdAt = null;

  // Message body.
  @SerializedName("text")
  private String text = null;

  // Fluent setter variant used by the generated builder-style API.
  public SmsFull id(String id) {
    this.id = id;
    return this;
  }

   /**
   * Unique SMS ID. Read-only.
   * @return id
  **/
  @ApiModelProperty(example = "null", required = true, value = "Unique SMS ID. Read-only.")
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public SmsFull from(String from) {
    this.from = from;
    return this;
  }

   /**
   * Caller ID number to display on the incoming/outgoing SMS message. For an outgoing message, it must be a phone number associated with your Phone.com account.
   * @return from
  **/
  @ApiModelProperty(example = "null", required = true, value = "Caller ID number to display on the incoming/outgoing SMS message. For an outgoing message, it must be a phone number associated with your Phone.com account.")
  public String getFrom() {
    return from;
  }

  public void setFrom(String from) {
    this.from = from;
  }

  public SmsFull to(List<Recipient> to) {
    this.to = to;
    return this;
  }

  // Appends a single recipient to the existing list (fluent).
  public SmsFull addToItem(Recipient toItem) {
    this.to.add(toItem);
    return this;
  }

   /**
   * An array of SMS recipients.
   * @return to
  **/
  @ApiModelProperty(example = "null", required = true, value = "An array of SMS recipients.")
  public List<Recipient> getTo() {
    return to;
  }

  public void setTo(List<Recipient> to) {
    this.to = to;
  }

  public SmsFull direction(String direction) {
    this.direction = direction;
    return this;
  }

   /**
   * Direction of SMS. 'in' for Incoming messages, 'out' for Outgoing messages.
   * @return direction
  **/
  @ApiModelProperty(example = "null", required = true, value = "Direction of SMS. 'in' for Incoming messages, 'out' for Outgoing messages.")
  public String getDirection() {
    return direction;
  }

  public void setDirection(String direction) {
    this.direction = direction;
  }

  public SmsFull createdEpoch(Integer createdEpoch) {
    this.createdEpoch = createdEpoch;
    return this;
  }

   /**
   * Unix time stamp representing the UTC time that the object was created in the Phone.com API system.
   * @return createdEpoch
  **/
  @ApiModelProperty(example = "null", required = true, value = "Unix time stamp representing the UTC time that the object was created in the Phone.com API system.")
  public Integer getCreatedEpoch() {
    return createdEpoch;
  }

  public void setCreatedEpoch(Integer createdEpoch) {
    this.createdEpoch = createdEpoch;
  }

  public SmsFull createdAt(DateTime createdAt) {
    this.createdAt = createdAt;
    return this;
  }

   /**
   * Date string representing the UTC time that the object was created in the Phone.com API system.
   * @return createdAt
  **/
  @ApiModelProperty(example = "null", required = true, value = "Date string representing the UTC time that the object was created in the Phone.com API system.")
  public DateTime getCreatedAt() {
    return createdAt;
  }

  public void setCreatedAt(DateTime createdAt) {
    this.createdAt = createdAt;
  }

  public SmsFull text(String text) {
    this.text = text;
    return this;
  }

   /**
   * Body of the SMS text
   * @return text
  **/
  @ApiModelProperty(example = "null", required = true, value = "Body of the SMS text")
  public String getText() {
    return text;
  }

  public void setText(String text) {
    this.text = text;
  }

  // Field-wise equality over all serialized properties.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    SmsFull smsFull = (SmsFull) o;
    return Objects.equals(this.id, smsFull.id) &&
        Objects.equals(this.from, smsFull.from) &&
        Objects.equals(this.to, smsFull.to) &&
        Objects.equals(this.direction, smsFull.direction) &&
        Objects.equals(this.createdEpoch, smsFull.createdEpoch) &&
        Objects.equals(this.createdAt, smsFull.createdAt) &&
        Objects.equals(this.text, smsFull.text);
  }

  // Must stay consistent with equals(): hashes the same field set.
  @Override
  public int hashCode() {
    return Objects.hash(id, from, to, direction, createdEpoch, createdAt, text);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class SmsFull {\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    from: ").append(toIndentedString(from)).append("\n");
    sb.append("    to: ").append(toIndentedString(to)).append("\n");
    sb.append("    direction: ").append(toIndentedString(direction)).append("\n");
    sb.append("    createdEpoch: ").append(toIndentedString(createdEpoch)).append("\n");
    sb.append("    createdAt: ").append(toIndentedString(createdAt)).append("\n");
    sb.append("    text: ").append(toIndentedString(text)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.client.gateway.local;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.client.cli.DefaultCLI;
import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
import org.apache.flink.client.python.PythonFunctionFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.api.config.OptimizerConfigOptions;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.client.config.Environment;
import org.apache.flink.table.client.config.entries.CatalogEntry;
import org.apache.flink.table.client.gateway.SessionContext;
import org.apache.flink.table.client.gateway.utils.DummyTableSourceFactory;
import org.apache.flink.table.client.gateway.utils.EnvironmentFileUtil;
import org.apache.flink.table.factories.CatalogFactory;
import org.apache.flink.table.functions.python.PythonScalarFunction;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.TimestampKind;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.util.StringUtils;
import org.apache.commons.cli.Options;
import org.junit.Test;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Test for {@link ExecutionContext}.
*/
public class ExecutionContextTest {

	// YAML environment files (resolved from the test classpath) from which the
	// ExecutionContexts under test are built.
	private static final String DEFAULTS_ENVIRONMENT_FILE = "test-sql-client-defaults.yaml";
	private static final String MODULES_ENVIRONMENT_FILE = "test-sql-client-modules.yaml";
	public static final String CATALOGS_ENVIRONMENT_FILE = "test-sql-client-catalogs.yaml";
	private static final String STREAMING_ENVIRONMENT_FILE = "test-sql-client-streaming.yaml";
	private static final String CONFIGURATION_ENVIRONMENT_FILE = "test-sql-client-configuration.yaml";
	private static final String FUNCTION_ENVIRONMENT_FILE = "test-sql-client-python-functions.yaml";

	// Verifies that the 'execution' section of the defaults file (watermark
	// interval, failure-rate restart strategy) is applied to the ExecutionConfig.
	// NOTE(review): unlike testCatalogs/testDatabases, this test (and most below)
	// never calls context.close() — confirm contexts need no cleanup here.
	@Test
	public void testExecutionConfig() throws Exception {
		final ExecutionContext<?> context = createDefaultExecutionContext();
		final ExecutionConfig config = context.getExecutionConfig();
		assertEquals(99, config.getAutoWatermarkInterval());
		final RestartStrategies.RestartStrategyConfiguration restartConfig = config.getRestartStrategy();
		assertTrue(restartConfig instanceof RestartStrategies.FailureRateRestartStrategyConfiguration);
		final RestartStrategies.FailureRateRestartStrategyConfiguration failureRateStrategy =
			(RestartStrategies.FailureRateRestartStrategyConfiguration) restartConfig;
		assertEquals(10, failureRateStrategy.getMaxFailureRate());
		assertEquals(99_000, failureRateStrategy.getFailureInterval().toMilliseconds());
		assertEquals(1_000, failureRateStrategy.getDelayBetweenAttemptsInterval().toMilliseconds());
	}

	// Verifies that all modules declared in the modules environment file are
	// registered in addition to the built-in 'core' module.
	@Test
	public void testModules() throws Exception {
		final ExecutionContext<?> context = createModuleExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		Set<String> allModules = new HashSet<>(Arrays.asList(tableEnv.listModules()));
		assertEquals(4, allModules.size());
		assertEquals(
			new HashSet<>(
				Arrays.asList(
					"core",
					"mymodule",
					"myhive",
					"myhive2")
			),
			allModules
		);
	}

	// Verifies catalog registration: the configured default catalog/database,
	// Hive catalogs with explicit and defaulted versions, and the full catalog set.
	@Test
	public void testCatalogs() throws Exception {
		final String inmemoryCatalog = "inmemorycatalog";
		final String hiveCatalog = "hivecatalog";
		final String hiveDefaultVersionCatalog = "hivedefaultversion";
		final ExecutionContext<?> context = createCatalogExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		assertEquals(inmemoryCatalog, tableEnv.getCurrentCatalog());
		assertEquals("mydatabase", tableEnv.getCurrentDatabase());
		Catalog catalog = tableEnv.getCatalog(hiveCatalog).orElse(null);
		assertNotNull(catalog);
		assertTrue(catalog instanceof HiveCatalog);
		assertEquals("2.3.4", ((HiveCatalog) catalog).getHiveVersion());
		catalog = tableEnv.getCatalog(hiveDefaultVersionCatalog).orElse(null);
		assertNotNull(catalog);
		assertTrue(catalog instanceof HiveCatalog);
		// make sure we have assigned a default hive version
		assertFalse(StringUtils.isNullOrWhitespaceOnly(((HiveCatalog) catalog).getHiveVersion()));
		tableEnv.useCatalog(hiveCatalog);
		assertEquals(hiveCatalog, tableEnv.getCurrentCatalog());
		Set<String> allCatalogs = new HashSet<>(Arrays.asList(tableEnv.listCatalogs()));
		assertEquals(6, allCatalogs.size());
		assertEquals(
			new HashSet<>(
				Arrays.asList(
					"default_catalog",
					inmemoryCatalog,
					hiveCatalog,
					hiveDefaultVersionCatalog,
					"catalog1",
					"catalog2")
			),
			allCatalogs
		);
		context.close();
	}

	// Verifies database listing and switching across the in-memory and Hive catalogs.
	@Test
	public void testDatabases() throws Exception {
		final String hiveCatalog = "hivecatalog";
		final ExecutionContext<?> context = createCatalogExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		assertEquals(1, tableEnv.listDatabases().length);
		assertEquals("mydatabase", tableEnv.listDatabases()[0]);
		tableEnv.useCatalog(hiveCatalog);
		assertEquals(2, tableEnv.listDatabases().length);
		assertEquals(
			new HashSet<>(
				Arrays.asList(
					HiveCatalog.DEFAULT_DB,
					DependencyTest.TestHiveCatalogFactory.ADDITIONAL_TEST_DATABASE)
			),
			new HashSet<>(Arrays.asList(tableEnv.listDatabases()))
		);
		tableEnv.useCatalog(hiveCatalog);
		assertEquals(HiveCatalog.DEFAULT_DB, tableEnv.getCurrentDatabase());
		tableEnv.useDatabase(DependencyTest.TestHiveCatalogFactory.ADDITIONAL_TEST_DATABASE);
		assertEquals(DependencyTest.TestHiveCatalogFactory.ADDITIONAL_TEST_DATABASE, tableEnv.getCurrentDatabase());
		context.close();
	}

	// Verifies that the UDFs declared in the defaults file are registered.
	// Both arrays are sorted so the assertion is order-independent.
	@Test
	public void testFunctions() throws Exception {
		final ExecutionContext<?> context = createDefaultExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		final String[] expected = new String[]{"scalarudf", "tableudf", "aggregateudf"};
		final String[] actual = tableEnv.listUserDefinedFunctions();
		Arrays.sort(expected);
		Arrays.sort(actual);
		assertArrayEquals(expected, actual);
	}

	// Verifies registration of a Python UDF by swapping in a stub
	// PythonFunctionFactory; the original factory is restored in 'finally'
	// because PYTHON_FUNCTION_FACTORY_REF is process-global state.
	@Test
	public void testPythonFunction() throws Exception {
		PythonFunctionFactory pythonFunctionFactory = PythonFunctionFactory.PYTHON_FUNCTION_FACTORY_REF.get();
		PythonFunctionFactory testFunctionFactory = (moduleName, objectName) ->
			new PythonScalarFunction(null, null, null, null, null, false, null);
		try {
			PythonFunctionFactory.PYTHON_FUNCTION_FACTORY_REF.set(testFunctionFactory);
			ExecutionContext context = createPythonFunctionExecutionContext();
			final String[] expected = new String[]{"pythonudf"};
			final String[] actual = context.getTableEnvironment().listUserDefinedFunctions();
			assertArrayEquals(expected, actual);
		} finally {
			PythonFunctionFactory.PYTHON_FUNCTION_FACTORY_REF.set(pythonFunctionFactory);
		}
	}

	// Verifies that tables and views from the defaults file are all registered.
	@Test
	public void testTables() throws Exception {
		final ExecutionContext<?> context = createDefaultExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		assertArrayEquals(
			new String[]{"TableNumber1", "TableNumber2", "TableSourceSink", "TestView1", "TestView2"},
			tableEnv.listTables());
	}

	// Verifies temporal-table setup from the streaming environment file:
	// tables, temporal-table functions, the joined schema, and that the
	// source's third field keeps its ROWTIME attribute.
	@Test
	public void testTemporalTables() throws Exception {
		final ExecutionContext<?> context = createStreamingExecutionContext();
		final StreamTableEnvironment tableEnv = (StreamTableEnvironment) context.getTableEnvironment();
		assertArrayEquals(
			new String[]{"EnrichmentSource", "HistorySource", "HistoryView", "TemporalTableUsage"},
			tableEnv.listTables());
		assertArrayEquals(
			new String[]{"sourcetemporaltable", "viewtemporaltable"},
			tableEnv.listUserDefinedFunctions());
		assertArrayEquals(
			new String[]{"integerField", "stringField", "rowtimeField", "integerField0", "stringField0", "rowtimeField0"},
			tableEnv.from("TemporalTableUsage").getSchema().getFieldNames());
		// Please delete this test after removing registerTableSourceInternal in SQL-CLI.
		TableSchema tableSchema = tableEnv.from("EnrichmentSource").getSchema();
		LogicalType timestampType = tableSchema.getFieldDataTypes()[2].getLogicalType();
		assertTrue(timestampType instanceof TimestampType);
		assertEquals(TimestampKind.ROWTIME, ((TimestampType) timestampType).getKind());
	}

	// Verifies that 'configuration' entries from the environment file land in the
	// TableConfig, and that untouched options keep their defaults.
	@Test
	public void testConfiguration() throws Exception {
		final ExecutionContext<?> context = createConfigurationExecutionContext();
		final TableEnvironment tableEnv = context.getTableEnvironment();
		assertEquals(
			100,
			tableEnv.getConfig().getConfiguration().getInteger(
				ExecutionConfigOptions.TABLE_EXEC_SORT_DEFAULT_LIMIT));
		assertTrue(
			tableEnv.getConfig().getConfiguration().getBoolean(
				ExecutionConfigOptions.TABLE_EXEC_SPILL_COMPRESSION_ENABLED));
		assertEquals(
			"128kb",
			tableEnv.getConfig().getConfiguration().getString(
				ExecutionConfigOptions.TABLE_EXEC_SPILL_COMPRESSION_BLOCK_SIZE));
		assertTrue(
			tableEnv.getConfig().getConfiguration().getBoolean(
				OptimizerConfigOptions.TABLE_OPTIMIZER_JOIN_REORDER_ENABLED));
		// these options are not modified and should be equal to their default value
		assertEquals(
			ExecutionConfigOptions.TABLE_EXEC_SORT_ASYNC_MERGE_ENABLED.defaultValue(),
			tableEnv.getConfig().getConfiguration().getBoolean(
				ExecutionConfigOptions.TABLE_EXEC_SORT_ASYNC_MERGE_ENABLED));
		assertEquals(
			ExecutionConfigOptions.TABLE_EXEC_SHUFFLE_MODE.defaultValue(),
			tableEnv.getConfig().getConfiguration().getString(
				ExecutionConfigOptions.TABLE_EXEC_SHUFFLE_MODE));
		assertEquals(
			OptimizerConfigOptions.TABLE_OPTIMIZER_BROADCAST_JOIN_THRESHOLD.defaultValue().longValue(),
			tableEnv.getConfig().getConfiguration().getLong(
				OptimizerConfigOptions.TABLE_OPTIMIZER_BROADCAST_JOIN_THRESHOLD));
	}

	// Verifies that catalog construction happens with the user classloader as the
	// thread context classloader: TestClassLoaderCatalog (see below) asserts this
	// during construction/open, so building the context is the whole test.
	@Test
	public void testInitCatalogs() throws Exception{
		final Map<String, String> replaceVars = createDefaultReplaceVars();
		Environment env = EnvironmentFileUtil.parseModified(DEFAULTS_ENVIRONMENT_FILE, replaceVars);
		Map<String, Object> catalogProps = new HashMap<>();
		catalogProps.put("name", "test");
		catalogProps.put("type", "test_cl_catalog");
		env.getCatalogs().clear();
		env.getCatalogs().put("test", CatalogEntry.create(catalogProps));
		Configuration flinkConfig = new Configuration();
		ExecutionContext.builder(env,
			new SessionContext("test-session", new Environment()),
			Collections.emptyList(),
			flinkConfig,
			new DefaultClusterClientServiceLoader(),
			new Options(),
			Collections.singletonList(new DefaultCLI(flinkConfig))).build();
	}

	// Builds an ExecutionContext from the given environment file after applying
	// the $VAR_* placeholder substitutions.
	@SuppressWarnings("unchecked")
	private <T> ExecutionContext<T> createExecutionContext(String file, Map<String, String> replaceVars) throws Exception {
		final Environment env = EnvironmentFileUtil.parseModified(
			file,
			replaceVars);
		final Configuration flinkConfig = new Configuration();
		return (ExecutionContext<T>) ExecutionContext.builder(
			env,
			new SessionContext("test-session", new Environment()),
			Collections.emptyList(),
			flinkConfig,
			new DefaultClusterClientServiceLoader(),
			new Options(),
			Collections.singletonList(new DefaultCLI(flinkConfig)))
			.build();
	}

	// Placeholder values shared by the defaults-file based tests.
	private Map<String, String> createDefaultReplaceVars() {
		Map<String, String> replaceVars = new HashMap<>();
		replaceVars.put("$VAR_PLANNER", "old");
		replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
		replaceVars.put("$VAR_RESULT_MODE", "changelog");
		replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
		replaceVars.put("$VAR_MAX_ROWS", "100");
		replaceVars.put("$VAR_RESTART_STRATEGY_TYPE", "failure-rate");
		return replaceVars;
	}

	// Context backed by DEFAULTS_ENVIRONMENT_FILE.
	private <T> ExecutionContext<T> createDefaultExecutionContext() throws Exception {
		final Map<String, String> replaceVars = createDefaultReplaceVars();
		return createExecutionContext(DEFAULTS_ENVIRONMENT_FILE, replaceVars);
	}

	// Context backed by MODULES_ENVIRONMENT_FILE.
	private <T> ExecutionContext<T> createModuleExecutionContext() throws Exception {
		final Map<String, String> replaceVars = new HashMap<>();
		replaceVars.put("$VAR_PLANNER", "old");
		replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
		replaceVars.put("$VAR_RESULT_MODE", "changelog");
		replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
		replaceVars.put("$VAR_MAX_ROWS", "100");
		return createExecutionContext(MODULES_ENVIRONMENT_FILE, replaceVars);
	}

	// Context backed by CATALOGS_ENVIRONMENT_FILE.
	private <T> ExecutionContext<T> createCatalogExecutionContext() throws Exception {
		final Map<String, String> replaceVars = new HashMap<>();
		replaceVars.put("$VAR_PLANNER", "old");
		replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
		replaceVars.put("$VAR_RESULT_MODE", "changelog");
		replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
		replaceVars.put("$VAR_MAX_ROWS", "100");
		return createExecutionContext(CATALOGS_ENVIRONMENT_FILE, replaceVars);
	}

	// Context backed by STREAMING_ENVIRONMENT_FILE, wired to the dummy source factory.
	private <T> ExecutionContext<T> createStreamingExecutionContext() throws Exception {
		final Map<String, String> replaceVars = new HashMap<>();
		replaceVars.put("$VAR_CONNECTOR_TYPE", DummyTableSourceFactory.CONNECTOR_TYPE_VALUE);
		replaceVars.put("$VAR_CONNECTOR_PROPERTY", DummyTableSourceFactory.TEST_PROPERTY);
		replaceVars.put("$VAR_CONNECTOR_PROPERTY_VALUE", "");
		return createExecutionContext(STREAMING_ENVIRONMENT_FILE, replaceVars);
	}

	// Context backed by CONFIGURATION_ENVIRONMENT_FILE (no placeholders).
	private <T> ExecutionContext<T> createConfigurationExecutionContext() throws Exception {
		return createExecutionContext(CONFIGURATION_ENVIRONMENT_FILE, new HashMap<>());
	}

	// Context backed by FUNCTION_ENVIRONMENT_FILE (no placeholders).
	private <T> ExecutionContext<T> createPythonFunctionExecutionContext() throws Exception {
		return createExecutionContext(FUNCTION_ENVIRONMENT_FILE, new HashMap<>());
	}

	// a catalog that requires the thread context class loader to be a user code classloader during construction and opening
	private static class TestClassLoaderCatalog extends GenericInMemoryCatalog {

		// Concrete classloader classes produced by FlinkUserCodeClassLoaders; used
		// below to recognize a user-code classloader by type.
		private static final Class parentFirstCL = FlinkUserCodeClassLoaders.parentFirst(
			new URL[0], TestClassLoaderCatalog.class.getClassLoader()).getClass();
		private static final Class childFirstCL = FlinkUserCodeClassLoaders.childFirst(
			new URL[0], TestClassLoaderCatalog.class.getClassLoader(), new String[0]).getClass();

		TestClassLoaderCatalog(String name) {
			super(name);
			verifyUserClassLoader();
		}

		@Override
		public void open() {
			verifyUserClassLoader();
			super.open();
		}

		// Fails the enclosing test if the context classloader is not one of the
		// two Flink user-code classloader types.
		private void verifyUserClassLoader() {
			ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
			assertTrue(parentFirstCL.isInstance(contextLoader) || childFirstCL.isInstance(contextLoader));
		}
	}

	/**
	 * Factory to create TestClassLoaderCatalog.
	 */
	public static class TestClassLoaderCatalogFactory implements CatalogFactory {

		@Override
		public Catalog createCatalog(String name, Map<String, String> properties) {
			return new TestClassLoaderCatalog("test_cl");
		}

		@Override
		public Map<String, String> requiredContext() {
			// Matched by testInitCatalogs via the 'type: test_cl_catalog' property.
			Map<String, String> context = new HashMap<>();
			context.put("type", "test_cl_catalog");
			return context;
		}

		@Override
		public List<String> supportedProperties() {
			return Collections.emptyList();
		}
	}
}
| |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.agent.plugin;
import scouter.agent.Configure;
import scouter.agent.Logger;
import scouter.agent.trace.HookArgs;
import scouter.agent.trace.HookReturn;
import scouter.agent.trace.TraceSQL;
import javassist.*;
import scouter.lang.pack.PerfCounterPack;
import scouter.util.FileUtil;
import scouter.util.Hexa32;
import scouter.util.StringUtil;
import scouter.util.ThreadUtil;
import java.io.BufferedReader;
import java.io.File;
import java.io.StringReader;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
public class PluginLoader extends Thread {
// Lazily created singleton loader thread; all access goes through getInstance().
private static PluginLoader instance;

/**
 * Returns the singleton loader thread, creating and starting it on first use.
 * The method is synchronized so concurrent first callers cannot race the
 * creation and each caller observes a fully configured, started thread.
 */
public synchronized static PluginLoader getInstance() {
	if (instance != null) {
		return instance;
	}
	PluginLoader loader = new PluginLoader();
	loader.setDaemon(true);
	loader.setName(ThreadUtil.getName(PluginLoader.class));
	loader.start();
	instance = loader;
	return instance;
}
/**
 * Poll loop of the loader thread: every five seconds, re-scan the configured
 * plug-in directory and reload any script whose timestamp changed. Any failure
 * is logged under code A160 and the loop continues; this method never returns.
 */
public void run() {
	for (;;) {
		try {
			reloadIfModified(Configure.getInstance().plugin_dir);
		} catch (Throwable t) {
			Logger.println("A160", t.toString());
		}
		ThreadUtil.sleep(5000);
	}
}
/**
 * Re-creates each plug-in whose backing script under {@code root} changed since
 * its last load. A missing or unreadable script disables the corresponding
 * plug-in by resetting its static {@code plugIn} reference to null.
 */
private void reloadIfModified(File root) {
	File script = new File(root, "service.plug");
	if (!script.canRead()) {
		PluginAppServiceTrace.plugIn = null;
	} else if (PluginAppServiceTrace.plugIn == null
			|| PluginAppServiceTrace.plugIn.lastModified != script.lastModified()) {
		PluginAppServiceTrace.plugIn = createAppService(script);
	}

	script = new File(root, "httpservice.plug");
	if (!script.canRead()) {
		PluginHttpServiceTrace.plugIn = null;
	} else if (PluginHttpServiceTrace.plugIn == null
			|| PluginHttpServiceTrace.plugIn.lastModified != script.lastModified()) {
		PluginHttpServiceTrace.plugIn = createHttpService(script);
	}

	// Background-thread scripts reuse the app-service compiler.
	script = new File(root, "backthread.plug");
	if (!script.canRead()) {
		PluginBackThreadTrace.plugIn = null;
	} else if (PluginBackThreadTrace.plugIn == null
			|| PluginBackThreadTrace.plugIn.lastModified != script.lastModified()) {
		PluginBackThreadTrace.plugIn = createAppService(script);
	}

	script = new File(root, "capture.plug");
	if (!script.canRead()) {
		PluginCaptureTrace.plugIn = null;
	} else if (PluginCaptureTrace.plugIn == null
			|| PluginCaptureTrace.plugIn.lastModified != script.lastModified()) {
		PluginCaptureTrace.plugIn = createICaptureTrace(script);
	}

	// Spring controller captures share the capture compiler.
	script = new File(root, "springControllerCapture.plug");
	if (!script.canRead()) {
		PluginSpringControllerCaptureTrace.plugIn = null;
	} else if (PluginSpringControllerCaptureTrace.plugIn == null
			|| PluginSpringControllerCaptureTrace.plugIn.lastModified != script.lastModified()) {
		PluginSpringControllerCaptureTrace.plugIn = createICaptureTrace(script);
	}

	script = new File(root, "jdbcpool.plug");
	if (!script.canRead()) {
		PluginJdbcPoolTrace.plugIn = null;
	} else if (PluginJdbcPoolTrace.plugIn == null
			|| PluginJdbcPoolTrace.plugIn.lastModified != script.lastModified()) {
		PluginJdbcPoolTrace.plugIn = createIJdbcPool(script);
		if (PluginJdbcPoolTrace.plugIn != null) {
			// A freshly loaded pool plug-in invalidates previously resolved JDBC URLs.
			TraceSQL.clearUrlMap();
		}
	}

	script = new File(root, "httpcall.plug");
	if (!script.canRead()) {
		PluginHttpCallTrace.plugIn = null;
	} else if (PluginHttpCallTrace.plugIn == null
			|| PluginHttpCallTrace.plugIn.lastModified != script.lastModified()) {
		PluginHttpCallTrace.plugIn = createIHttpCall(script);
	}

	script = new File(root, "counter.plug");
	if (!script.canRead()) {
		PluginCounter.plugIn = null;
	} else if (PluginCounter.plugIn == null
			|| PluginCounter.plugIn.lastModified != script.lastModified()) {
		PluginCounter.plugIn = createCounter(script);
	}
}
// Timestamp of the last compile attempt; guards against recompiling an unchanged script.
private long IHttpServiceCompile;

/**
 * Compiles an {@link AbstractHttpService} subclass from the given script using
 * javassist. The script must provide [start], [end] and [reject] sections; each
 * body is prefixed with local aliases $ctx/$req/$res for the three parameters.
 * Returns null when the script's timestamp equals the previous compile attempt
 * or when compilation fails (failures are logged, never thrown).
 * NOTE(review): if the plug-in was nulled out and the file reappears with the
 * same timestamp, the first guard returns null and the plug-in stays disabled —
 * confirm this is intended.
 */
private AbstractHttpService createHttpService(File script) {
	if (IHttpServiceCompile == script.lastModified())
		return null;
	IHttpServiceCompile = script.lastModified();
	try {
		HashMap<String, StringBuffer> bodyTable = loadFileText(script);
		String superName = AbstractHttpService.class.getName();
		// NOTE(review): package segment "plugIn" (capital I) differs from the
		// "plugin.impl" names used by the other create* methods — verify intended.
		String className = "scouter.agent.plugIn.impl.HttpServiceImpl";
		String METHOD_START = "start";
		String METHOD_END = "end";
		String METHOD_REJECT = "reject";
		// JVM-descriptor form of the (WrContext, WrRequest, WrResponse) parameter list.
		String SIGNATURE = nativeName(WrContext.class) + nativeName(WrRequest.class) + nativeName(WrResponse.class);
		String METHOD_P1 = WrContext.class.getName();
		String METHOD_P2 = WrRequest.class.getName();
		String METHOD_P3 = WrResponse.class.getName();
		if (bodyTable.containsKey(METHOD_START) == false)
			throw new CannotCompileException("no method body: " + METHOD_START);
		if (bodyTable.containsKey(METHOD_END) == false)
			throw new CannotCompileException("no method body: " + METHOD_END);
		if (bodyTable.containsKey(METHOD_REJECT) == false)
			throw new CannotCompileException("no method body: " + METHOD_REJECT);
		ClassPool cp = ClassPool.getDefault();
		// Make the agent jar itself visible to the javassist class pool.
		String jar = FileUtil.getJarFileName(PluginLoader.class);
		if (jar != null) {
			cp.appendClassPath(jar);
		}
		CtClass cc = cp.get(superName);
		CtClass impl = null;
		CtMethod method_start = null;
		CtMethod method_end = null;
		CtMethod method_reject = null;
		StringBuffer sb = null;
		try {
			// Reuse the previously generated class if present; defrost() allows
			// modifying a CtClass that was already converted to bytecode.
			impl = cp.get(className);
			impl.defrost();
			// START METHOD
			method_start = impl.getMethod(METHOD_START, "(" + SIGNATURE + ")V");
			// END METHOD
			method_end = impl.getMethod(METHOD_END, "(" + SIGNATURE + ")V");
			// REJECT METHOD
			method_reject = impl.getMethod(METHOD_REJECT, "(" + SIGNATURE + ")Z");
		} catch (NotFoundException e) {
			// First load: create the subclass with empty stub methods, which are
			// filled in via setBody() below.
			impl = cp.makeClass(className, cc);
			StringBuffer sb1 = new StringBuffer();
			sb1.append(METHOD_P1).append(" p1").append(",");
			sb1.append(METHOD_P2).append(" p2").append(",");
			sb1.append(METHOD_P3).append(" p3");
			// START METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(METHOD_START).append("(").append(sb1).append("){}");
			method_start = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_start);
			// END METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(METHOD_END).append("(").append(sb1).append("){}");
			method_end = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_end);
			// REJECT METHOD
			sb = new StringBuffer();
			sb.append("public boolean ").append(METHOD_REJECT).append("(").append(sb1).append("){return false;}");
			method_reject = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_reject);
		}
		// Common prologue exposing the parameters under the names the script uses.
		StringBuffer bodyPrefix = new StringBuffer();
		bodyPrefix.append("{");
		bodyPrefix.append(METHOD_P1).append(" $ctx=$1;");
		bodyPrefix.append(METHOD_P2).append(" $req=$2;");
		bodyPrefix.append(METHOD_P3).append(" $res=$3;");
		method_start.setBody(
			new StringBuffer().append(bodyPrefix).append(bodyTable.get(METHOD_START)).append("\n}").toString());
		method_end.setBody(
			new StringBuffer().append(bodyPrefix).append(bodyTable.get(METHOD_END)).append("\n}").toString());
		method_reject.setBody(
			new StringBuffer().append(bodyPrefix).append(bodyTable.get(METHOD_REJECT)).append("\n}").toString());
		// Load into a fresh URLClassLoader so a later reload can define the class again.
		Class c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
		AbstractHttpService plugin = (AbstractHttpService) c.newInstance();
		plugin.lastModified = script.lastModified();
		Logger.println("PLUG-IN : " + AbstractHttpService.class.getName() + " " + script.getName() + " loaded #"
			+ Hexa32.toString32(plugin.hashCode()));
		return plugin;
	} catch (CannotCompileException ee) {
		Logger.println("PLUG-IN : " + ee.getMessage());
	} catch (Throwable e) {
		Logger.println("A161", e);
	}
	return null;
}
/**
 * Splits a plug-in script into named method bodies.
 * A line of the form {@code [name]} starts a new section; subsequent lines are
 * trimmed and appended (newline-terminated) to that section's buffer. Text
 * before the first section header is discarded. Returns an empty map if the
 * file cannot be parsed.
 */
private HashMap<String, StringBuffer> loadFileText(File script) {
	// Buffer for text preceding the first [section]; intentionally never stored.
	StringBuffer sb = new StringBuffer();
	HashMap<String, StringBuffer> result = new HashMap<String, StringBuffer>();
	String txt = new String(FileUtil.readAll(script));
	try {
		BufferedReader r = new BufferedReader(new StringReader(txt));
		while (true) {
			// StringUtil.trim(null) is relied on to return null at end of input.
			String line = StringUtil.trim(r.readLine());
			if (line == null)
				break;
			if (line.startsWith("[") && line.endsWith("]")) {
				sb = new StringBuffer();
				result.put(line.substring(1, line.length() - 1), sb);
			} else {
				sb.append(line).append("\n");
			}
		}
	} catch (Exception e) {
		// Route through the agent logger, consistent with the rest of this class
		// (was e.printStackTrace(), which writes to stderr and bypasses the log).
		Logger.println("A163", e.toString());
	}
	return result;
}
// Timestamp of the last compile attempt.
// NOTE(review): shared by both service.plug and backthread.plug (reloadIfModified
// calls createAppService for both) — confirm the shared guard is intended.
private long IServiceTraceCompile;

/**
 * Compiles an {@link AbstractAppService} subclass from the given script using
 * javassist. The script must provide [start] and [end] sections; the start body
 * sees $ctx/$hook and the end body sees $ctx. Returns null when the script's
 * timestamp equals the previous compile attempt or when compilation fails.
 */
private AbstractAppService createAppService(File script) {
	if (IServiceTraceCompile == script.lastModified())
		return null;
	IServiceTraceCompile = script.lastModified();
	try {
		HashMap<String, StringBuffer> bodyTable = loadFileText(script);
		String superName = AbstractAppService.class.getName();
		String className = "scouter.agent.plugin.impl.ServiceTraceImpl";
		String START = "start";
		// JVM descriptor: void start(WrContext, HookArgs)
		String START_SIG = "(" + nativeName(WrContext.class) + nativeName(HookArgs.class) + ")V";
		String START_P1 = WrContext.class.getName();
		String START_P2 = HookArgs.class.getName();
		StringBuffer START_BODY = bodyTable.get(START);
		if (START_BODY == null)
			throw new CannotCompileException("no method body: " + START);
		String END = "end";
		// JVM descriptor: void end(WrContext)
		String END_SIG = "(" + nativeName(WrContext.class) + ")V";
		String END_P1 = WrContext.class.getName();
		StringBuffer END_BODY = bodyTable.get(END);
		if (END_BODY == null)
			throw new CannotCompileException("no method body: " + END);
		ClassPool cp = ClassPool.getDefault();
		// Make the agent jar itself visible to the javassist class pool.
		String jar = FileUtil.getJarFileName(PluginLoader.class);
		if (jar != null) {
			cp.appendClassPath(jar);
		}
		Class c = null;
		CtClass cc = cp.get(superName);
		CtClass impl = null;
		StringBuffer sb;
		CtMethod method_start = null;
		CtMethod method_end = null;
		try {
			// Reuse the previously generated class; defrost() permits re-editing.
			impl = cp.get(className);
			impl.defrost();
			// START METHOD
			method_start = impl.getMethod(START, START_SIG);
			// END METHOD
			method_end = impl.getMethod(END, END_SIG);
		} catch (NotFoundException e) {
			// First load: create the subclass with empty stubs, filled via setBody().
			impl = cp.makeClass(className, cc);
			// START METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(START).append("(");
			sb.append(START_P1).append(" p1 ").append(",");
			sb.append(START_P2).append(" p2");
			sb.append("){}");
			method_start = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_start);
			// END METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(END).append("(");
			sb.append(END_P1).append(" p1 ");
			sb.append("){}");
			method_end = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_end);
		}
		// start(): alias parameters as $ctx/$hook, then append the script body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(START_P1).append(" $ctx=$1;");
		sb.append(START_P2).append(" $hook=$2;");
		sb.append(START_BODY);
		sb.append("\n}");
		method_start.setBody(sb.toString());
		// end(): alias parameter as $ctx, then append the script body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(END_P1).append(" $ctx=$1;");
		sb.append(END_BODY);
		sb.append("\n}");
		method_end.setBody(sb.toString());
		// Load into a fresh URLClassLoader so a later reload can define the class again.
		c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
		AbstractAppService plugin = (AbstractAppService) c.newInstance();
		plugin.lastModified = script.lastModified();
		Logger.println("PLUG-IN : " + AbstractAppService.class.getName() + " " + script.getName() + " loaded #"
			+ Hexa32.toString32(plugin.hashCode()));
		return plugin;
	} catch (CannotCompileException ee) {
		Logger.println("PLUG-IN : " + ee.getMessage());
	} catch (Exception e) {
		Logger.println("A162", e);
	}
	return null;
}
// Timestamp of the last compile attempt.
// NOTE(review): shared by capture.plug and springControllerCapture.plug — confirm
// the shared guard is intended.
private long ICaptureCompile;

/**
 * Compiles an {@link AbstractCapture} subclass from the given script using
 * javassist. Reads the [args], [return] and [this] sections into the capArgs,
 * capReturn and capThis methods respectively.
 * NOTE(review): unlike the sibling create* methods, missing sections are NOT
 * checked here; a null body is appended as the literal text "null" into the
 * generated source, which then fails in setBody() with CannotCompileException —
 * confirm whether all three sections are meant to be mandatory.
 */
private AbstractCapture createICaptureTrace(File script) {
	if (ICaptureCompile == script.lastModified())
		return null;
	ICaptureCompile = script.lastModified();
	try {
		HashMap<String, StringBuffer> bodyTable = loadFileText(script);
		String superName = AbstractCapture.class.getName();
		String className = "scouter.agent.plugin.impl.CaptureImpl";
		String ARG = "capArgs";
		// JVM descriptor: void capArgs(WrContext, HookArgs)
		String ARG_SIG = "(" + nativeName(WrContext.class) + nativeName(HookArgs.class) + ")V";
		String ARG_P1 = WrContext.class.getName();
		String ARG_P2 = HookArgs.class.getName();
		StringBuffer ARG_BODY = bodyTable.get("args");
		String RTN = "capReturn";
		// JVM descriptor: void capReturn(WrContext, HookReturn)
		String RTN_SIG = "(" + nativeName(WrContext.class) + nativeName(HookReturn.class) + ")V";
		String RTN_P1 = WrContext.class.getName();
		String RTN_P2 = HookReturn.class.getName();
		StringBuffer RTN_BODY = bodyTable.get("return");
		String THIS = "capThis";
		// JVM descriptor: void capThis(WrContext, String, String, Object)
		String THIS_SIG = "(" + nativeName(WrContext.class) + nativeName(String.class) + nativeName(String.class)
			+ nativeName(Object.class) + ")V";
		String THIS_P1 = WrContext.class.getName();
		String THIS_P2 = String.class.getName();
		String THIS_P3 = String.class.getName();
		String THIS_P4 = "Object";
		StringBuffer THIS_BODY = bodyTable.get("this");
		ClassPool cp = ClassPool.getDefault();
		// Make the agent jar itself visible to the javassist class pool.
		String jar = FileUtil.getJarFileName(PluginLoader.class);
		if (jar != null) {
			cp.appendClassPath(jar);
		}
		Class c = null;
		CtClass cc = cp.get(superName);
		CtClass impl = null;
		CtMethod method_args;
		CtMethod method_return;
		CtMethod method_this;
		StringBuffer sb;
		try {
			// Reuse the previously generated class; defrost() permits re-editing.
			impl = cp.get(className);
			impl.defrost();
			// ARG METHOD
			method_args = impl.getMethod(ARG, ARG_SIG);
			// RETURN METHOD
			method_return = impl.getMethod(RTN, RTN_SIG);
			// THIS METHOD
			method_this = impl.getMethod(THIS, THIS_SIG);
		} catch (NotFoundException e) {
			// First load: create the subclass with empty stubs, filled via setBody().
			impl = cp.makeClass(className, cc);
			// ARG METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(ARG).append("(");
			sb.append(ARG_P1).append(" p1 ").append(",");
			sb.append(ARG_P2).append(" p2 ");
			sb.append("){}");
			method_args = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_args);
			// RTN METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(RTN).append("(");
			sb.append(RTN_P1).append(" p1 ").append(",");
			sb.append(RTN_P2).append(" p2 ");
			sb.append("){}");
			method_return = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_return);
			// THIS METHOD
			sb = new StringBuffer();
			sb.append("public void ").append(THIS).append("(");
			sb.append(THIS_P1).append(" p1 ").append(",");
			sb.append(THIS_P2).append(" p2 ").append(",");
			sb.append(THIS_P3).append(" p3 ").append(",");
			sb.append(THIS_P4).append(" p4 ");
			sb.append("){}");
			method_this = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method_this);
		}
		// capArgs(): alias parameters as $ctx/$hook, then append the script body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(ARG_P1).append(" $ctx=$1;");
		sb.append(ARG_P2).append(" $hook=$2;");
		sb.append(ARG_BODY);
		sb.append("\n}");
		method_args.setBody(sb.toString());
		// capReturn(): alias parameters as $ctx/$hook, then append the script body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(RTN_P1).append(" $ctx=$1;");
		sb.append(RTN_P2).append(" $hook=$2;");
		sb.append(RTN_BODY);
		sb.append("\n}");
		method_return.setBody(sb.toString());
		// capThis(): alias parameters as $ctx/$class/$desc/$this, then append the body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(THIS_P1).append(" $ctx=$1;");
		sb.append(THIS_P2).append(" $class=$2;");
		sb.append(THIS_P3).append(" $desc=$3;");
		sb.append(THIS_P4).append(" $this=$4;");
		sb.append(THIS_BODY);
		sb.append("\n}");
		method_this.setBody(sb.toString());
		// Load into a fresh URLClassLoader so a later reload can define the class again.
		c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
		AbstractCapture plugin = (AbstractCapture) c.newInstance();
		plugin.lastModified = script.lastModified();
		Logger.println("PLUG-IN : " + AbstractCapture.class.getName() + " " + script.getName() + " loaded #"
			+ Hexa32.toString32(plugin.hashCode()));
		return plugin;
	} catch (CannotCompileException ee) {
		Logger.println("PLUG-IN : " + ee.getMessage());
	} catch (Exception e) {
		Logger.println("A905", e);
	}
	return null;
}
// Timestamp of the last compile attempt; guards against recompiling an unchanged script.
private long IJdbcPoolCompile;

/**
 * Compiles an {@link AbstractJdbcPool} subclass from the given script using
 * javassist. Reads the [url] section into a String-returning url() method whose
 * body sees $ctx/$msg/$pool.
 * NOTE(review): a missing [url] section is not checked; the null body is
 * appended as literal "null" and fails later in setBody() — confirm intended.
 */
private AbstractJdbcPool createIJdbcPool(File script) {
	if (IJdbcPoolCompile == script.lastModified())
		return null;
	IJdbcPoolCompile = script.lastModified();
	try {
		HashMap<String, StringBuffer> bodyTable = loadFileText(script);
		String superName = AbstractJdbcPool.class.getName();
		String className = "scouter.agent.plugin.impl.JdbcPoolImpl";
		String URL = "url";
		// JVM descriptor: String url(WrContext, String, Object)
		String URL_SIG = "(" + nativeName(WrContext.class) + nativeName(String.class) + nativeName(Object.class)
			+ ")" + nativeName(String.class);
		String URL_P1 = WrContext.class.getName();
		String URL_P2 = String.class.getName();
		String URL_P3 = "Object";
		StringBuffer URL_BODY = bodyTable.get("url");
		ClassPool cp = ClassPool.getDefault();
		// Make the agent jar itself visible to the javassist class pool.
		String jar = FileUtil.getJarFileName(PluginLoader.class);
		if (jar != null) {
			cp.appendClassPath(jar);
		}
		Class c = null;
		CtClass cc = cp.get(superName);
		CtClass impl = null;
		CtMethod method = null;
		try {
			// Reuse the previously generated class; defrost() permits re-editing.
			impl = cp.get(className);
			impl.defrost();
			method = impl.getMethod(URL, URL_SIG);
		} catch (NotFoundException e) {
			// First load: create the subclass with a stub returning null.
			impl = cp.makeClass(className, cc);
			StringBuffer sb = new StringBuffer();
			sb.append("public String ").append(URL).append("(");
			sb.append(URL_P1).append(" p1 ").append(",");
			sb.append(URL_P2).append(" p2 ").append(",");
			sb.append(URL_P3).append(" p3 ");
			sb.append("){return null;}");
			method = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method);
		}
		// url(): alias parameters as $ctx/$msg/$pool, then append the script body.
		StringBuffer sb = new StringBuffer();
		sb.append("{");
		sb.append(URL_P1).append(" $ctx=$1;");
		sb.append(URL_P2).append(" $msg=$2;");
		sb.append(URL_P3).append(" $pool=$3;");
		sb.append(URL_BODY);
		sb.append("\n}");
		method.setBody(sb.toString());
		// Load into a fresh URLClassLoader so a later reload can define the class again.
		c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
		AbstractJdbcPool plugin = (AbstractJdbcPool) c.newInstance();
		plugin.lastModified = script.lastModified();
		Logger.println("PLUG-IN : " + AbstractJdbcPool.class.getName() + " " + script.getName() + " loaded #"
			+ Hexa32.toString32(plugin.hashCode()));
		return plugin;
	} catch (CannotCompileException ee) {
		Logger.println("PLUG-IN : " + ee.getMessage());
	} catch (Exception e) {
		Logger.println("A906", e);
	}
	return null;
}
// Timestamp of the last compile attempt; guards against recompiling an unchanged script.
private long IHttpCallCompile;

/**
 * Compiles an {@link AbstractHttpCall} subclass from the given script using
 * javassist. The script must provide a [call] section; its body sees $ctx/$req.
 * Returns null when the script's timestamp equals the previous compile attempt
 * or when compilation fails (failures are logged, never thrown).
 */
private AbstractHttpCall createIHttpCall(File script) {
	if (IHttpCallCompile == script.lastModified())
		return null;
	IHttpCallCompile = script.lastModified();
	try {
		HashMap<String, StringBuffer> bodyTable = loadFileText(script);
		String superName = AbstractHttpCall.class.getName();
		String className = "scouter.agent.plugin.impl.IHttCallTraceImpl";
		String CALL = "call";
		// JVM descriptor: void call(WrContext, WrHttpCallRequest)
		String CALL_SIG = "(" + nativeName(WrContext.class) + nativeName(WrHttpCallRequest.class) + ")V";
		String CALL_P1 = WrContext.class.getName();
		String CALL_P2 = WrHttpCallRequest.class.getName();
		StringBuffer CALL_BODY = bodyTable.get(CALL);
		if (CALL_BODY == null)
			throw new CannotCompileException("no method body: " + CALL);
		ClassPool cp = ClassPool.getDefault();
		// Make the agent jar itself visible to the javassist class pool.
		String jar = FileUtil.getJarFileName(PluginLoader.class);
		if (jar != null) {
			cp.appendClassPath(jar);
		}
		Class c = null;
		CtClass cc = cp.get(superName);
		CtClass impl = null;
		StringBuffer sb;
		CtMethod method = null;
		try {
			// Reuse the previously generated class; defrost() permits re-editing.
			impl = cp.get(className);
			impl.defrost();
			method = impl.getMethod(CALL, CALL_SIG);
		} catch (NotFoundException e) {
			// First load: create the subclass with an empty stub, filled via setBody().
			impl = cp.makeClass(className, cc);
			sb = new StringBuffer();
			sb.append("public void ").append(CALL).append("(");
			sb.append(CALL_P1).append(" p1 ").append(",");
			sb.append(CALL_P2).append(" p2");
			sb.append("){}");
			method = CtNewMethod.make(sb.toString(), impl);
			impl.addMethod(method);
		}
		// call(): alias parameters as $ctx/$req, then append the script body.
		sb = new StringBuffer();
		sb.append("{");
		sb.append(CALL_P1).append(" $ctx=$1;");
		sb.append(CALL_P2).append(" $req=$2;");
		sb.append(CALL_BODY);
		sb.append("\n}");
		method.setBody(sb.toString());
		// Load into a fresh URLClassLoader so a later reload can define the class again.
		c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
		AbstractHttpCall plugin = (AbstractHttpCall) c.newInstance();
		plugin.lastModified = script.lastModified();
		Logger.println("PLUG-IN : " + AbstractHttpCall.class.getName() + " " + script.getName() + " loaded #"
			+ Hexa32.toString32(plugin.hashCode()));
		return plugin;
	} catch (CannotCompileException ee) {
		Logger.println("PLUG-IN : " + ee.getMessage());
	} catch (Exception e) {
		Logger.println("A907", e);
	}
	return null;
}
// Last-modified timestamp of the script the current counter plug-in was
// (last attempted to be) compiled from; used to skip recompilation while the
// script file is unchanged.
private long ICounterCompile;

/**
 * Compiles the given plug-in script into a concrete {@link AbstractCounter}
 * subclass with Javassist and returns a new instance of it.
 *
 * @param script user-editable script file containing the body of the
 *               {@code counter} method (looked up via {@code loadFileText})
 * @return a freshly compiled plug-in instance, or {@code null} when the
 *         script is unchanged since the last call or compilation fails
 */
private AbstractCounter createCounter(File script) {
    // Unchanged since the last (attempted) compile -> nothing to do.
    if (ICounterCompile == script.lastModified())
        return null;
    // Record the timestamp up front so a broken script is not recompiled on
    // every invocation, only after it is edited again.
    ICounterCompile = script.lastModified();
    try {
        HashMap<String, StringBuffer> bodyTable = loadFileText(script);
        String superName = AbstractCounter.class.getName();
        String className = "scouter.agent.plugin.impl.CounterImpl";
        String METHOD_COUNTER = "counter";
        // JVM method descriptor: (PerfCounterPack) -> void
        String METHOD_SIGNATURE = "(" + nativeName(PerfCounterPack.class) + ")V";
        String METHOD_P1 = PerfCounterPack.class.getName();
        if (bodyTable.containsKey(METHOD_COUNTER) == false)
            throw new CannotCompileException("no method body: " + METHOD_COUNTER);
        ClassPool cp = ClassPool.getDefault();
        String jar = FileUtil.getJarFileName(PluginLoader.class);
        if (jar != null) {
            cp.appendClassPath(jar);
        }
        CtClass cc = cp.get(superName);
        CtClass impl = null;
        CtMethod method_counter = null;
        try {
            // Reuse a previously generated class: defrost so its method body
            // can be replaced with the new script text.
            impl = cp.get(className);
            impl.defrost();
            method_counter = impl.getMethod(METHOD_COUNTER, METHOD_SIGNATURE);
        } catch (NotFoundException e) {
            // First compilation: create the subclass with an empty counter() stub.
            impl = cp.makeClass(className, cc);
            StringBuffer sb = new StringBuffer();
            sb.append("public void ").append(METHOD_COUNTER).append("(").append(METHOD_P1).append(" p1){}");
            method_counter = CtNewMethod.make(sb.toString(), impl);
            impl.addMethod(method_counter);
        }
        // Build the real method body: alias the Javassist positional
        // parameter ($1) to the name the script is written against.
        StringBuffer body = new StringBuffer();
        body.append("{");
        body.append(METHOD_P1).append(" $pack=$1;");
        body.append(bodyTable.get(METHOD_COUNTER));
        body.append("\n}");
        method_counter.setBody(body.toString());
        // Load through a fresh class loader so redefinition on a later edit
        // does not clash with an already-loaded class.
        Class c = impl.toClass(new URLClassLoader(new URL[0], this.getClass().getClassLoader()), null);
        AbstractCounter plugin = (AbstractCounter) c.newInstance();
        plugin.lastModified = script.lastModified();
        Logger.println("PLUG-IN : " + AbstractCounter.class.getName() + " " + script.getName() + " loaded #"
                + Hexa32.toString32(plugin.hashCode()));
        return plugin;
    } catch (CannotCompileException ee) {
        // Script error: report the compile message only.
        Logger.println("PLUG-IN : " + ee.getMessage());
    } catch (Throwable e) {
        Logger.println("A161", e);
    }
    return null;
}
/**
 * Returns the JVM internal descriptor for an object type, e.g.
 * {@code Lscouter/agent/trace/WrContext;} — used when building Javassist
 * method signatures. Callers only pass non-primitive, non-array classes.
 */
private String nativeName(Class class1) {
    StringBuilder descriptor = new StringBuilder("L");
    descriptor.append(class1.getName().replace('.', '/'));
    descriptor.append(';');
    return descriptor.toString();
}
}
| |
package pt.up.fe.comp.aa;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.Pair;
import pt.up.fe.comp.aa.parser.aaBaseVisitor;
import pt.up.fe.comp.aa.parser.aaParser;
import pt.up.fe.comp.fsa.Operations;
import pt.up.fe.comp.utils.Producer;
import pt.up.fe.comp.utils.SemanticError;
import pt.up.fe.comp.utils.SymbolTable;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Interpreter for the "aa" language: walks the ANTLR parse tree, resolves
 * identifiers through a {@link SymbolTable}, and dispatches operations by
 * name onto the public static methods of {@link Operations} via reflection.
 */
public class AaVisitor extends aaBaseVisitor<Object> {
    public AaVisitor() {
        _symbols = new SymbolTable<>(true);
    }

    /**
     * @param lazy         whether symbol values are produced lazily
     * @param createScopes whether each statement list opens a new symbol scope
     */
    public AaVisitor(boolean lazy, boolean createScopes) {
        _symbols = new SymbolTable<>(lazy);
        _createScopes = createScopes;
    }

    /** Evaluates a single operation argument: identifier, string literal, or nested operation. */
    private class AaArgumentVisitor extends aaBaseVisitor<Object> {
        @Override
        public Object visitArg(@NotNull aaParser.ArgContext ctx) {
            if (ctx.IDENTIFIER() != null) {
                Object res = _symbols.get(ctx.IDENTIFIER().toString());
                if (res == null)
                    throw new SemanticError(ctx, "Invalid identifier '" + ctx.IDENTIFIER().getText() + "'");
                return res;
            } else if (ctx.STRING() != null) {
                // Strip the surrounding quote characters from the literal.
                String str = ctx.STRING().getText();
                str = str.substring(1, str.length() - 1);
                return str;
            } else if (ctx.operation() != null) {
                return AaVisitor.this.visit(ctx.operation());
            }
            throw new SemanticError(ctx, new IllegalArgumentException());
        }
    }

    private AaArgumentVisitor _argVisitor = new AaArgumentVisitor();
    private boolean _createScopes;
    private SymbolTable<Object> _symbols;

    /** Evaluates each statement in order; returns the value of the last one. */
    @Override
    public Object visitStmt_list(@NotNull aaParser.Stmt_listContext ctx) {
        if (_createScopes) _symbols.beginScope();
        Object result = null;
        for (aaParser.StmtContext stmt : ctx.stmt())
            result = visit(stmt);
        if (_createScopes) _symbols.endScope();
        return result;
    }

    @Override
    public Object visitStmt(@NotNull aaParser.StmtContext ctx) {
        assert (ctx.getChildCount() == 1);
        return visitChildren(ctx);
    }

    /**
     * Returns the static type of an argument without evaluating it, or
     * {@code null} when it cannot be determined.
     */
    private Class getArgType(@NotNull aaParser.ArgContext ctx) {
        if (ctx.IDENTIFIER() != null) {
            return _symbols.getType(ctx.IDENTIFIER().toString());
        } else if (ctx.STRING() != null) {
            return String.class;
        } else if (ctx.operation() != null) {
            return metaVisitOperation(ctx.operation()).a;
        }
        return null;
    }

    /**
     * Looks up the {@link Operations} method named {@code operation}
     * (first match in {@code getMethods()} order).
     *
     * @throws SemanticError if no such operation exists
     */
    private Method resolveOperation(@NotNull aaParser.OperationContext ctx, String operation) {
        for (Method m : Operations.class.getMethods()) {
            if (m.getName().equals(operation)) {
                return m;
            }
        }
        throw new SemanticError(ctx, "No such operation '" + operation + "'");
    }

    /**
     * Type-checks an operation call WITHOUT executing it and returns its
     * static return type paired with a producer that evaluates it on demand.
     *
     * @throws SemanticError if the operation does not exist or the argument
     *                       count/types do not match its signature
     */
    private Pair<Class, Producer<Object>> metaVisitOperation(@NotNull final aaParser.OperationContext ctx) {
        String operation = ctx.operator.getText();
        Method op = resolveOperation(ctx, operation);
        Class[] paramTypes = op.getParameterTypes();
        aaParser.Arg_listContext argumentListContext = ctx.arg_list();
        List<aaParser.ArgContext> args = argumentListContext.arg();
        // Guard the zero-parameter case before indexing paramTypes[length - 1].
        // visitOperation already did this; previously a zero-argument operation
        // made this method throw ArrayIndexOutOfBoundsException instead of a
        // SemanticError.
        if (paramTypes.length == 0) {
            if (args.size() != 0) {
                throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
            }
        } else {
            if (args.size() != paramTypes.length && !paramTypes[paramTypes.length - 1].isArray()) {
                throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
            }
            int i = 0, j = 0;
            for (; i < paramTypes.length && j < args.size(); ++i, ++j) {
                Class paramT = getArgType(args.get(j));
                if (paramTypes[i].isArray()) {
                    // Varargs-style parameter: consume as many trailing
                    // arguments of the component type as possible.
                    Class paramType = paramTypes[i].getComponentType();
                    while (j < args.size() && paramType.equals(paramT)) {
                        ++j;
                        if (j == args.size()) break;
                        paramT = getArgType(args.get(j));
                    }
                } else if (!paramTypes[i].equals(paramT))
                    throw new SemanticError(ctx, "No operation '" + operation + "' with argument " + Integer.toString(i) + " of type " + paramT.getSimpleName());
            }
            if (i < (paramTypes.length - (paramTypes[paramTypes.length - 1].isArray() ? 1 : 0)) || j < args.size())
                throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
        }
        Class returnType = op.getReturnType();
        return new Pair<Class, Producer<Object>>(returnType, new Producer<Object>() {
            @Override
            public Object produce() {
                // Defer actual evaluation until the value is requested.
                return visit(ctx);
            }
        });
    }

    /**
     * Binds a name to an operation result, another identifier, or a string
     * literal. Always returns {@code null} (attribution has no value).
     */
    @Override
    public Object visitAttribution(@NotNull aaParser.AttributionContext ctx) {
        String name = ctx.attribution_lhs().IDENTIFIER().getText();
        aaParser.Attribution_rhsContext rhs = ctx.attribution_rhs();
        if (rhs.operation() != null) {
            Pair<Class, Producer<Object>> classProducerPair = metaVisitOperation(rhs.operation());
            _symbols.addSymbol(name, classProducerPair.a, classProducerPair.b);
        } else if (rhs.IDENTIFIER() != null) {
            final String id = rhs.IDENTIFIER().getText();
            if (!_symbols.contains(id))
                throw new SemanticError(ctx, "Identifier " + id + " is not declared in the current scope");
            // Aliased symbol: resolve the referenced identifier lazily.
            _symbols.addSymbol(name, _symbols.getType(rhs.IDENTIFIER().getText()), new Producer<Object>() {
                @Override
                public Object produce() {
                    return _symbols.get(id);
                }
            });
        } else if (rhs.STRING() != null) {
            // Strip the surrounding quote characters from the literal.
            String string = rhs.STRING().getText();
            string = string.substring(1, string.length() - 1);
            final String finalString = string;
            _symbols.addSymbol(name, String.class, new Producer<Object>() {
                @Override
                public Object produce() {
                    return finalString;
                }
            });
        }
        return null;
    }

    /** if/else: evaluates the predicate and the selected branch only. */
    @Override
    public Object visitControl_expr(@NotNull aaParser.Control_exprContext ctx) {
        Object predEval = visit(ctx.predicate);
        if (!(predEval instanceof Boolean))
            throw new SemanticError(ctx, ctx.predicate.getText() + " is not a predicate");
        if ((Boolean) predEval) {
            return visit(ctx.trueCase);
        } else if (ctx.falseCase != null) {
            return visit(ctx.falseCase);
        }
        return null;
    }

    /**
     * Evaluates an operation call: resolves the {@link Operations} method,
     * evaluates and type-checks the arguments (packing trailing arguments
     * into an array for varargs-style parameters), then invokes it.
     *
     * @throws SemanticError if the operation does not exist, the arguments do
     *                       not match its signature, or invocation fails
     */
    @Override
    public Object visitOperation(@NotNull aaParser.OperationContext ctx) {
        String operation = ctx.operator.getText();
        Method op = resolveOperation(ctx, operation);
        Class[] paramTypes = op.getParameterTypes();
        List<Object> params = new ArrayList<>();
        aaParser.Arg_listContext argumentListContext = ctx.arg_list();
        List<aaParser.ArgContext> args = argumentListContext.arg();
        if (paramTypes.length != 0) {
            if (args.size() != paramTypes.length && !paramTypes[paramTypes.length - 1].isArray()) {
                throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
            }
            {
                int i = 0, j = 0;
                for (; i < paramTypes.length && j < args.size(); ++i, ++j) {
                    Object param = _argVisitor.visit(args.get(j));
                    if (paramTypes[i].isArray()) {
                        // Varargs-style parameter: collect trailing arguments
                        // of the component type into a typed array.
                        Class paramType = paramTypes[i].getComponentType();
                        List<Object> array = new ArrayList<>();
                        while (param != null && j < args.size() && paramType.isInstance(param)) {
                            array.add(param);
                            ++j;
                            if (j == args.size()) break;
                            param = _argVisitor.visit(args.get(j));
                        }
                        param = array.toArray((Object[]) Array.newInstance(paramType, array.size()));
                    } else if (!paramTypes[i].isInstance(param))
                        throw new SemanticError(ctx, "No operation '" + operation + "' with argument " + Integer.toString(i) + " of type " + (param != null ? param.getClass().getSimpleName() : "void"));
                    params.add(param);
                }
                // Varargs called with zero trailing arguments: pass an empty array.
                if (paramTypes[paramTypes.length - 1].isArray() && params.size() == (paramTypes.length - 1))
                    params.add(Array.newInstance(paramTypes[paramTypes.length - 1].getComponentType(), 0));
                if (i < (paramTypes.length - (paramTypes[paramTypes.length - 1].isArray() ? 1 : 0)) || j < args.size())
                    throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
            }
        } else if (args.size() != 0) {
            throw new SemanticError(ctx, "No operation '" + operation + "' with " + Integer.toString(args.size()) + " parameters defined");
        }
        try {
            // Operations methods are static, hence the null receiver.
            return op.invoke(null, params.toArray(new Object[params.size()]));
        } catch (IllegalArgumentException | IllegalAccessException e) {
            throw new SemanticError(ctx, e);
        } catch (InvocationTargetException e) {
            // Surface the underlying cause rather than the reflection wrapper.
            throw new SemanticError(ctx, e.getCause() != null ? e.getCause() : e);
        }
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util.state;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.VarIntCoder;
import com.google.cloud.dataflow.sdk.runners.worker.MetricTrackingWindmillServerStub;
import com.google.cloud.dataflow.sdk.runners.worker.StreamingDataflowWorker;
import com.google.cloud.dataflow.sdk.runners.worker.windmill.Windmill;
import com.google.cloud.dataflow.sdk.runners.worker.windmill.Windmill.KeyedGetDataRequest;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.protobuf.ByteString;
import com.google.protobuf.ByteString.Output;
import org.hamcrest.Matchers;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import java.io.IOException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
 * Tests for {@link WindmillStateReader}: verifies the GetDataRequest protos it
 * builds, the decoding of responses, batching of multiple futures into one
 * round trip, and error propagation for failed keys.
 */
@RunWith(JUnit4.class)
public class WindmillStateReaderTest {
    private static final VarIntCoder INT_CODER = VarIntCoder.of();
    private static final String COMPUTATION = "computation";
    private static final ByteString DATA_KEY = ByteString.copyFromUtf8("DATA_KEY");
    private static final long WORK_TOKEN = 5043L;
    private static final ByteString STATE_KEY_1 = ByteString.copyFromUtf8("key1");
    private static final ByteString STATE_KEY_2 = ByteString.copyFromUtf8("key2");
    private static final String STATE_FAMILY = "family";

    @Mock
    private MetricTrackingWindmillServerStub mockWindmill;

    private WindmillStateReader underTest;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        underTest = new WindmillStateReader(mockWindmill, COMPUTATION, DATA_KEY, WORK_TOKEN);
    }

    /**
     * Builds a Windmill {@code Value} holding {@code value} encoded with
     * {@link #INT_CODER}, optionally preceded by a single zero pad byte, and
     * timestamped at the maximum window timestamp.
     */
    private Windmill.Value intValue(int value, boolean padded) throws IOException {
        Output output = ByteString.newOutput();
        if (padded) {
            byte[] zero = {0x0};
            output.write(zero);
        }
        INT_CODER.encode(value, output, Coder.Context.OUTER);
        return Windmill.Value.newBuilder()
            .setData(output.toByteString())
            .setTimestamp(TimeUnit.MILLISECONDS.toMicros(BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()))
            .build();
    }

    @Test
    public void testReadList() throws Exception {
        Future<Iterable<Integer>> future = underTest.listFuture(STATE_KEY_1, STATE_FAMILY, INT_CODER);
        // Creating the future must not trigger the fetch yet.
        Mockito.verifyNoMoreInteractions(mockWindmill);
        Windmill.GetDataRequest.Builder expectedRequest = Windmill.GetDataRequest.newBuilder();
        expectedRequest
            .addRequestsBuilder().setComputationId(COMPUTATION)
            .addRequestsBuilder().setKey(DATA_KEY).setWorkToken(WORK_TOKEN)
            .addListsToFetch(Windmill.TagList.newBuilder()
                .setTag(STATE_KEY_1).setStateFamily(STATE_FAMILY).setEndTimestamp(Long.MAX_VALUE));
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder().setComputationId(COMPUTATION)
            .addDataBuilder().setKey(DATA_KEY)
            .addLists(Windmill.TagList.newBuilder()
                .setTag(STATE_KEY_1)
                .setStateFamily(STATE_FAMILY)
                .addValues(intValue(5, true))
                .addValues(intValue(6, true)));
        Mockito.when(mockWindmill.getStateData(expectedRequest.build())).thenReturn(response.build());
        Iterable<Integer> results = future.get();
        Mockito.verify(mockWindmill).getStateData(expectedRequest.build());
        Mockito.verifyNoMoreInteractions(mockWindmill);
        assertThat(results, Matchers.containsInAnyOrder(5, 6));
    }

    @Test
    public void testReadValue() throws Exception {
        Future<Integer> future = underTest.valueFuture(STATE_KEY_1, STATE_FAMILY, INT_CODER);
        // Creating the future must not trigger the fetch yet.
        Mockito.verifyNoMoreInteractions(mockWindmill);
        Windmill.GetDataRequest.Builder expectedRequest = Windmill.GetDataRequest.newBuilder();
        expectedRequest
            .addRequestsBuilder()
            .setComputationId(COMPUTATION)
            .addRequestsBuilder()
            .setKey(DATA_KEY)
            .setWorkToken(WORK_TOKEN)
            .addValuesToFetch(
                Windmill.TagValue.newBuilder()
                    .setTag(STATE_KEY_1)
                    .setStateFamily(STATE_FAMILY)
                    .build());
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder()
            .setComputationId(COMPUTATION)
            .addDataBuilder()
            .setKey(DATA_KEY)
            .addValues(
                Windmill.TagValue.newBuilder()
                    .setTag(STATE_KEY_1)
                    .setStateFamily(STATE_FAMILY)
                    .setValue(intValue(8, false)));
        Mockito.when(mockWindmill.getStateData(expectedRequest.build())).thenReturn(response.build());
        Integer result = future.get();
        Mockito.verify(mockWindmill).getStateData(expectedRequest.build());
        Mockito.verifyNoMoreInteractions(mockWindmill);
        assertThat(result, Matchers.equalTo(8));
    }

    @Test
    public void testReadWatermark() throws Exception {
        Future<Instant> future = underTest.watermarkFuture(STATE_KEY_1, STATE_FAMILY);
        // Creating the future must not trigger the fetch yet.
        Mockito.verifyNoMoreInteractions(mockWindmill);
        Windmill.GetDataRequest.Builder expectedRequest = Windmill.GetDataRequest.newBuilder();
        expectedRequest.addRequestsBuilder()
            .setComputationId(COMPUTATION)
            .addRequestsBuilder()
            .setKey(DATA_KEY)
            .setWorkToken(WORK_TOKEN)
            .addWatermarkHoldsToFetch(
                Windmill.WatermarkHold.newBuilder().setTag(STATE_KEY_1).setStateFamily(STATE_FAMILY));
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder().setComputationId(COMPUTATION)
            .addDataBuilder().setKey(DATA_KEY)
            .addWatermarkHolds(Windmill.WatermarkHold.newBuilder()
                .setTag(STATE_KEY_1)
                .setStateFamily(STATE_FAMILY)
                .addTimestamps(5000000)
                .addTimestamps(6000000));
        Mockito.when(mockWindmill.getStateData(expectedRequest.build())).thenReturn(response.build());
        Instant result = future.get();
        Mockito.verify(mockWindmill).getStateData(expectedRequest.build());
        // Timestamps come back in micros; the minimum hold (5000000us = 5000ms)
        // is the reported watermark.
        assertThat(result, Matchers.equalTo(new Instant(5000)));
    }

    @Test
    public void testBatching() throws Exception {
        // Reads two lists and verifies that we batch them up correctly.
        Future<Instant> watermarkFuture = underTest.watermarkFuture(STATE_KEY_2, STATE_FAMILY);
        Future<Iterable<Integer>> listFuture =
            underTest.listFuture(STATE_KEY_1, STATE_FAMILY, INT_CODER);
        Mockito.verifyNoMoreInteractions(mockWindmill);
        ArgumentCaptor<Windmill.GetDataRequest> request =
            ArgumentCaptor.forClass(Windmill.GetDataRequest.class);
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder().setComputationId(COMPUTATION)
            .addDataBuilder().setKey(DATA_KEY)
            .addWatermarkHolds(Windmill.WatermarkHold.newBuilder()
                .setTag(STATE_KEY_2)
                .setStateFamily(STATE_FAMILY)
                .addTimestamps(5000000)
                .addTimestamps(6000000))
            .addLists(Windmill.TagList.newBuilder()
                .setTag(STATE_KEY_1)
                .setStateFamily(STATE_FAMILY)
                .addValues(intValue(5, true))
                .addValues(intValue(100, true)));
        Mockito.when(mockWindmill.getStateData(Mockito.isA(Windmill.GetDataRequest.class)))
            .thenReturn(response.build());
        Instant result = watermarkFuture.get();
        Mockito.verify(mockWindmill).getStateData(request.capture());
        // Verify the request looks right: both fetches went out in a single
        // keyed request for the one computation/key pair.
        assertThat(request.getValue().getRequestsCount(), Matchers.equalTo(1));
        assertThat(request.getValue().getRequests(0).getComputationId(), Matchers.equalTo(COMPUTATION));
        assertThat(request.getValue().getRequests(0).getRequestsCount(), Matchers.equalTo(1));
        KeyedGetDataRequest keyedRequest = request.getValue().getRequests(0).getRequests(0);
        assertThat(keyedRequest.getKey(), Matchers.equalTo(DATA_KEY));
        assertThat(keyedRequest.getWorkToken(), Matchers.equalTo(WORK_TOKEN));
        assertThat(keyedRequest.getListsToFetchCount(), Matchers.equalTo(1));
        assertThat(keyedRequest.getListsToFetch(0).getEndTimestamp(), Matchers.equalTo(Long.MAX_VALUE));
        assertThat(keyedRequest.getListsToFetch(0).getTag(), Matchers.equalTo(STATE_KEY_1));
        assertThat(keyedRequest.getWatermarkHoldsToFetchCount(), Matchers.equalTo(1));
        assertThat(keyedRequest.getWatermarkHoldsToFetch(0).getTag(), Matchers.equalTo(STATE_KEY_2));
        // Verify the values returned to the user.
        assertThat(result, Matchers.equalTo(new Instant(5000)));
        Mockito.verifyNoMoreInteractions(mockWindmill);
        assertThat(listFuture.get(), Matchers.containsInAnyOrder(5, 100));
        Mockito.verifyNoMoreInteractions(mockWindmill);
        // And verify that getting a future again returns the already completed future.
        Future<Instant> watermarkFuture2 = underTest.watermarkFuture(STATE_KEY_2, STATE_FAMILY);
        assertTrue(watermarkFuture2.isDone());
    }

    @Test
    public void testNoStateFamily() throws Exception {
        // Same as testReadValue but with an empty state family string.
        Future<Integer> future = underTest.valueFuture(STATE_KEY_1, "", INT_CODER);
        Mockito.verifyNoMoreInteractions(mockWindmill);
        Windmill.GetDataRequest.Builder expectedRequest = Windmill.GetDataRequest.newBuilder();
        expectedRequest
            .addRequestsBuilder()
            .setComputationId(COMPUTATION)
            .addRequestsBuilder()
            .setKey(DATA_KEY)
            .setWorkToken(WORK_TOKEN)
            .addValuesToFetch(
                Windmill.TagValue.newBuilder()
                    .setTag(STATE_KEY_1)
                    .setStateFamily("")
                    .build());
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder()
            .setComputationId(COMPUTATION)
            .addDataBuilder()
            .setKey(DATA_KEY)
            .addValues(
                Windmill.TagValue.newBuilder()
                    .setTag(STATE_KEY_1)
                    .setStateFamily("")
                    .setValue(intValue(8, false)));
        Mockito.when(mockWindmill.getStateData(expectedRequest.build())).thenReturn(response.build());
        Integer result = future.get();
        Mockito.verify(mockWindmill).getStateData(expectedRequest.build());
        Mockito.verifyNoMoreInteractions(mockWindmill);
        assertThat(result, Matchers.equalTo(8));
    }

    @Test
    public void testKeyTokenInvalid() throws Exception {
        // Reads two lists and verifies that we batch them up correctly.
        Future<Instant> watermarkFuture = underTest.watermarkFuture(STATE_KEY_2, STATE_FAMILY);
        Future<Iterable<Integer>> listFuture =
            underTest.listFuture(STATE_KEY_1, STATE_FAMILY, INT_CODER);
        Mockito.verifyNoMoreInteractions(mockWindmill);
        Windmill.GetDataResponse.Builder response = Windmill.GetDataResponse.newBuilder();
        response
            .addDataBuilder().setComputationId(COMPUTATION)
            .addDataBuilder().setKey(DATA_KEY).setFailed(true);
        Mockito.when(mockWindmill.getStateData(Mockito.isA(Windmill.GetDataRequest.class)))
            .thenReturn(response.build());
        // Both futures batched into the failed request must surface the error.
        try {
            watermarkFuture.get();
            fail("Expected KeyTokenInvalidException");
        } catch (Throwable e) {
            assertTrue(StreamingDataflowWorker.isKeyTokenInvalidException(e));
        }
        try {
            listFuture.get();
            fail("Expected KeyTokenInvalidException");
        } catch (Throwable e) {
            assertTrue(StreamingDataflowWorker.isKeyTokenInvalidException(e));
        }
    }

    /**
     * Tests that multiple reads for the same tag in the same batch are cached. We can't compare
     * the futures since we've wrapped the delegate around them, so we just verify there is only
     * one queued lookup.
     */
    @Test
    public void testCachingWithinBatch() throws Exception {
        underTest.watermarkFuture(STATE_KEY_1, STATE_FAMILY);
        underTest.watermarkFuture(STATE_KEY_1, STATE_FAMILY);
        assertEquals(1, underTest.pendingLookups.size());
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.patterns;
import com.intellij.openapi.util.Key;
import com.intellij.util.PairProcessor;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Base class for element patterns: an initial (type) condition plus an ordered
 * list of additional {@link PatternCondition}s, all of which must accept a
 * candidate object. Instances are immutable; every {@code with(...)} style
 * method clones this pattern and returns the extended copy.
 *
 * @author peter
 */
public abstract class ObjectPattern<T, Self extends ObjectPattern<T, Self>> implements Cloneable, ElementPattern<T> {
    private InitialPatternCondition<T> myInitialCondition;
    // Space optimization: null when there are no extra conditions, a single
    // PatternCondition when there is exactly one, or a List<PatternCondition<T>>
    // otherwise. accepts() and getCondition() dispatch on this shape.
    private Object myConditions;

    protected ObjectPattern(@NotNull final InitialPatternCondition<T> condition) {
        myInitialCondition = condition;
        myConditions = null;
    }

    protected ObjectPattern(final Class<T> aClass) {
        // Default initial condition: instance-of check against the given class.
        this(new InitialPatternCondition<T>(aClass) {
            @Override
            public boolean accepts(@Nullable final Object o, final ProcessingContext context) {
                return aClass.isInstance(o);
            }
        });
    }

    @Override
    public final boolean accepts(@Nullable Object t) {
        return accepts(t, new ProcessingContext());
    }

    /**
     * Returns true when the initial condition and every additional condition
     * accept {@code o}. A null object can only pass when there are no
     * additional conditions (they require a non-null T).
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean accepts(@Nullable final Object o, final ProcessingContext context) {
        if (!myInitialCondition.accepts(o, context)) return false;
        if (myConditions == null) return true;
        if (o == null) return false;
        // myConditions is either a single condition or a list; see field comment.
        if (myConditions instanceof PatternCondition) {
            return ((PatternCondition)myConditions).accepts(o, context);
        }
        List<PatternCondition<T>> list = (List<PatternCondition<T>>)myConditions;
        final int listSize = list.size();
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < listSize; i++) {
            if (!list.get(i).accepts((T)o, context)) return false;
        }
        return true;
    }

    /** Materializes the packed {@link #myConditions} into an ElementPatternCondition. */
    @Override
    @NotNull
    @SuppressWarnings("unchecked")
    public final ElementPatternCondition<T> getCondition() {
        if (myConditions == null) {
            return new ElementPatternCondition<>(myInitialCondition);
        }
        if (myConditions instanceof PatternCondition) {
            PatternCondition<? super T> singleCondition = (PatternCondition)myConditions;
            return new ElementPatternCondition<>(myInitialCondition, Collections.singletonList(singleCondition));
        }
        return new ElementPatternCondition<>(myInitialCondition, (List)myConditions);
    }

    @NotNull
    public Self andNot(final ElementPattern pattern) {
        ElementPattern<T> not = StandardPatterns.not(pattern);
        return and(not);
    }

    @NotNull
    public Self andOr(@NotNull ElementPattern... patterns) {
        ElementPattern or = StandardPatterns.or(patterns);
        return and(or);
    }

    @NotNull
    public Self and(final ElementPattern pattern) {
        return with(new PatternConditionPlus<T, T>("and", pattern) {
            @Override
            public boolean processValues(T t, ProcessingContext context, PairProcessor<T, ProcessingContext> processor) {
                return processor.process(t, context);
            }
        });
    }

    @NotNull
    public Self equalTo(@NotNull final T o) {
        return with(new ValuePatternCondition<T>("equalTo") {
            @Override
            public boolean accepts(@NotNull final T t, final ProcessingContext context) {
                return t.equals(o);
            }
            @Override
            public Collection<T> getValues() {
                return Collections.singletonList(o);
            }
        });
    }

    @NotNull
    public Self oneOf(final T... values) {
        final Collection<T> list;
        // Pick the backing collection by size: a HashSet pays off only for
        // larger value sets; small ones use cheaper list views.
        final int length = values.length;
        if (length == 1) {
            list = Collections.singletonList(values[0]);
        }
        else if (length >= 11) {
            list = new HashSet<>(Arrays.asList(values));
        }
        else {
            list = Arrays.asList(values);
        }
        return with(new ValuePatternCondition<T>("oneOf") {
            @Override
            public Collection<T> getValues() {
                return list;
            }
            @Override
            public boolean accepts(@NotNull T t, ProcessingContext context) {
                return list.contains(t);
            }
        });
    }

    @NotNull
    public Self oneOf(final Collection<T> set) {
        return with(new ValuePatternCondition<T>("oneOf") {
            @Override
            public Collection<T> getValues() {
                return set;
            }
            @Override
            public boolean accepts(@NotNull T t, ProcessingContext context) {
                return set.contains(t);
            }
        });
    }

    /** NOTE: replaces this pattern's conditions entirely with a null-check. */
    @NotNull
    public Self isNull() {
        //noinspection Convert2Diamond (would break compilation: IDEA-168317)
        return adapt(new ElementPatternCondition<T>(new InitialPatternCondition(Object.class) {
            @Override
            public boolean accepts(@Nullable final Object o, final ProcessingContext context) {
                return o == null;
            }
        }));
    }

    /** NOTE: replaces this pattern's conditions entirely with a not-null check. */
    @NotNull
    public Self notNull() {
        //noinspection Convert2Diamond (would break compilation: IDEA-168317)
        return adapt(new ElementPatternCondition<T>(new InitialPatternCondition(Object.class) {
            @Override
            public boolean accepts(@Nullable final Object o, final ProcessingContext context) {
                return o != null;
            }
        }));
    }

    /** Stores the matched element into the processing context under {@code key}. */
    @NotNull
    public Self save(final Key<? super T> key) {
        return with(new PatternCondition<T>("save") {
            @Override
            public boolean accepts(@NotNull final T t, final ProcessingContext context) {
                context.put((Key)key, t);
                return true;
            }
        });
    }

    /** Stores the matched element into the processing context under {@code key}. */
    @NotNull
    public Self save(@NonNls final String key) {
        return with(new PatternCondition<T>("save") {
            @Override
            public boolean accepts(@NotNull final T t, final ProcessingContext context) {
                context.put(key, t);
                return true;
            }
        });
    }

    @NotNull
    public Self with(final PatternCondition<? super T> pattern) {
        final ElementPatternCondition<T> condition = getCondition().append(pattern);
        return adapt(condition);
    }

    /** Clones this pattern with the given condition, re-packing {@link #myConditions}. */
    @NotNull
    private Self adapt(final ElementPatternCondition<T> condition) {
        try {
            final ObjectPattern s = (ObjectPattern)clone();
            s.myInitialCondition = condition.getInitialCondition();
            List<PatternCondition<? super T>> conditions = condition.getConditions();
            s.myConditions = conditions.isEmpty() ? null : conditions.size() == 1 ? conditions.get(0) : conditions;
            //noinspection unchecked
            return (Self)s;
        }
        catch (CloneNotSupportedException e) {
            throw new RuntimeException(e);
        }
    }

    @NotNull
    public Self without(final PatternCondition<? super T> pattern) {
        return with(new PatternCondition<T>("without") {
            @Override
            public boolean accepts(@NotNull final T o, final ProcessingContext context) {
                return !pattern.accepts(o, context);
            }
        });
    }

    public String toString() {
        return getCondition().toString();
    }

    /** Concrete leaf pattern with no extra API; the usual entry point for builders. */
    public static class Capture<T> extends ObjectPattern<T,Capture<T>> {
        public Capture(final Class<T> aClass) {
            super(aClass);
        }

        public Capture(@NotNull final InitialPatternCondition<T> condition) {
            super(condition);
        }
    }
}
| |
/*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2004 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2013/03/03 Issue 546: Remove all template Javadoc comments
// ZAP: 2013/11/28 Issue 923: Allow individual rule thresholds and strengths to be set via GUI
// ZAP: 2016/01/19 Allow to obtain the ScanPolicy
// ZAP: 2016/04/04 Use StatusUI in scanners' dialogues
// ZAP: 2016/07/25 Use new AllCategoryTableModel's constructor
// ZAP: 2017/06/22 Focus the component that contains validation errors.
// ZAP: 2018/01/30 Do not rely on default locale for upper/lower case conversions (when locale is not important).
package org.zaproxy.zap.extension.ascan;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.EventListener;
import java.util.List;
import java.util.Locale;
import javax.swing.DefaultCellEditor;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.RowSorter;
import javax.swing.SortOrder;
import javax.swing.table.TableColumn;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Plugin;
import org.parosproxy.paros.core.scanner.Plugin.AlertThreshold;
import org.parosproxy.paros.core.scanner.Plugin.AttackStrength;
import org.parosproxy.paros.view.AbstractParamPanel;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.control.AddOn;
import org.zaproxy.zap.utils.DisplayUtils;
import org.zaproxy.zap.utils.ZapTextField;
import org.zaproxy.zap.view.LayoutHelper;
public class PolicyAllCategoryPanel extends AbstractParamPanel {
//private static final String ILLEGAL_CHRS = "/`?*\\<>|\":\t\n\r";
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(PolicyAllCategoryPanel.class);
// Policy identification: a free-text name field, or a selector combo when
// the panel is "switchable" between existing policies.
private ZapTextField policyName = null;
private JTable tableTest = null;
private JScrollPane jScrollPane = null;
private AllCategoryTableModel allCategoryTableModel = null;
private JComboBox<String> policySelector = null;
// Default threshold/strength controls plus their explanatory labels.
private JComboBox<String> comboThreshold = null;
private JLabel labelThresholdNotes = null;
private JComboBox<String> comboStrength = null;
private JLabel labelStrengthNotes = null;
// "Apply X to Y" bulk-update combos for threshold and strength.
private JComboBox<String> applyToThreshold = null;
private JComboBox<String> applyToStrength = null;
private JComboBox<String> applyToThresholdTarget = null;
private JComboBox<String> applyToStrengthTarget = null;
private ExtensionActiveScan extension;
private ScanPolicy policy;
// Name the policy had when the panel was opened; used to detect renames.
private String currentName;
private boolean switchable = false;
// Preferred column widths for the scanner table.
private static final int[] width = {300, 100, 100};
public PolicyAllCategoryPanel(Window parent, ExtensionActiveScan extension, ScanPolicy policy) {
this(parent, extension, policy, false);
}
public PolicyAllCategoryPanel(Window parent, ExtensionActiveScan extension, ScanPolicy policy, boolean switchable) {
super();
this.extension = extension;
this.policy = policy;
this.currentName = policy.getName();
this.switchable = switchable;
initialize();
}
/**
* This method initializes this
*/
private void initialize() {
this.setLayout(new GridBagLayout());
this.setSize(375, 205);
this.setName("categoryPanel");
// Add Attack settings section - a copy of the options dialog
// ---------------------------------------------
int row = 0;
this.add(new JLabel(Constant.messages.getString("ascan.policy.name.label")),
LayoutHelper.getGBC(0, row, 1, 0.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
if (this.switchable) {
this.add(getPolicySelector(),
LayoutHelper.getGBC(1, row, 2, 1.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
} else {
this.add(getPolicyName(),
LayoutHelper.getGBC(1, row, 2, 1.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
}
row++;
this.add(new JLabel(Constant.messages.getString("ascan.options.level.label")),
LayoutHelper.getGBC(0, row, 1, 0.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
this.add(getComboThreshold(),
LayoutHelper.getGBC(1, row, 1, 0.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
this.add(getThresholdNotes(),
LayoutHelper.getGBC(2, row, 1, 1.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
row++;
this.add(new JLabel(Constant.messages.getString("ascan.options.strength.label")),
LayoutHelper.getGBC(0, row, 1, 0.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
this.add(getComboStrength(),
LayoutHelper.getGBC(1, row, 1, 0.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
this.add(getStrengthNotes(),
LayoutHelper.getGBC(2, row, 1, 1.0D, 0, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2)));
// 'Apply to' controls
JPanel applyToPanel = new JPanel();
applyToPanel.setLayout(new GridBagLayout());
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.apply.label")),
LayoutHelper.getGBC(0, 0, 1, 0.0, new Insets(2, 2, 2, 2)));
applyToPanel.add(getApplyToThreshold(), LayoutHelper.getGBC(1, 0, 1, 0.0));
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.thresholdTo.label")),
LayoutHelper.getGBC(2, 0, 1, 0.0, new Insets(2, 2, 2, 2)));
applyToPanel.add(getApplyToThresholdTarget(), LayoutHelper.getGBC(3, 0, 1, 0.0));
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.rules.label")), LayoutHelper.getGBC(4, 0, 1, 0.0, new Insets(2, 2, 2, 2)));
JButton applyThresholdButton = new JButton(Constant.messages.getString("ascan.options.go.button"));
applyThresholdButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
applyThreshold (strToThreshold((String)getApplyToThreshold().getSelectedItem()),
(String)getApplyToThresholdTarget().getSelectedItem());
getAllCategoryTableModel().fireTableDataChanged();
}});
applyToPanel.add(applyThresholdButton, LayoutHelper.getGBC(5, 0, 1, 0.0));
applyToPanel.add(new JLabel(""), LayoutHelper.getGBC(6, 0, 1, 1.0)); // Spacer
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.apply.label")),
LayoutHelper.getGBC(0, 1, 1, 0.0, new Insets(2, 2, 2, 2)));
applyToPanel.add(getApplyToStrength(), LayoutHelper.getGBC(1, 1, 1, 0.0));
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.strengthTo.label")), LayoutHelper.getGBC(2, 1, 1, 0.0, new Insets(2, 2, 2, 2)));
applyToPanel.add(getApplyToStrengthTarget(), LayoutHelper.getGBC(3, 1, 1, 0.0));
applyToPanel.add(new JLabel(Constant.messages.getString("ascan.options.rules.label")), LayoutHelper.getGBC(4, 1, 1, 0.0, new Insets(2, 2, 2, 2)));
JButton applyStrengthButton = new JButton(Constant.messages.getString("ascan.options.go.button"));
applyStrengthButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
applyStrength (strToStrength((String)getApplyToStrength().getSelectedItem()),
(String)getApplyToStrengthTarget().getSelectedItem());
getAllCategoryTableModel().fireTableDataChanged();
}});
applyToPanel.add(applyStrengthButton, LayoutHelper.getGBC(5, 1, 1, 0.0));
applyToPanel.add(new JLabel(""), LayoutHelper.getGBC(6, 1, 1, 1.0)); // Spacer
row++;
this.add(applyToPanel,
LayoutHelper.getGBC(0, row, 3, 0.0D, 0.0D, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0)));
// Add the scrolling list of active plugin categories
row++;
this.add(getJScrollPane(),
LayoutHelper.getGBC(0, row, 3, 1.0D, 1.0D, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0)));
this.setThreshold(policy.getDefaultThreshold());
this.setStrength(policy.getDefaultStrength());
}
public void initialise(ScanPolicy policy) {
this.getPolicyName().setText(policy.getName());
}
private ZapTextField getPolicyName() {
if (policyName == null) {
policyName = new ZapTextField();
policyName.setText(policy.getName());
}
return policyName;
}
private JComboBox<String> getPolicySelector() {
if (policySelector == null) {
policySelector = new JComboBox<>();
for (String policy : extension.getPolicyManager().getAllPolicyNames()) {
policySelector.addItem(policy);
}
policySelector.addActionListener(new ActionListener(){
@Override
public void actionPerformed(ActionEvent e) {
String policyName = (String) policySelector.getSelectedItem();
if (policyName == null) {
return;
}
ScanPolicy policy;
try {
policy = extension.getPolicyManager().getPolicy(policyName);
if (policy != null) {
setScanPolicy(policy);
fireScanPolicyChanged(policy);
}
} catch (ConfigurationException e1) {
logger.error(e1.getMessage(), e1);
}
}});
}
return policySelector;
}
/**
* Reloads the scan policies, which will pick any new ones that have been defined and selects the policy with the given
* name.
*
* @param scanPolicyName the name of the policy that should be selected
* @since 2.5.0
*/
public void reloadPolicies(String scanPolicyName) {
DefaultComboBoxModel<String> policies = new DefaultComboBoxModel<>();
for (String policy : extension.getPolicyManager().getAllPolicyNames()) {
policies.addElement(policy);
}
getPolicySelector().setModel(policies);
getPolicySelector().setSelectedItem(scanPolicyName);
}
/**
* Reloads the scan policies, which will pick any new ones that have been defined
*/
public void reloadPolicies() {
// Ensure policySelector is initialized
Object selected = getPolicySelector().getSelectedItem();
reloadPolicies((String) selected);
}
private AlertThreshold strToThreshold(String str) {
if (str.equals(Constant.messages.getString("ascan.options.level.off"))) {
return AlertThreshold.OFF;
}
if (str.equals(Constant.messages.getString("ascan.options.level.low"))) {
return AlertThreshold.LOW;
}
if (str.equals(Constant.messages.getString("ascan.options.level.medium"))) {
return AlertThreshold.MEDIUM;
}
if (str.equals(Constant.messages.getString("ascan.options.level.high"))) {
return AlertThreshold.HIGH;
}
return AlertThreshold.DEFAULT;
}
private JComboBox<String> getApplyToThreshold() {
if (applyToThreshold == null) {
applyToThreshold = new JComboBox<>();
applyToThreshold.addItem(Constant.messages.getString("ascan.options.level.default"));
applyToThreshold.addItem(Constant.messages.getString("ascan.options.level.off"));
applyToThreshold.addItem(Constant.messages.getString("ascan.options.level.low"));
applyToThreshold.addItem(Constant.messages.getString("ascan.options.level.medium"));
applyToThreshold.addItem(Constant.messages.getString("ascan.options.level.high"));
}
return applyToThreshold;
}
private JComboBox<String> getApplyToThresholdTarget() {
if (applyToThresholdTarget == null) {
applyToThresholdTarget = createStatusComboBox();
}
return applyToThresholdTarget;
}
/**
* Creates a {@code JComboBox} with scanners' statuses, "all", release, beta and alpha.
*
* @return a {@code JComboBox} with scanners' statuses
*/
private JComboBox<String> createStatusComboBox() {
JComboBox<String> comboBox = new JComboBox<>();
comboBox.addItem(Constant.messages.getString("ascan.policy.table.quality.all"));
View view = View.getSingleton();
comboBox.addItem(view.getStatusUI(AddOn.Status.release).toString());
comboBox.addItem(view.getStatusUI(AddOn.Status.beta).toString());
comboBox.addItem(view.getStatusUI(AddOn.Status.alpha).toString());
return comboBox;
}
private void applyThreshold(AlertThreshold threshold, String target) {
for (Plugin plugin : policy.getPluginFactory().getAllPlugin()) {
if (hasSameStatus(plugin, target)) {
plugin.setAlertThreshold(threshold);
}
}
}
/**
* Tells whether or not the given {@code scanner} has the given {@code status}.
* <p>
* If the given {@code status} represents all statuses it returns always {@code true}.
*
* @param scanner the scanner that will be checked
* @param status the status to check
* @return {@code true} if it has the same status, {@code false} otherwise.
* @see Plugin#getStatus()
*/
private boolean hasSameStatus(Plugin scanner, String status) {
if (status.equals(Constant.messages.getString("ascan.policy.table.quality.all"))) {
return true;
}
return status.equals(View.getSingleton().getStatusUI(scanner.getStatus()).toString());
}
private AttackStrength strToStrength(String str) {
if (str.equals(Constant.messages.getString("ascan.options.strength.low"))) {
return AttackStrength.LOW;
}
if (str.equals(Constant.messages.getString("ascan.options.strength.medium"))) {
return AttackStrength.MEDIUM;
}
if (str.equals(Constant.messages.getString("ascan.options.strength.high"))) {
return AttackStrength.HIGH;
}
if (str.equals(Constant.messages.getString("ascan.options.strength.insane"))) {
return AttackStrength.INSANE;
}
return AttackStrength.DEFAULT;
}
private JComboBox<String> getApplyToStrength() {
if (applyToStrength == null) {
applyToStrength = new JComboBox<>();
applyToStrength.addItem(Constant.messages.getString("ascan.options.strength.default"));
applyToStrength.addItem(Constant.messages.getString("ascan.options.strength.low"));
applyToStrength.addItem(Constant.messages.getString("ascan.options.strength.medium"));
applyToStrength.addItem(Constant.messages.getString("ascan.options.strength.high"));
applyToStrength.addItem(Constant.messages.getString("ascan.options.strength.insane"));
}
return applyToStrength;
}
private JComboBox<String> getApplyToStrengthTarget() {
if (applyToStrengthTarget == null) {
applyToStrengthTarget = createStatusComboBox();
}
return applyToStrengthTarget;
}
private void applyStrength(AttackStrength strength, String target) {
for (Plugin plugin : policy.getPluginFactory().getAllPlugin()) {
if (hasSameStatus(plugin, target)) {
plugin.setAttackStrength(strength);
}
}
}
private void setThreshold(AlertThreshold threshold) {
getComboThreshold().setSelectedItem(
Constant.messages.getString("ascan.options.level." + threshold.name().toLowerCase(Locale.ROOT)));
getThresholdNotes().setText(
Constant.messages.getString("ascan.options.level." + threshold.name().toLowerCase(Locale.ROOT) + ".label"));
}
private void setStrength(AttackStrength strength) {
getComboStrength().setSelectedItem(
Constant.messages.getString("ascan.options.strength." + strength.name().toLowerCase(Locale.ROOT)));
getStrengthNotes().setText(
Constant.messages.getString("ascan.options.strength." + strength.name().toLowerCase(Locale.ROOT) + ".label"));
}
/**
* This method initializes tableTest
*
* @return javax.swing.JTable
*/
private JTable getTableTest() {
if (tableTest == null) {
tableTest = new JTable();
tableTest.setModel(getAllCategoryTableModel());
tableTest.setRowHeight(DisplayUtils.getScaledSize(18));
tableTest.setIntercellSpacing(new java.awt.Dimension(1, 1));
tableTest.setAutoCreateRowSorter(true);
//Default sort by name (column 0)
List<RowSorter.SortKey> sortKeys = new ArrayList<RowSorter.SortKey>(1);
sortKeys.add(new RowSorter.SortKey(0, SortOrder.ASCENDING));
tableTest.getRowSorter().setSortKeys(sortKeys);
for (int i = 0; i < tableTest.getColumnCount()-1; i++) {
TableColumn column = tableTest.getColumnModel().getColumn(i);
column.setPreferredWidth(width[i]);
}
JComboBox<String> jcb1 = new JComboBox<>();
jcb1.addItem(""); // Always show a blank one for where they are not all the same
for (AlertThreshold level : AlertThreshold.values()) {
jcb1.addItem(Constant.messages.getString("ascan.policy.level." + level.name().toLowerCase(Locale.ROOT)));
}
tableTest.getColumnModel().getColumn(1).setCellEditor(new DefaultCellEditor(jcb1));
JComboBox<String> jcb2 = new JComboBox<>();
jcb2.addItem(""); // Always show a blank one for where they are not all the same
for (AttackStrength level : AttackStrength.values()) {
jcb2.addItem(Constant.messages.getString("ascan.policy.level." + level.name().toLowerCase(Locale.ROOT)));
}
tableTest.getColumnModel().getColumn(2).setCellEditor(new DefaultCellEditor(jcb2));
}
return tableTest;
}
public void setScanPolicy(ScanPolicy scanPolicy) {
if (! switchable) {
throw new InvalidParameterException("Cannot change policy if the panel has not been defined as switchable");
}
this.policy = scanPolicy;
this.getPolicySelector().setSelectedItem(scanPolicy.getName());
this.setThreshold(scanPolicy.getDefaultThreshold());
this.setStrength(scanPolicy.getDefaultStrength());
this.getAllCategoryTableModel().setPluginFactory(scanPolicy.getPluginFactory());
}
@Override
public void initParam(Object obj) {
}
@Override
public void validateParam(Object obj) throws Exception {
String newName = getPolicyName().getText();
if (newName.length() == 0) {
getPolicyName().requestFocusInWindow();
throw new Exception(Constant.messages.getString("ascan.policy.warn.noname"));
} else if (! extension.getPolicyManager().isLegalPolicyName(newName)) {
getPolicyName().requestFocusInWindow();
throw new Exception(Constant.messages.getString("ascan.policy.warn.badname", PolicyManager.ILLEGAL_POLICY_NAME_CHRS));
} else if (! newName.equals(currentName)) {
// Name changed
if (extension.getPolicyManager().getAllPolicyNames().contains(newName)) {
getPolicyName().requestFocusInWindow();
throw new Exception(Constant.messages.getString("ascan.policy.warn.exists"));
}
}
}
@Override
public void saveParam(Object obj) throws Exception {
this.policy.setName(getPolicyName().getText());
}
/**
* This method initializes jScrollPane
*
* @return javax.swing.JScrollPane
*/
private JScrollPane getJScrollPane() {
if (jScrollPane == null) {
jScrollPane = new JScrollPane();
jScrollPane.setViewportView(getTableTest());
jScrollPane.setBorder(javax.swing.BorderFactory.createEtchedBorder(javax.swing.border.EtchedBorder.RAISED));
}
return jScrollPane;
}
/**
* This method initializes categoryTableModel
*
* @return TableModel
*/
private AllCategoryTableModel getAllCategoryTableModel() {
if (allCategoryTableModel == null) {
allCategoryTableModel = new AllCategoryTableModel(policy.getPluginFactory());
}
return allCategoryTableModel;
}
private JLabel getThresholdNotes() {
if (labelThresholdNotes == null) {
labelThresholdNotes = new JLabel();
}
return labelThresholdNotes;
}
private JComboBox<String> getComboThreshold() {
if (comboThreshold == null) {
comboThreshold = new JComboBox<>();
comboThreshold.addItem(Constant.messages.getString("ascan.options.level.low"));
comboThreshold.addItem(Constant.messages.getString("ascan.options.level.medium"));
comboThreshold.addItem(Constant.messages.getString("ascan.options.level.high"));
comboThreshold.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
// Set the explanation and save
if (comboThreshold.getSelectedItem().equals(Constant.messages.getString("ascan.options.level.low"))) {
getThresholdNotes().setText(Constant.messages.getString("ascan.options.level.low.label"));
policy.setDefaultThreshold(AlertThreshold.LOW);
} else if (comboThreshold.getSelectedItem().equals(Constant.messages.getString("ascan.options.level.medium"))) {
getThresholdNotes().setText(Constant.messages.getString("ascan.options.level.medium.label"));
policy.setDefaultThreshold(AlertThreshold.MEDIUM);
} else {
getThresholdNotes().setText(Constant.messages.getString("ascan.options.level.high.label"));
policy.setDefaultThreshold(AlertThreshold.HIGH);
}
}
});
}
return comboThreshold;
}
private JLabel getStrengthNotes() {
if (labelStrengthNotes == null) {
labelStrengthNotes = new JLabel();
}
return labelStrengthNotes;
}
private JComboBox<String> getComboStrength() {
if (comboStrength == null) {
comboStrength = new JComboBox<>();
comboStrength.addItem(Constant.messages.getString("ascan.options.strength.low"));
comboStrength.addItem(Constant.messages.getString("ascan.options.strength.medium"));
comboStrength.addItem(Constant.messages.getString("ascan.options.strength.high"));
comboStrength.addItem(Constant.messages.getString("ascan.options.strength.insane"));
comboStrength.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
// Set the explanation and save
if (comboStrength.getSelectedItem().equals(Constant.messages.getString("ascan.options.strength.low"))) {
getStrengthNotes().setText(Constant.messages.getString("ascan.options.strength.low.label"));
policy.setDefaultStrength(AttackStrength.LOW);
} else if (comboStrength.getSelectedItem().equals(Constant.messages.getString("ascan.options.strength.medium"))) {
getStrengthNotes().setText(Constant.messages.getString("ascan.options.strength.medium.label"));
policy.setDefaultStrength(AttackStrength.MEDIUM);
} else if (comboStrength.getSelectedItem().equals(Constant.messages.getString("ascan.options.strength.high"))) {
getStrengthNotes().setText(Constant.messages.getString("ascan.options.strength.high.label"));
policy.setDefaultStrength(AttackStrength.HIGH);
} else {
getStrengthNotes().setText(Constant.messages.getString("ascan.options.strength.insane.label"));
policy.setDefaultStrength(AttackStrength.INSANE);
}
}
});
}
return comboStrength;
}
@Override
public String getHelpIndex() {
return "ui.dialogs.scanpolicy";
}
/**
* Adds the given {@code listener} to the list that's notified of each change in the selected scan policy.
*
* @param listener the listener that will be added
* @since 2.5.0
*/
public void addScanPolicyChangedEventListener(ScanPolicyChangedEventListener listener) {
listenerList.add(ScanPolicyChangedEventListener.class, listener);
}
/**
* Removes the given {@code listener} from the list that's notified of each change in the selected scan policy.
*
* @param listener the listener that will be removed
* @since 2.5.0
*/
public void removeScanPolicyChangedEventListener(ScanPolicyChangedEventListener listener) {
listenerList.remove(ScanPolicyChangedEventListener.class, listener);
}
private void fireScanPolicyChanged(ScanPolicy scanPolicy) {
Object[] listeners = listenerList.getListenerList();
for (int i = listeners.length - 2; i >= 0; i -= 2) {
if (listeners[i] == ScanPolicyChangedEventListener.class) {
((ScanPolicyChangedEventListener) listeners[i + 1]).scanPolicyChanged(scanPolicy);
}
}
}
/**
* The listener interface for receiving notifications of changes in the selected scan policy.
*
* @since 2.5.0
* @see PolicyAllCategoryPanel#addScanPolicyChangedEventListener(ScanPolicyChangedEventListener)
* @see PolicyAllCategoryPanel#removeScanPolicyChangedEventListener(ScanPolicyChangedEventListener)
*/
public interface ScanPolicyChangedEventListener extends EventListener {
/**
* Notifies that the selected scan policy was changed.
*
* @param scanPolicy the new selected scan policy
*/
public void scanPolicyChanged(ScanPolicy scanPolicy);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.filter.ClusteringIndexSliceFilter;
import org.apache.cassandra.db.filter.ColumnFilter;
import org.apache.cassandra.db.filter.DataLimits;
import org.apache.cassandra.db.filter.RowFilter;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.partitions.FilteredPartition;
import org.apache.cassandra.db.partitions.PartitionIterator;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterator;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterators;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.db.rows.SerializationHelper;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.db.rows.UnfilteredRowIterators;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.util.DataInputBuffer;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import static org.junit.Assert.assertEquals;
/**
 * Tests for {@link ReadCommand}: that aborted commands return no data, and that
 * serialized single-partition group results can be deserialized, merged and
 * filtered back into the expected rows.
 */
public class ReadCommandTest
{
    private static final String KEYSPACE = "ReadCommandTest";
    private static final String CF1 = "Standard1";
    private static final String CF2 = "Standard2";
    private static final String CF3 = "Standard3";

    /**
     * Defines the schema used by the tests: one standard table (CF1), one small
     * clustered table (CF2, columns a/b) and one wider clustered table (CF3,
     * columns a-f).
     */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        DatabaseDescriptor.daemonInitialization();

        CFMetaData metadata1 = SchemaLoader.standardCFMD(KEYSPACE, CF1);

        CFMetaData metadata2 = CFMetaData.Builder.create(KEYSPACE, CF2)
                                                 .addPartitionKey("key", BytesType.instance)
                                                 .addClusteringColumn("col", AsciiType.instance)
                                                 .addRegularColumn("a", AsciiType.instance)
                                                 .addRegularColumn("b", AsciiType.instance).build();

        CFMetaData metadata3 = CFMetaData.Builder.create(KEYSPACE, CF3)
                                                 .addPartitionKey("key", BytesType.instance)
                                                 .addClusteringColumn("col", AsciiType.instance)
                                                 .addRegularColumn("a", AsciiType.instance)
                                                 .addRegularColumn("b", AsciiType.instance)
                                                 .addRegularColumn("c", AsciiType.instance)
                                                 .addRegularColumn("d", AsciiType.instance)
                                                 .addRegularColumn("e", AsciiType.instance)
                                                 .addRegularColumn("f", AsciiType.instance).build();

        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE,
                                    KeyspaceParams.simple(1),
                                    metadata1,
                                    metadata2,
                                    metadata3);
    }

    /**
     * A partition-range read returns both partitions (one flushed, one in the
     * memtable); after {@code abort()} the same command returns nothing.
     */
    @Test
    public void testPartitionRangeAbort() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF1);

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key1"))
                .clustering("Column1")
                .add("val", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        // Flush so the second partition lives in the memtable while the first is on disk.
        cfs.forceBlockingFlush();

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key2"))
                .clustering("Column1")
                .add("val", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        ReadCommand readCommand = Util.cmd(cfs).build();
        assertEquals(2, Util.getAll(readCommand).size());

        readCommand.abort();
        assertEquals(0, Util.getAll(readCommand).size());
    }

    /**
     * A single-partition slice read returns both rows of the partition (one
     * flushed, one in the memtable); after {@code abort()} it returns nothing.
     */
    @Test
    public void testSinglePartitionSliceAbort() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
        cfs.truncateBlocking();

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key"))
                .clustering("cc")
                .add("a", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        cfs.forceBlockingFlush();

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key"))
                .clustering("dd")
                .add("a", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).build();

        List<FilteredPartition> partitions = Util.getAll(readCommand);
        assertEquals(1, partitions.size());
        assertEquals(2, partitions.get(0).rowCount());

        readCommand.abort();
        assertEquals(0, Util.getAll(readCommand).size());
    }

    /**
     * Same as {@link #testSinglePartitionSliceAbort()} but reading by explicit
     * clustering names rather than a slice.
     */
    @Test
    public void testSinglePartitionNamesAbort() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF2);
        cfs.truncateBlocking();

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key"))
                .clustering("cc")
                .add("a", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        cfs.forceBlockingFlush();

        new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes("key"))
                .clustering("dd")
                .add("a", ByteBufferUtil.bytes("abcd"))
                .build()
                .apply();

        ReadCommand readCommand = Util.cmd(cfs, Util.dk("key")).includeRow("cc").includeRow("dd").build();

        List<FilteredPartition> partitions = Util.getAll(readCommand);
        assertEquals(1, partitions.size());
        assertEquals(2, partitions.get(0).rowCount());

        readCommand.abort();
        assertEquals(0, Util.getAll(readCommand).size());
    }

    /**
     * Writes several groups of rows (including row deletions), serializes the
     * result of a grouped single-partition read of each group, then deserializes
     * all of them, merges and filters the iterators and checks that exactly the
     * expected live clustering rows come back, in order.
     */
    @Test
    public void testSinglePartitionGroupMerge() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE).getColumnFamilyStore(CF3);

        String[][][] groups = new String[][][] {
            new String[][] {
                new String[] { "1", "key1", "aa", "a" }, // "1" indicates to create the data, "-1" to delete the row
                new String[] { "1", "key2", "bb", "b" },
                new String[] { "1", "key3", "cc", "c" }
            },
            new String[][] {
                new String[] { "1", "key3", "dd", "d" },
                new String[] { "1", "key2", "ee", "e" },
                new String[] { "1", "key1", "ff", "f" }
            },
            new String[][] {
                new String[] { "1", "key6", "aa", "a" },
                new String[] { "1", "key5", "bb", "b" },
                new String[] { "1", "key4", "cc", "c" }
            },
            new String[][] {
                new String[] { "-1", "key6", "aa", "a" },
                new String[] { "-1", "key2", "bb", "b" }
            }
        };

        // Given the data above, when the keys are sorted and the deletions removed, we should
        // get these clustering rows in this order
        String[] expectedRows = new String[] { "aa", "ff", "ee", "cc", "dd", "cc", "bb"};

        List<ByteBuffer> buffers = new ArrayList<>(groups.length);
        int nowInSeconds = FBUtilities.nowInSeconds();
        ColumnFilter columnFilter = ColumnFilter.allRegularColumnsBuilder(cfs.metadata).build();
        RowFilter rowFilter = RowFilter.create();
        Slice slice = Slice.make(ClusteringBound.BOTTOM, ClusteringBound.TOP);
        ClusteringIndexSliceFilter sliceFilter = new ClusteringIndexSliceFilter(Slices.with(cfs.metadata.comparator, slice), false);

        for (String[][] group : groups)
        {
            cfs.truncateBlocking();

            List<SinglePartitionReadCommand> commands = new ArrayList<>(group.length);

            for (String[] data : group)
            {
                if (data[0].equals("1"))
                {
                    new RowUpdateBuilder(cfs.metadata, 0, ByteBufferUtil.bytes(data[1]))
                            .clustering(data[2])
                            .add(data[3], ByteBufferUtil.bytes("blah"))
                            .build()
                            .apply();
                }
                else
                {
                    RowUpdateBuilder.deleteRow(cfs.metadata, FBUtilities.timestampMicros(), ByteBufferUtil.bytes(data[1]), data[2]).apply();
                }
                commands.add(SinglePartitionReadCommand.create(cfs.metadata, nowInSeconds, columnFilter, rowFilter, DataLimits.NONE, Util.dk(data[1]), sliceFilter));
            }

            cfs.forceBlockingFlush();

            // Execute the grouped read locally and capture its serialized form.
            ReadQuery query = new SinglePartitionReadCommand.Group(commands, DataLimits.NONE);

            try (ReadExecutionController executionController = query.executionController();
                 UnfilteredPartitionIterator iter = query.executeLocally(executionController);
                 DataOutputBuffer buffer = new DataOutputBuffer())
            {
                UnfilteredPartitionIterators.serializerForIntraNode().serialize(iter,
                                                                                columnFilter,
                                                                                buffer,
                                                                                MessagingService.current_version);
                buffers.add(buffer.buffer());
            }
        }

        // deserialize, merge and check the results are all there
        List<UnfilteredPartitionIterator> iterators = new ArrayList<>();

        for (ByteBuffer buffer : buffers)
        {
            try (DataInputBuffer in = new DataInputBuffer(buffer, true))
            {
                iterators.add(UnfilteredPartitionIterators.serializerForIntraNode().deserialize(in,
                                                                                               MessagingService.current_version,
                                                                                               cfs.metadata,
                                                                                               columnFilter,
                                                                                               SerializationHelper.Flag.LOCAL));
            }
        }

        // Merge all group iterators and filter out tombstoned rows; the no-op merge
        // listener is required by the API but we only care about the merged output.
        try (PartitionIterator partitionIterator = UnfilteredPartitionIterators.mergeAndFilter(iterators,
                                                                                              nowInSeconds,
                                                                                              new UnfilteredPartitionIterators.MergeListener()
        {
            public UnfilteredRowIterators.MergeListener getRowMergeListener(DecoratedKey partitionKey, List<UnfilteredRowIterator> versions)
            {
                return null;
            }

            public void close()
            {
            }
        }))
        {
            int i = 0;
            int numPartitions = 0;
            while (partitionIterator.hasNext())
            {
                numPartitions++;
                try (RowIterator rowIterator = partitionIterator.next())
                {
                    while (rowIterator.hasNext())
                    {
                        Row row = rowIterator.next();
                        assertEquals("col=" + expectedRows[i++], row.clustering().toString(cfs.metadata));
                        //System.out.print(row.toString(cfs.metadata, true));
                    }
                }
            }

            // key6 is fully deleted, leaving 5 live partitions (key1..key5).
            assertEquals(5, numPartitions);
            assertEquals(expectedRows.length, i);
        }
    }
}
| |
/*
* Java OTR library Copyright (C) 2008-2009 Ian Goldberg, Muhaimeen Ashraf,
* Andrew Chung, Can Tang
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of version 2.1 of the GNU Lesser General Public License as
* published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/* Ported to otr4j by devrandom */
package net.java.otr4j.crypto;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import net.java.otr4j.io.OtrInputStream;
import net.java.otr4j.io.OtrOutputStream;
import net.java.otr4j.io.SerializationUtils;
public class SM {
/**
 * Mutable state carried across the steps of one Socialist Millionaires'
 * Protocol exchange. Fields are package-visible so the surrounding SM code
 * can read and update them directly between messages.
 */
static public class SMState {
    // Shared secret plus the per-run exponents and group elements of the protocol.
    BigInteger secret;
    BigInteger x2;
    BigInteger x3;
    BigInteger g1;
    BigInteger g2;
    BigInteger g3;
    BigInteger g3o;
    BigInteger p;
    BigInteger q;
    BigInteger pab;
    BigInteger qab;
    /** The SMP message number expected next (one of the EXPECT* constants). */
    public int nextExpected;
    // Flag recording whether a question accompanied the exchange (set elsewhere in SM).
    boolean receivedQuestion;
    /** Progress indicator (one of the PROG_* constants). */
    public int smProgState;

    /** Creates a fresh state: progress OK, g1 initialised to the fixed generator. */
    public SMState() {
        smProgState = SM.PROG_OK;
        g1 = new BigInteger(1, SM.GENERATOR_S);
    }

    /** @return the value of the {@code receivedQuestion} flag. */
    public boolean isReceivedQuestion() {
        return receivedQuestion;
    }
}
/**
 * Exception thrown on any Socialist Millionaires' Protocol processing failure.
 */
static public class SMException extends Exception {
    private static final long serialVersionUID = 1L;

    /** Creates an exception with an empty message. */
    public SMException() {
        super("");
    }

    /**
     * Wraps another throwable, keeping its message as this exception's message
     * and preserving it as the cause. (The previous implementation used only
     * {@code t.getMessage()}, dropping the cause and losing the underlying
     * stack trace.)
     *
     * @param t the underlying cause
     */
    public SMException(Throwable t) {
        super(t.getMessage(), t);
    }

    /**
     * Creates an exception with the given message.
     *
     * @param s the detail message
     */
    public SMException(String s) {
        super(s);
    }
}
public static final int EXPECT1 = 0;
public static final int EXPECT2 = 1;
public static final int EXPECT3 = 2;
public static final int EXPECT4 = 3;
public static final int EXPECT5 = 4;
public static final int PROG_OK = 0;
public static final int PROG_CHEATED = -2;
public static final int PROG_FAILED = -1;
public static final int PROG_SUCCEEDED = 1;
public static final int MSG1_LEN = 6;
public static final int MSG2_LEN = 11;
public static final int MSG3_LEN = 8;
public static final int MSG4_LEN = 3;
public static final BigInteger MODULUS_S = new BigInteger(
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1"
+ "29024E088A67CC74020BBEA63B139B22514A08798E3404DD"
+ "EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245"
+ "E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED"
+ "EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D"
+ "C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F"
+ "83655D23DCA3AD961C62F356208552BB9ED529077096966D"
+ "670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF", 16);
public static final BigInteger MODULUS_MINUS_2 = new BigInteger(
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1"
+ "29024E088A67CC74020BBEA63B139B22514A08798E3404DD"
+ "EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245"
+ "E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED"
+ "EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D"
+ "C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F"
+ "83655D23DCA3AD961C62F356208552BB9ED529077096966D"
+ "670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFD", 16);
public static final BigInteger ORDER_S = new BigInteger(
"7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68"
+ "948127044533E63A0105DF531D89CD9128A5043CC71A026E"
+ "F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122"
+ "F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6"
+ "F71C35FDAD44CFD2D74F9208BE258FF324943328F6722D9E"
+ "E1003E5C50B1DF82CC6D241B0E2AE9CD348B1FD47E9267AF"
+ "C1B2AE91EE51D6CB0E3179AB1042A95DCF6A9483B84B4B36"
+ "B3861AA7255E4C0278BA36046511B993FFFFFFFFFFFFFFFF", 16);
public static final byte[] GENERATOR_S = Util.hexStringToBytes("02");
public static final int MOD_LEN_BITS = 1536;
public static final int MOD_LEN_BYTES = 192;
/** Generate a random exponent */
public static BigInteger randomExponent() {
SecureRandom sr = new SecureRandom();
byte[] sb = new byte[MOD_LEN_BYTES];
sr.nextBytes(sb);
return new BigInteger(1, sb);
}
/**
* Hash one or two BigIntegers. To hash only one BigInteger, b may be set to
* NULL.
*/
public static BigInteger hash(int version, BigInteger a, BigInteger b) throws SMException {
try {
MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
sha256.update((byte) version);
sha256.update(SerializationUtils.writeMpi(a));
if (b != null)
sha256.update(SerializationUtils.writeMpi(b));
return new BigInteger(1, sha256.digest());
} catch (NoSuchAlgorithmException e) {
throw new SMException("cannot find SHA-256");
} catch (IOException e) {
throw new SMException("cannot serialize bigint");
}
}
public static byte[] serialize(BigInteger[] ints) throws SMException {
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
OtrOutputStream oos = new OtrOutputStream(out);
oos.writeInt(ints.length);
for (BigInteger i : ints) {
oos.writeBigInt(i);
}
byte[] b = out.toByteArray();
oos.close();
return b;
} catch (IOException ex) {
throw new SMException("cannot serialize bigints");
}
}
public static BigInteger[] unserialize(byte[] bytes) throws SMException {
try {
ByteArrayInputStream in = new ByteArrayInputStream(bytes);
OtrInputStream ois = new OtrInputStream(in);
int len = ois.readInt();
if (len > 100)
throw new SMException("Too many ints");
BigInteger[] ints = new BigInteger[len];
for (int i = 0; i < len; i++) {
ints[i] = ois.readBigInt();
}
ois.close();
return ints;
} catch (IOException ex) {
throw new SMException("cannot unserialize bigints");
}
}
/**
* Check that an BigInteger is in the right range to be a (non-unit) group
* element
*/
public static boolean checkGroupElem(BigInteger g) {
return !(g.compareTo(BigInteger.valueOf(2)) > 0 && g.compareTo(SM.MODULUS_MINUS_2) < 0);
}
/**
* Check that an BigInteger is in the right range to be a (non-zero)
* exponent
*/
public static boolean checkExpon(BigInteger x) {
return !(x.compareTo(BigInteger.ONE) > 0 && x.compareTo(SM.ORDER_S) <= 0);
}
/**
* Proof of knowledge of a discrete logarithm
*
* @throws SMException
*/
public static BigInteger[] proofKnowLog(BigInteger g, BigInteger x, int version)
throws SMException {
BigInteger r = randomExponent();
BigInteger temp = g.modPow(r, SM.MODULUS_S);
BigInteger c = hash(version, temp, null);
temp = x.multiply(c).mod(ORDER_S);
BigInteger d = r.subtract(temp).mod(ORDER_S);
BigInteger[] ret = new BigInteger[2];
ret[0] = c;
ret[1] = d;
return ret;
}
/**
* Verify a proof of knowledge of a discrete logarithm. Checks that c =
* h(g^d x^c)
*
* @throws SMException
*/
public static int checkKnowLog(BigInteger c, BigInteger d, BigInteger g, BigInteger x,
int version) throws SMException {
BigInteger gd = g.modPow(d, MODULUS_S);
BigInteger xc = x.modPow(c, MODULUS_S);
BigInteger gdxc = gd.multiply(xc).mod(MODULUS_S);
BigInteger hgdxc = hash(version, gdxc, null);
return hgdxc.compareTo(c);
}
/**
* Proof of knowledge of coordinates with first components being equal
*
* @throws SMException
*/
public static BigInteger[] proofEqualCoords(SMState state, BigInteger r, int version)
throws SMException {
BigInteger r1 = randomExponent();
BigInteger r2 = randomExponent();
/* Compute the value of c, as c = h(g3^r1, g1^r1 g2^r2) */
BigInteger temp1 = state.g1.modPow(r1, MODULUS_S);
BigInteger temp2 = state.g2.modPow(r2, MODULUS_S);
temp2 = temp1.multiply(temp2).mod(MODULUS_S);
temp1 = state.g3.modPow(r1, MODULUS_S);
BigInteger c = hash(version, temp1, temp2);
/* Compute the d values, as d1 = r1 - r c, d2 = r2 - secret c */
temp1 = r.multiply(c).mod(ORDER_S);
BigInteger d1 = r1.subtract(temp1).mod(ORDER_S);
temp1 = state.secret.multiply(c).mod(ORDER_S);
BigInteger d2 = r2.subtract(temp1).mod(ORDER_S);
BigInteger[] ret = new BigInteger[3];
ret[0] = c;
ret[1] = d1;
ret[2] = d2;
return ret;
}
/**
* Verify a proof of knowledge of coordinates with first components being
* equal
*
* @throws SMException
*/
public static int checkEqualCoords(BigInteger c, BigInteger d1, BigInteger d2, BigInteger p,
BigInteger q, SMState state, int version) throws SMException {
/* To verify, we test that hash(g3^d1 * p^c, g1^d1 * g2^d2 * q^c) = c
* If indeed c = hash(g3^r1, g1^r1 g2^r2), d1 = r1 - r*c,
* d2 = r2 - secret*c. And if indeed p = g3^r, q = g1^r * g2^secret
* Then we should have that:
* hash(g3^d1 * p^c, g1^d1 * g2^d2 * q^c)
* = hash(g3^(r1 - r*c + r*c), g1^(r1 - r*c + q*c) *
* g2^(r2 - secret*c + secret*c))
* = hash(g3^r1, g1^r1 g2^r2)
* = c
*/
BigInteger temp2 = state.g3.modPow(d1, MODULUS_S);
BigInteger temp3 = p.modPow(c, MODULUS_S);
BigInteger temp1 = temp2.multiply(temp3).mod(MODULUS_S);
temp2 = state.g1.modPow(d1, MODULUS_S);
temp3 = state.g2.modPow(d2, MODULUS_S);
temp2 = temp2.multiply(temp3).mod(MODULUS_S);
temp3 = q.modPow(c, MODULUS_S);
temp2 = temp3.multiply(temp2).mod(MODULUS_S);
BigInteger cprime = hash(version, temp1, temp2);
return c.compareTo(cprime);
}
/**
* Proof of knowledge of logs with exponents being equal
*
* @throws SMException
*/
public static BigInteger[] proofEqualLogs(SMState state, int version) throws SMException {
BigInteger r = randomExponent();
/* Compute the value of c, as c = h(g1^r, (Qa/Qb)^r) */
BigInteger temp1 = state.g1.modPow(r, MODULUS_S);
BigInteger temp2 = state.qab.modPow(r, MODULUS_S);
BigInteger c = hash(version, temp1, temp2);
/* Compute the d values, as d = r - x3 c */
temp1 = state.x3.multiply(c).mod(ORDER_S);
BigInteger d = r.subtract(temp1).mod(ORDER_S);
BigInteger[] ret = new BigInteger[2];
ret[0] = c;
ret[1] = d;
return ret;
}
/**
* Verify a proof of knowledge of logs with exponents being equal
*
* @throws SMException
*/
public static int checkEqualLogs(BigInteger c, BigInteger d, BigInteger r, SMState state,
int version) throws SMException {
/* Here, we recall the exponents used to create g3.
* If we have previously seen g3o = g1^x where x is unknown
* during the DH exchange to produce g3, then we may proceed with:
*
* To verify, we test that hash(g1^d * g3o^c, qab^d * r^c) = c
* If indeed c = hash(g1^r1, qab^r1), d = r1- x * c
* And if indeed r = qab^x
* Then we should have that:
* hash(g1^d * g3o^c, qab^d r^c)
* = hash(g1^(r1 - x*c + x*c), qab^(r1 - x*c + x*c))
* = hash(g1^r1, qab^r1)
* = c
*/
BigInteger temp2 = state.g1.modPow(d, MODULUS_S);
BigInteger temp3 = state.g3o.modPow(c, MODULUS_S);
BigInteger temp1 = temp2.multiply(temp3).mod(MODULUS_S);
temp3 = state.qab.modPow(d, MODULUS_S);
temp2 = r.modPow(c, MODULUS_S);
temp2 = temp3.multiply(temp2).mod(MODULUS_S);
BigInteger cprime = hash(version, temp1, temp2);
return c.compareTo(cprime);
}
/**
* Create first message in SMP exchange. Input is Alice's secret value which
* this protocol aims to compare to Bob's. The return value is a serialized
* BigInteger array whose elements correspond to the following: [0] = g2a,
* Alice's half of DH exchange to determine g2 [1] = c2, [2] = d2, Alice's
* ZK proof of knowledge of g2a exponent [3] = g3a, Alice's half of DH
* exchange to determine g3 [4] = c3, [5] = d3, Alice's ZK proof of
* knowledge of g3a exponent
*
* @throws SMException
*/
public static byte[] step1(SMState astate, byte[] secret) throws SMException {
/* Initialize the sm state or update the secret */
//Util.checkBytes("secret", secret);
BigInteger secret_mpi = new BigInteger(1, secret);
astate.secret = secret_mpi;
astate.receivedQuestion = false;
astate.x2 = randomExponent();
astate.x3 = randomExponent();
BigInteger[] msg1 = new BigInteger[6];
msg1[0] = astate.g1.modPow(astate.x2, MODULUS_S);
BigInteger[] res = proofKnowLog(astate.g1, astate.x2, 1);
msg1[1] = res[0];
msg1[2] = res[1];
msg1[3] = astate.g1.modPow(astate.x3, MODULUS_S);
res = proofKnowLog(astate.g1, astate.x3, 2);
msg1[4] = res[0];
msg1[5] = res[1];
byte[] ret = serialize(msg1);
astate.smProgState = PROG_OK;
return ret;
}
/**
* Receive the first message in SMP exchange, which was generated by step1.
* Input is saved until the user inputs their secret information. No output.
*
* @throws SMException
*/
public static void step2a(SMState bstate, byte[] input, boolean received_question)
throws SMException {
/* Initialize the sm state if needed */
bstate.receivedQuestion = received_question;
bstate.smProgState = PROG_CHEATED;
/* Read from input to find the mpis */
BigInteger[] msg1 = unserialize(input);
if (checkGroupElem(msg1[0]) || checkExpon(msg1[2]) || checkGroupElem(msg1[3])
|| checkExpon(msg1[5])) {
throw new SMException("Invalid parameter");
}
/* Store Alice's g3a value for later in the protocol */
bstate.g3o = msg1[3];
/* Verify Alice's proofs */
if (checkKnowLog(msg1[1], msg1[2], bstate.g1, msg1[0], 1) != 0
|| checkKnowLog(msg1[4], msg1[5], bstate.g1, msg1[3], 2) != 0) {
throw new SMException("Proof checking failed");
}
/* Create Bob's half of the generators g2 and g3 */
bstate.x2 = randomExponent();
bstate.x3 = randomExponent();
/* Combine the two halves from Bob and Alice and determine g2 and g3 */
bstate.g2 = msg1[0].modPow(bstate.x2, MODULUS_S);
//Util.checkBytes("g2b", bstate.g2.getValue());
bstate.g3 = msg1[3].modPow(bstate.x3, MODULUS_S);
//Util.checkBytes("g3b", bstate.g3.getValue());
bstate.smProgState = PROG_OK;
}
/**
* Create second message in SMP exchange. Input is Bob's secret value.
* Information from earlier steps in the exchange is taken from Bob's state.
* Output is a serialized mpi array whose elements correspond to the
* following: [0] = g2b, Bob's half of DH exchange to determine g2 [1] = c2,
* [2] = d2, Bob's ZK proof of knowledge of g2b exponent [3] = g3b, Bob's
* half of DH exchange to determine g3 [4] = c3, [5] = d3, Bob's ZK proof of
* knowledge of g3b exponent [6] = pb, [7] = qb, Bob's halves of the (Pa/Pb)
* and (Qa/Qb) values [8] = cp, [9] = d5, [10] = d6, Bob's ZK proof that pb,
* qb formed correctly
*
* @throws SMException
*/
public static byte[] step2b(SMState bstate, byte[] secret) throws SMException {
/* Convert the given secret to the proper form and store it */
//Util.checkBytes("secret", secret);
BigInteger secret_mpi = new BigInteger(1, secret);
bstate.secret = secret_mpi;
BigInteger[] msg2 = new BigInteger[11];
msg2[0] = bstate.g1.modPow(bstate.x2, MODULUS_S);
BigInteger[] res = proofKnowLog(bstate.g1, bstate.x2, 3);
msg2[1] = res[0];
msg2[2] = res[1];
msg2[3] = bstate.g1.modPow(bstate.x3, MODULUS_S);
res = proofKnowLog(bstate.g1, bstate.x3, 4);
msg2[4] = res[0];
msg2[5] = res[1];
/* Calculate P and Q values for Bob */
BigInteger r = randomExponent();
//BigInteger r = new BigInteger(SM.GENERATOR_S);
bstate.p = bstate.g3.modPow(r, MODULUS_S);
//Util.checkBytes("Pb", bstate.p.getValue());
msg2[6] = bstate.p;
BigInteger qb1 = bstate.g1.modPow(r, MODULUS_S);
//Util.checkBytes("Qb1", qb1.getValue());
BigInteger qb2 = bstate.g2.modPow(bstate.secret, MODULUS_S);
//Util.checkBytes("Qb2", qb2.getValue());
//Util.checkBytes("g2", bstate.g2.getValue());
//Util.checkBytes("secret", bstate.secret.getValue());
bstate.q = qb1.multiply(qb2).mod(MODULUS_S);
//Util.checkBytes("Qb", bstate.q.getValue());
msg2[7] = bstate.q;
res = proofEqualCoords(bstate, r, 5);
msg2[8] = res[0];
msg2[9] = res[1];
msg2[10] = res[2];
/* Convert to serialized form */
return serialize(msg2);
}
/**
* Create third message in SMP exchange. Input is a message generated by
* otrl_sm_step2b. Output is a serialized mpi array whose elements
* correspond to the following: [0] = pa, [1] = qa, Alice's halves of the
* (Pa/Pb) and (Qa/Qb) values [2] = cp, [3] = d5, [4] = d6, Alice's ZK proof
* that pa, qa formed correctly [5] = ra, calculated as (Qa/Qb)^x3 where x3
* is the exponent used in g3a [6] = cr, [7] = d7, Alice's ZK proof that ra
* is formed correctly
*
* @throws SMException
*/
public static byte[] step3(SMState astate, byte[] input) throws SMException {
/* Read from input to find the mpis */
astate.smProgState = PROG_CHEATED;
BigInteger[] msg2 = unserialize(input);
if (checkGroupElem(msg2[0]) || checkGroupElem(msg2[3]) || checkGroupElem(msg2[6])
|| checkGroupElem(msg2[7]) || checkExpon(msg2[2]) || checkExpon(msg2[5])
|| checkExpon(msg2[9]) || checkExpon(msg2[10])) {
throw new SMException("Invalid Parameter");
}
BigInteger[] msg3 = new BigInteger[8];
/* Store Bob's g3a value for later in the protocol */
astate.g3o = msg2[3];
/* Verify Bob's knowledge of discreet log proofs */
if (checkKnowLog(msg2[1], msg2[2], astate.g1, msg2[0], 3) != 0
|| checkKnowLog(msg2[4], msg2[5], astate.g1, msg2[3], 4) != 0) {
throw new SMException("Proof checking failed");
}
/* Combine the two halves from Bob and Alice and determine g2 and g3 */
astate.g2 = msg2[0].modPow(astate.x2, MODULUS_S);
//Util.checkBytes("g2a", astate.g2.getValue());
astate.g3 = msg2[3].modPow(astate.x3, MODULUS_S);
//Util.checkBytes("g3a", astate.g3.getValue());
/* Verify Bob's coordinate equality proof */
if (checkEqualCoords(msg2[8], msg2[9], msg2[10], msg2[6], msg2[7], astate, 5) != 0)
throw new SMException("Invalid Parameter");
/* Calculate P and Q values for Alice */
BigInteger r = randomExponent();
//BigInteger r = new BigInteger(SM.GENERATOR_S);
astate.p = astate.g3.modPow(r, MODULUS_S);
//Util.checkBytes("Pa", astate.p.getValue());
msg3[0] = astate.p;
BigInteger qa1 = astate.g1.modPow(r, MODULUS_S);
//Util.checkBytes("Qa1", qa1.getValue());
BigInteger qa2 = astate.g2.modPow(astate.secret, MODULUS_S);
//Util.checkBytes("Qa2", qa2.getValue());
//Util.checkBytes("g2", astate.g2.getValue());
//Util.checkBytes("secret", astate.secret.getValue());
astate.q = qa1.multiply(qa2).mod(MODULUS_S);
msg3[1] = astate.q;
//Util.checkBytes("Qa", astate.q.getValue());
BigInteger[] res = proofEqualCoords(astate, r, 6);
msg3[2] = res[0];
msg3[3] = res[1];
msg3[4] = res[2];
/* Calculate Ra and proof */
BigInteger inv = msg2[6].modInverse(MODULUS_S);
astate.pab = astate.p.multiply(inv).mod(MODULUS_S);
inv = msg2[7].modInverse(MODULUS_S);
astate.qab = astate.q.multiply(inv).mod(MODULUS_S);
msg3[5] = astate.qab.modPow(astate.x3, MODULUS_S);
res = proofEqualLogs(astate, 7);
msg3[6] = res[0];
msg3[7] = res[1];
byte[] output = serialize(msg3);
astate.smProgState = PROG_OK;
return output;
}
/**
* Create final message in SMP exchange. Input is a message generated by
* otrl_sm_step3. Output is a serialized mpi array whose elements correspond
* to the following: [0] = rb, calculated as (Qa/Qb)^x3 where x3 is the
* exponent used in g3b [1] = cr, [2] = d7, Bob's ZK proof that rb is formed
* correctly This method also checks if Alice and Bob's secrets were the
* same. If so, it returns NO_ERROR. If the secrets differ, an INV_VALUE
* error is returned instead.
*
* @throws SMException
*/
public static byte[] step4(SMState bstate, byte[] input) throws SMException {
/* Read from input to find the mpis */
BigInteger[] msg3 = unserialize(input);
bstate.smProgState = PROG_CHEATED;
BigInteger[] msg4 = new BigInteger[3];
if (checkGroupElem(msg3[0]) || checkGroupElem(msg3[1]) || checkGroupElem(msg3[5])
|| checkExpon(msg3[3]) || checkExpon(msg3[4]) || checkExpon(msg3[7])) {
throw new SMException("Invalid Parameter");
}
/* Verify Alice's coordinate equality proof */
if (checkEqualCoords(msg3[2], msg3[3], msg3[4], msg3[0], msg3[1], bstate, 6) != 0)
throw new SMException("Invalid Parameter");
/* Find Pa/Pb and Qa/Qb */
BigInteger inv = bstate.p.modInverse(MODULUS_S);
bstate.pab = msg3[0].multiply(inv).mod(MODULUS_S);
inv = bstate.q.modInverse(MODULUS_S);
bstate.qab = msg3[1].multiply(inv).mod(MODULUS_S);
/* Verify Alice's log equality proof */
if (checkEqualLogs(msg3[6], msg3[7], msg3[5], bstate, 7) != 0) {
throw new SMException("Proof checking failed");
}
/* Calculate Rb and proof */
msg4[0] = bstate.qab.modPow(bstate.x3, MODULUS_S);
BigInteger[] res = proofEqualLogs(bstate, 8);
msg4[1] = res[0];
msg4[2] = res[1];
byte[] output = serialize(msg4);
/* Calculate Rab and verify that secrets match */
BigInteger rab = msg3[5].modPow(bstate.x3, MODULUS_S);
//Util.checkBytes("rab", rab.getValue());
//Util.checkBytes("pab", bstate.pab.getValue());
int comp = rab.compareTo(bstate.pab);
bstate.smProgState = (comp != 0) ? PROG_FAILED : PROG_SUCCEEDED;
return output;
}
/**
* Receives the final SMP message, which was generated in otrl_sm_step. This
* method checks if Alice and Bob's secrets were the same. If so, it returns
* NO_ERROR. If the secrets differ, an INV_VALUE error is returned instead.
*
* @throws SMException
*/
public static void step5(SMState astate, byte[] input) throws SMException {
/* Read from input to find the mpis */
BigInteger[] msg4 = unserialize(input);
astate.smProgState = PROG_CHEATED;
if (checkGroupElem(msg4[0]) || checkExpon(msg4[2])) {
throw new SMException("Invalid Parameter");
}
/* Verify Bob's log equality proof */
if (checkEqualLogs(msg4[1], msg4[2], msg4[0], astate, 8) != 0)
throw new SMException("Invalid Parameter");
/* Calculate Rab and verify that secrets match */
BigInteger rab = msg4[0].modPow(astate.x3, MODULUS_S);
//Util.checkBytes("rab", rab.getValue());
//Util.checkBytes("pab", astate.pab.getValue());
int comp = rab.compareTo(astate.pab);
if (comp != 0) {
//System.out.println("checking failed");
}
astate.smProgState = (comp != 0) ? PROG_FAILED : PROG_SUCCEEDED;
return;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alachisoft.tayzgrid.cluster.blocks;
import com.alachisoft.tayzgrid.cluster.Transport;
import com.alachisoft.tayzgrid.cluster.Message;
import com.alachisoft.tayzgrid.cluster.OperationResponse;
import com.alachisoft.tayzgrid.cluster.Global;
import com.alachisoft.tayzgrid.cluster.View;
import com.alachisoft.tayzgrid.cluster.util.Command;
import com.alachisoft.tayzgrid.cluster.util.Rsp;
import com.alachisoft.tayzgrid.cluster.util.RspList;
import com.alachisoft.tayzgrid.common.GenericCopier;
import com.alachisoft.tayzgrid.common.logger.ILogger;
import com.alachisoft.tayzgrid.common.monitoring.ServerMonitor;
import com.alachisoft.tayzgrid.common.net.Address;
import com.alachisoft.tayzgrid.common.ServicePropValues;
import com.alachisoft.tayzgrid.common.threading.Monitor;
import com.alachisoft.tayzgrid.common.threading.ThreadUtil;
import com.alachisoft.tayzgrid.runtime.util.TimeSpan;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
// $Id: GroupRequest.java,v 1.8 2004/09/05 04:54:22 ovidiuf Exp $
/**
* Sends a message to all members of the group and waits for all responses (or timeout). Returns a boolean value (success or failure). Results (if any) can be retrieved when
* _done.<p> The supported transport to send requests is currently either a RequestCorrelator or a generic Transport. One of them has to be given in the constructor. It will then
* be used to send a request. When a message is received by either one, the receiveResponse() of this class has to be called (this class does not actively receive
* requests/responses itself). Also, when a view change or suspicion is received, the methods viewChange() or suspect() of this class have to be called.<p> When started, an array
* of responses, correlating to the membership, is created. Each response is added to the corresponding field in the array. When all fields have been set, the algorithm terminates.
* This algorithm can optionally use a suspicion service (failure detector) to detect (and exclude from the membership) faulty members. If no suspicion service is available,
* timeouts can be used instead (see
* <code>execute()</code>). When _done, a list of suspected members can be retrieved.<p> Because a channel might deliver requests, and responses to <em>different</em> requests, the
* <code>GroupRequest</code> class cannot itself receive and process requests/responses from the channel. A mechanism outside this class has to do this; it has to determine what
* the responses are for the message sent by the
* <code>execute()</code> method and call
* <code>receiveResponse()</code> to do so.<p> <b>Requirements</b>: lossless delivery, e.g. acknowledgment-based message confirmation.
*
* <author> Bela Ban </author> <version> $Revision: 1.8 $ </version>
*/
public class GroupRequest implements RspCollector, Command
{
/**
 * Builds a {@link RspList} snapshot of the per-member outcome of this
 * request: received responses, suspected members and members that have not
 * answered. Runs under {@code rsp_mutex} for a consistent view.
 */
public RspList getResults()
{
    synchronized (rsp_mutex)
    {
        RspList list = new RspList();
        for (int idx = 0; idx < membership.length; idx++)
        {
            Address member = membership[idx];
            byte status = received[idx];
            if (status == RECEIVED)
            {
                list.addRsp(member, responses[idx]);
            }
            else if (status == SUSPECTED)
            {
                list.addSuspect(member);
            }
            else if (status == NOT_RECEIVED)
            {
                list.addNotReceived(member);
            }
        }
        return list;
    }
}
/** @return how many members have been suspected for this request so far */
public int getNumSuspects()
{
    return this.suspects.size();
}
/** @return the (bounded, synchronized) list of suspected members */
public java.util.List getSuspects()
{
    return this.suspects;
}
/** @return true once execute() has finished (successfully or not) */
public boolean getDone()
{
    return this._done;
}
/**
 * Generates the next unique request ID, wrapping back to 0 after
 * Long.MAX_VALUE has been handed out. Thread-safe via req_mutex.
 */
private static long getRequestId()
{
    synchronized (req_mutex)
    {
        if (last_req_id == Long.MAX_VALUE)
        {
            last_req_id = -1;
        }
        return ++last_req_id;
    }
}
/**
 * Registers a next-hop (replica) member from which an additional response
 * is now expected: bumps the expected-response counter and records the
 * address in the next-hop list (at most once).
 */
public final void AddNHop(Address sender)
{
    synchronized (_nhopMutex)
    {
        expectedNHopResponses = expectedNHopResponses + 1;
        boolean alreadyKnown = nHops.contains(sender);
        if (!alreadyKnown)
        {
            nHops.add(sender);
        }
    }
}
/**
 * Seeds the next-hop response table with NOT_RECEIVED for the given sender
 * unless a status has already been recorded for it.
 * NOTE(review): unlike AddNHop this does not synchronize on _nhopMutex;
 * presumably callers guarantee ordering — confirm.
 */
public final void AddNHopDefaultStatus(Address sender)
{
    boolean hasStatus = receivedFromNHops.containsKey(sender);
    if (!hasStatus)
    {
        receivedFromNHops.put(sender, NOT_RECEIVED);
    }
}
/**
 * Decides, based on the current response mode (rsp_mode), whether enough
 * responses and/or suspicions have arrived for this request to complete.
 * Callers are expected to hold the appropriate lock while the counters are
 * read.
 *
 * @return true when the caller may stop waiting for responses
 */
protected boolean getResponses()
{
// Tallies of per-member response states (see received[]).
int num_not_received = getNum(NOT_RECEIVED);
int num_received = getNum(RECEIVED);
int num_suspected = getNum(SUSPECTED);
int num_total = membership.length;
// Next-hop (replica) tallies; a suspected next hop also counts as answered.
int num_receivedFromNHops = getNumFromNHops(RECEIVED);
int num_suspectedNHops = getNumFromNHops(SUSPECTED);
int num_okResponsesFromNHops = num_receivedFromNHops + num_suspectedNHops;
switch (rsp_mode)
{
case GET_FIRST:
// One response suffices; also done when every member is suspected.
if (num_received > 0)
{
return true;
}
if (num_suspected >= num_total)
// e.g. 2 members, and both suspected
{
return true;
}
break;
case GET_FIRST_NHOP:
// Like GET_FIRST, but additionally wait for all expected next-hop replies.
if (num_received > 0 && num_okResponsesFromNHops == expectedNHopResponses)
{
return true;
}
if (num_suspected >= num_total)
{
return true;
}
break;
case GET_ALL:
// Done only once no member is still outstanding.
if (num_not_received > 0)
{
return false;
}
return true;
case GET_ALL_NHOP:
// GET_ALL plus all expected next-hop responses accounted for.
if (num_not_received > 0)
{
return false;
}
if (num_okResponsesFromNHops < expectedNHopResponses)
{
return false;
}
return true;
case GET_N:
// N >= membership degenerates to GET_ALL (note: permanently switches mode).
if (expected_mbrs >= num_total)
{
rsp_mode = GET_ALL;
return getResponses();
}
if (num_received >= expected_mbrs)
{
return true;
}
// Not enough live members left to ever reach N; suspects may fill the gap.
if (num_received + num_not_received < expected_mbrs)
{
if (num_received + num_suspected >= expected_mbrs)
{
return true;
}
return false;
}
return false;
case GET_NONE:
// Fire-and-forget: never wait.
return true;
default:
getCacheLog().Error("rsp_mode " + rsp_mode + " unknown !");
break;
}
return false;
}
/* Response modes: how many replies execute() waits for (see class javadoc). */
public static final byte GET_FIRST = 1;
public static final byte GET_ALL = 2;
public static final byte GET_N = 3;
public static final byte GET_NONE = 4;
public static final byte GET_FIRST_NHOP = 5;
public static final byte GET_ALL_NHOP = 6;
/* Per-member response status values stored in received[]. */
private static final byte NOT_RECEIVED = 0;
private static final byte RECEIVED = 1;
private static final byte SUSPECTED = 2;
private Address[] membership = null; // current membership
private Object[] responses = null; // responses corresponding to membership
private byte[] received = null; // status of response for each mbr (see above)
private long[] timeStats = null; // NOTE(review): comment was copy-pasted from responses[]; presumably per-member timing stats — confirm
/**
 * replica nodes in the cluster from where we are expecting responses. Following is the detail of how it works. 1. In case of synchronous POR, when an operation is transferred
 * to main node through clustering layer, main node does the following: - a) it executes the operation on itself. b) it transfers the operation to its replica (the next hop).
 * c) it sends the response of this operation back and as part of this response, it informs the node that another response is expected from replica node (the next hop). 2. this
 * dictionary is filled with the replica addresses (next hop addresses) received as part of the response from main node along with the status (RECEIVED/NOT_RECEIVED...).
 */
private java.util.HashMap<Address, Byte> receivedFromNHops = new java.util.HashMap<Address, Byte>();
/**
 * list of next hop members.
 */
//: List changed to ArrayList
private ArrayList<Address> nHops = new ArrayList<Address>();
/**
 * number of responses expected from next hops. When one node send requests to other node (NHop Request), the node may or may not send the same request to next hop depending on
 * the success/failure of the request on this node. this counter tells how many requests were sent to next hops and their responses are now expected.
 */
private int expectedNHopResponses = 0;
private Object _nhopMutex = new Object(); // guards expectedNHopResponses and nHops (see AddNHop)
/**
 * bounded queue of suspected members
 */
private java.util.List suspects = Collections.synchronizedList(new java.util.ArrayList(10));
/**
 * list of members, changed by viewChange()
 */
private java.util.List members = Collections.synchronizedList(new java.util.ArrayList(10));
/**
 * the list of all the current members in the cluster. this list is different from the members list of the Group Request which only contains the addresses of members to which
 * this group request must send the message. list of total membership is used to determine which member has been suspected after the new list of members is received through
 * view change event.
 */
private java.util.List clusterMembership = Collections.synchronizedList(new java.util.ArrayList(10));
/**
 * keep suspects vector bounded
 */
private int max_suspects = 40;
protected Message request_msg = null; // the message this request sends
protected RequestCorrelator corr = null; // either use RequestCorrelator or ...
protected Transport transport = null; // Transport (one of them has to be non-null)
protected byte rsp_mode = GET_ALL; // one of the GET_* modes above
private boolean _done = false; // set around doExecute(); see execute()
protected final Object rsp_mutex = new Object(); // guards response state and wakes waiters
protected long timeout = 0; // ms to wait for responses; 0 = wait forever
protected int expected_mbrs = 0; // N for GET_N mode
/**
 * to generate unique request IDs (see getRequestId())
 */
private static long last_req_id = -1;
protected long req_id = -1; // request ID for this request
private static Object req_mutex = new Object(); // guards last_req_id
private ILogger _ncacheLog; // diagnostics logger injected via constructor
private ILogger getCacheLog()
{
return _ncacheLog;
}
/* Request-enquiry configuration, read once in the static initializer below. */
private static boolean s_allowRequestEnquiry;
private static int s_requestEnquiryInterval = 20;
private static int s_requestEnquiryRetries = 1;
private boolean _seqReset;
private int _retriesAfteSeqReset;
/*
 * Reads request-enquiry tuning from the service configuration once at class
 * load: whether enquiry is allowed at all, the enquiry interval (default 20;
 * units not visible here — presumably seconds, confirm against
 * ServicePropValues docs) and the retry count (default 1, forced >= 1).
 */
static
{
String str = ServicePropValues.CacheServer_AllowRequestEnquiry;
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(str))
{
s_allowRequestEnquiry = Boolean.parseBoolean(str);
}
// Interval/retries are only relevant when enquiry is enabled.
if (s_allowRequestEnquiry)
{
str = ServicePropValues.CacheServer_RequestEnquiryInterval;
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(str))
{
s_requestEnquiryInterval = Integer.parseInt(str);
}
str = ServicePropValues.CacheServer_RequestEnquiryRetries;
if (!tangible.DotNetToJavaStringHelper.isNullOrEmpty(str))
{
s_requestEnquiryRetries = Integer.parseInt(str);
if (s_requestEnquiryRetries <= 0)
{
s_requestEnquiryRetries = 1;
}
}
}
}
/**
 * Creates a group request that sends {@code m} through a
 * {@link RequestCorrelator}. A correlator tags each request with a unique
 * ID and routes matching responses back to this collector, which lets
 * several requests be in flight concurrently (a bare Transport could not
 * tell their responses apart).
 *
 * @param m the message to send to the group
 * @param corr the request correlator used to send the request and deliver
 *        matching responses
 * @param members the membership to which the request is sent and from which
 *        responses are expected; reset by reset()
 * @param clusterCompleteMembership the full current cluster membership, used
 *        to determine suspects when a new view arrives
 * @param rsp_mode one of GET_FIRST, GET_ALL, GET_N, GET_NONE,
 *        GET_FIRST_NHOP or GET_ALL_NHOP (see getResponses())
 * @param NCacheLog logger for diagnostics
 */
public GroupRequest(Message m, RequestCorrelator corr, java.util.List members, java.util.List clusterCompleteMembership, byte rsp_mode, ILogger NCacheLog)
{
    this.request_msg = m;
    this.corr = corr;
    this.rsp_mode = rsp_mode;
    this._ncacheLog = NCacheLog;
    this.clusterMembership = clusterCompleteMembership;
    reset(members);
}
/**
 * Same as the correlator-based constructor, additionally bounding the wait.
 *
 * @param timeout time to wait for responses (ms); values <= 0 mean wait
 *        indefinitely (e.g. when a suspicion service makes timeouts
 *        unnecessary)
 * @param expected_mbrs number of responses to wait for in GET_N mode
 */
public GroupRequest(Message m, RequestCorrelator corr, java.util.List members, java.util.List clusterCompleteMembership, byte rsp_mode, long timeout, int expected_mbrs, ILogger NCacheLog)
{
    this(m, corr, members, clusterCompleteMembership, rsp_mode, NCacheLog);
    if (timeout > 0)
    {
        this.timeout = timeout;
    }
    this.expected_mbrs = expected_mbrs;
}
/**
 * Creates a group request that sends {@code m} through a generic
 * {@link Transport} instead of a request correlator.
 */
public GroupRequest(Message m, Transport transport, java.util.List members, java.util.List clusterCompleteMembership, byte rsp_mode, ILogger NCacheLog)
{
    this.request_msg = m;
    this.transport = transport;
    this.rsp_mode = rsp_mode;
    this._ncacheLog = NCacheLog;
    this.clusterMembership = clusterCompleteMembership;
    reset(members);
}
/**
 * Same as the transport-based constructor, additionally bounding the wait.
 *
 * @param timeout time to wait for responses (ms); values <= 0 mean wait
 *        indefinitely
 * @param expected_mbrs number of responses to wait for in GET_N mode
 */
public GroupRequest(Message m, Transport transport, java.util.List members, java.util.List clusterCompleteMembership, byte rsp_mode, long timeout, int expected_mbrs, ILogger NCacheLog)
{
    this(m, transport, members, clusterCompleteMembership, rsp_mode, NCacheLog);
    if (timeout > 0)
    {
        this.timeout = timeout;
    }
    this.expected_mbrs = expected_mbrs;
}
/**
 * Sends the message and blocks until the number of responses demanded by
 * {@code rsp_mode} has been received, or a timeout has occurred. <em>n</em>
 * can be the first response, all responses, or a majority of the responses.
 *
 * @return true when the request completed successfully, false otherwise
 */
public boolean execute()
{
    if (ServerMonitor.getMonitorActivity())
    {
        ServerMonitor.LogClientActivity("GrpReq.Exec", "mode :" + rsp_mode);
    }
    // Without a correlator or a raw transport there is no way to send at all.
    if (corr == null && transport == null)
    {
        getCacheLog().Error("GroupRequest.execute()", "both corr and transport are null, cannot send group request");
        return false;
    }
    synchronized (rsp_mutex)
    {
        _done = false;
        boolean succeeded = doExecute(timeout);
        if (!succeeded && getCacheLog().getIsInfoEnabled())
        {
            getCacheLog().Info("GroupRequest.execute()", "call did not execute correctly, request is " + toString());
        }
        if (ServerMonitor.getMonitorActivity())
        {
            ServerMonitor.LogClientActivity("GrpReq.Exec", "exited; result:" + succeeded);
        }
        _done = true;
        return succeeded;
    }
}
/**
 * Resets the group request, so it can be reused for another execution.
 */
public void reset(Message m, byte mode, long timeout)
{
    synchronized (rsp_mutex)
    {
        _done = false;
        rsp_mode = mode;
        request_msg = m;
        this.timeout = timeout;
        // Wake any waiter in doExecute() so it observes the refreshed state.
        Monitor.pulse(rsp_mutex);
    }
}
/**
 * Resets the request with a new message, member list, mode, timeout and
 * expected-response count, then wakes any waiting thread.
 */
public void reset(Message m, java.util.ArrayList members, byte rsp_mode, long timeout, int expected_rsps)
{
    synchronized (rsp_mutex)
    {
        // Delegate to the simpler overloads for message state and membership.
        reset(m, rsp_mode, timeout);
        reset(members);
        this.expected_mbrs = expected_rsps;
        Monitor.pulse(rsp_mutex);
    }
}
/**
 * This method sets the
 * <code>membership</code> variable to the value of
 * <code>members</code>. It requires that the caller already hold the
 * <code>rsp_mutex</code> lock.
 *
 * @param mbrs The new list of members; when null, only the collected
 *             responses are cleared and the current membership is kept
 */
public void reset(java.util.List mbrs)
{
    if (mbrs == null)
    {
        // No new member list: just wipe previously collected responses.
        if (membership != null)
        {
            for (int i = 0; i < membership.length; i++)
            {
                responses[i] = null;
                received[i] = NOT_RECEIVED;
            }
        }
        return;
    }
    int count = mbrs.size();
    membership = new Address[count];
    responses = new Object[count];
    received = new byte[count];
    timeStats = new long[count];
    for (int idx = 0; idx < count; idx++)
    {
        membership[idx] = (Address) mbrs.get(idx);
        responses[idx] = null;
        received[idx] = NOT_RECEIVED;
        timeStats[idx] = 0;
    }
    this.members.clear();
    this.members.addAll(mbrs);
}
/**
 * Marks that a sequence reset occurred and wakes the waiting caller so it can
 * re-evaluate its retry bookkeeping.
 */
public final void SequenceReset()
{
    synchronized (rsp_mutex)
    {
        _retriesAfteSeqReset = 0;
        _seqReset = true;
        Monitor.pulse(rsp_mutex);
    }
}
/*
* ---------------------- Interface RspCollector --------------------------
*/
/**
 * <b>Callback</b> (called by RequestCorrelator or Transport). Adds a response to the response table. When all responses have been received,
 * <code>execute()</code> returns. Responses from senders that are not part of
 * the primary membership are recorded in the n-hop (replica) table instead.
 */
public void receiveResponse(Message m)
{
    Address sender = m.getSrc(), mbr;
    Object val = null;
    // Late arrival: the request already completed, so drop the response.
    if (_done)
    {
        getCacheLog().Warn("GroupRequest.receiveResponse()", "command is done; cannot add response !");
        return;
    }
    // Responses from already-suspected members are discarded.
    if (suspects != null && suspects.size() > 0 && suspects.contains(sender))
    {
        getCacheLog().Warn("GroupRequest.receiveResponse()", "received response from suspected member " + sender + "; discarding");
        return;
    }
    if (m.getLength() > 0)
    {
        try
        {
            // When a payload response is expected, wrap both the serialized
            // object and the raw payload in an OperationResponse.
            if (m.responseExpected)
            {
                OperationResponse opRes = new OperationResponse();
                opRes.SerializablePayload = m.getFlatObject();
                opRes.UserPayload = m.getPayload();
                val = opRes;
            }
            else
            {
                val = m.getFlatObject();
            }
        }
        catch (Exception e)
        {
            // Deserialization failed: the response is recorded with a null value.
            getCacheLog().Error("GroupRequest.receiveResponse()", "exception=" + e.getMessage());
        }
    }
    synchronized (rsp_mutex)
    {
        boolean isMainMember = false;
        // Record the value in the sender's slot of the 'responses' array.
        for (int i = 0; i < membership.length; i++)
        {
            mbr = membership[i];
            if (mbr.equals(sender))
            {
                isMainMember = true;
                // Only the first response from each member is kept.
                if (received[i] == NOT_RECEIVED)
                {
                    responses[i] = val;
                    received[i] = RECEIVED;
                    if (getCacheLog().getIsInfoEnabled())
                    {
                        getCacheLog().CriticalInfo("GroupRequest.receiveResponse()", "received response for request " + req_id + ", sender=" + sender + ", val=" + val);
                    }
                    Monitor.pulse(rsp_mutex); // wakes up execute()
                    break;
                }
            }
        }
        if (!isMainMember)
        {
            // Sender is not a primary target: count it as an n-hop (replica) reply.
            receivedFromNHops.put(sender, RECEIVED);
            Monitor.pulse(rsp_mutex);
        }
    }
}
/**
 * <b>Callback</b> (called by RequestCorrelator or Transport). Reports to
 * <code>GroupRequest</code> that a member is faulty (suspected). Typically
 * triggered by a suspect message from a failure detector (where available);
 * used to exclude faulty members from the response list.
 */
public void suspect(Address suspected_member)
{
    synchronized (rsp_mutex)
    {
        boolean foundInMembership = false;
        // Mark the member's slot as SUSPECTED and drop any stale response.
        for (int idx = 0; idx < membership.length; idx++)
        {
            if (membership[idx].equals(suspected_member))
            {
                foundInMembership = true;
                addSuspect(suspected_member);
                responses[idx] = null;
                received[idx] = SUSPECTED;
                Monitor.pulse(rsp_mutex);
                break;
            }
        }
        if (!foundInMembership)
        {
            // Replica (n-hop) member: record the suspicion in the n-hop table.
            if (clusterMembership != null && clusterMembership.contains(suspected_member))
            {
                receivedFromNHops.put(suspected_member, SUSPECTED);
            }
            Monitor.pulse(rsp_mutex);
        }
    }
}
/**
 * Any member of 'membership' that is not in the new view is flagged as SUSPECTED. Any member in the new view that is <em>not</em> in the membership (ie, the set of responses
 * expected for the current RPC) will <em>not</em> be added to it. If we did this we might run into the following problem: <ul> <li>Membership is {A,B} <li>A sends a
 * synchronous group RPC (which sleeps for 60 secs in the invocation handler) <li>C joins while A waits for responses from A and B <li>If this would generate a new view {A,B,C}
 * and if this expanded the response set to {A,B,C}, A would wait forever on C's response because C never received the request in the first place, therefore won't send a
 * response. </ul>
 *
 * @param new_view the newly installed cluster view; ignored when null
 */
@Override
public void viewChange(View new_view)
{
    Address mbr;
    java.util.List mbrs = new_view != null ? new_view.getMembers() : null;
    if (membership == null || membership.length == 0 || mbrs == null)
    {
        return;
    }
    synchronized (rsp_mutex)
    {
        // FIX: the original guarded only the deep copy against a null
        // clusterMembership, then called clusterMembership.clear() unguarded,
        // which threw a NullPointerException. All access is now guarded.
        java.util.ArrayList oldMembership = null;
        if (clusterMembership != null)
        {
            Object tempVar = GenericCopier.DeepCopy(clusterMembership);
            oldMembership = (java.util.ArrayList) ((tempVar instanceof java.util.ArrayList) ? tempVar : null);
            clusterMembership.clear();
            clusterMembership.addAll(mbrs);
        }
        this.members.clear();
        this.members.addAll(mbrs);
        for (int i = 0; i < membership.length; i++)
        {
            mbr = membership[i];
            // Primary member dropped from the view: treat it as suspected.
            if (!mbrs.contains(mbr))
            {
                addSuspect(mbr);
                responses[i] = null;
                received[i] = SUSPECTED;
            }
            if (oldMembership != null)
            {
                oldMembership.remove(mbr);
            }
        }
        // By this time, oldMembership contains only members that are not part
        // of this request's primary membership; any of them missing from the
        // new view are replicas to be marked suspected in the n-hop table.
        if (oldMembership != null)
        {
            for (Iterator it = oldMembership.iterator(); it.hasNext();)
            {
                Address member = (Address) it.next();
                if (!mbrs.contains(member))
                {
                    receivedFromNHops.put(member, SUSPECTED);
                }
            }
        }
        Monitor.pulse(rsp_mutex);
    }
}
/*
* -------------------- End of Interface RspCollector -----------------------------------
*/
/**
 * Renders the request state (id, members, responses, suspects, mode, flags)
 * for diagnostics. Output format is unchanged from the original.
 */
@Override
public String toString()
{
    StringBuilder sb = new StringBuilder();
    sb.append("[GroupRequest:\n");
    sb.append("req_id=").append(req_id).append('\n');
    sb.append("members: ");
    for (int i = 0; i < membership.length; i++)
    {
        sb.append(membership[i]).append(' ');
    }
    sb.append("\nresponses: ");
    for (int i = 0; i < responses.length; i++)
    {
        sb.append(responses[i]).append(' ');
    }
    if (suspects.size() > 0)
    {
        sb.append("\nsuspects: ").append(Global.CollectionToString(suspects));
    }
    sb.append("\nrequest_msg: ").append(request_msg);
    sb.append("\nrsp_mode: ").append(rsp_mode);
    sb.append("\ndone: ").append(_done);
    sb.append("\ntimeout: ").append(timeout);
    sb.append("\nexpected_mbrs: ").append(expected_mbrs);
    sb.append("\n]");
    return sb.toString();
}
/*
* --------------------------------- Private Methods -------------------------------------
*/
/**
 * Legacy send-and-wait implementation. Sends the request and waits until
 * getResponses() is satisfied or the timeout elapses. This method runs with
 * rsp_mutex locked (called by <code>execute()</code>).
 *
 * @param timeout time to wait for responses in ms; <= 0 waits indefinitely
 * @return true when the required responses arrived, false on send failure or timeout
 */
protected boolean doExecute_old(long timeout)
{
    long start_time = 0;
    Address mbr, suspect;
    // GET_NONE requests are fire-and-forget and need no correlation id.
    if (rsp_mode != GET_NONE)
    {
        req_id = corr.getNextRequestId();
    }
    reset(null); // clear 'responses' array
    if (suspects != null)
    {
        // mark all suspects in 'received' array
        for (int i = 0; i < suspects.size(); i++)
        {
            suspect = (Address) suspects.get(i);
            for (int j = 0; j < membership.length; j++)
            {
                mbr = membership[j];
                if (mbr.equals(suspect))
                {
                    received[j] = SUSPECTED;
                    break; // we can break here because we ensure there are no duplicate members
                }
            }
        }
    }
    try
    {
        if (getCacheLog().getIsInfoEnabled())
        {
            getCacheLog().Info("GroupRequest.doExecute()", "sending request (id=" + req_id + ')');
        }
        if (corr != null)
        {
            java.util.List tmp = members != null ? members : null;
            // GET_NONE registers no response collector (null callback).
            corr.sendRequest(req_id, tmp, request_msg, rsp_mode == GET_NONE ? null : this);
        }
        else
        {
            transport.send(request_msg);
        }
    }
    catch (Exception e)
    {
        getCacheLog().Error("GroupRequest.doExecute()", "exception=" + e.getMessage());
        if (corr != null)
        {
            corr.done(req_id);
        }
        return false;
    }
    long orig_timeout = timeout;
    if (timeout <= 0)
    {
        // No timeout: block until every required response/suspicion is in.
        while (true)
        {
            /*
             * Wait for responses:
             */
            adjustMembership(); // may not be necessary, just to make sure...
            if (getResponses())
            {
                if (corr != null)
                {
                    corr.done(req_id);
                }
                if (getCacheLog().getIsInfoEnabled())
                {
                    getCacheLog().Info("GroupRequest.doExecute()", "received all responses: " + toString());
                }
                return true;
            }
            try
            {
                // Woken by receiveResponse()/suspect()/viewChange().
                Monitor.wait(rsp_mutex);
            }
            catch (Exception e)
            {
                getCacheLog().Error("GroupRequest.doExecute():2", "exception=" + e.getMessage());
            }
        }
    }
    else
    {
        start_time = System.currentTimeMillis();
        while (timeout > 0)
        {
            /*
             * Wait for responses:
             */
            if (getResponses())
            {
                if (corr != null)
                {
                    corr.done(req_id);
                }
                if (getCacheLog().getIsInfoEnabled())
                {
                    getCacheLog().Info("GroupRequest.doExecute()", "received all responses: " + toString());
                }
                return true;
            }
            // Recompute the remaining wait from wall-clock elapsed time.
            timeout = orig_timeout - (System.currentTimeMillis() - start_time);
            if (timeout > 0)
            {
                try
                {
                    Monitor.wait(rsp_mutex, timeout);
                }
                catch (Exception e)
                {
                    getCacheLog().Error("GroupRequest.doExecute():3", "exception=" + e);
                }
            }
        }
        if (timeout <= 0)
        {
            // Timed out: log which members never answered.
            RspList rspList = getResults();
            String failedNodes = "";
            if (rspList != null)
            {
                for (int i = 0; i < rspList.size(); i++)
                {
                    Object tempVar = rspList.elementAt(i);
                    Rsp rsp = (Rsp) ((tempVar instanceof Rsp) ? tempVar : null);
                    if (rsp != null && !rsp.wasReceived())
                    {
                        failedNodes += rsp.getSender();
                    }
                }
            }
            if (getCacheLog().getIsInfoEnabled())
            {
                getCacheLog().Info("GroupRequest.doExecute:", "[ " + req_id + " ] did not receive rsp from " + failedNodes + " [Timeout] " + timeout + " [timeout-val ="
                        + orig_timeout + "]");
            }
        }
        if (corr != null)
        {
            corr.done(req_id);
        }
        return false;
    }
}
/**
 * Sends the request and waits until getResponses() is satisfied or the timeout
 * elapses. Unlike doExecute_old, this version supports n-hop request modes and
 * an optional periodic "request status enquiry" that probes non-responding
 * members and may resend the request. Runs with rsp_mutex locked (called by
 * <code>execute()</code>).
 *
 * @param timeout time to wait for responses in ms; <= 0 waits indefinitely
 * @return true when the required responses arrived, false on send failure,
 *         timeout, or repeated enquiry failure
 */
protected boolean doExecute(long timeout)
{
    long start_time = 0;
    Address mbr, suspect;
    // GET_NONE requests are fire-and-forget and need no correlation id.
    if (rsp_mode != GET_NONE)
    {
        req_id = corr.getNextRequestId();
    }
    reset(null); // clear 'responses' array
    if (suspects != null)
    {
        // mark all suspects in 'received' array
        for (int i = 0; i < suspects.size(); i++)
        {
            suspect = (Address) suspects.get(i);
            for (int j = 0; j < membership.length; j++)
            {
                mbr = membership[j];
                if (mbr.equals(suspect))
                {
                    received[j] = SUSPECTED;
                    break; // we can break here because we ensure there are no duplicate members
                }
            }
        }
    }
    try
    {
        if (ServerMonitor.getMonitorActivity())
        {
            ServerMonitor.LogClientActivity("GrpReq.doExec", "sending req_id :" + req_id + "; timeout: " + timeout);
        }
        if (getCacheLog().getIsInfoEnabled())
        {
            getCacheLog().Info("GroupRequest.doExecute()", "sending request (id=" + req_id + ')');
        }
        if (corr != null)
        {
            java.util.List tmp = members != null ? members : null;
            // N-hop modes use the dedicated send path so replica responses are tracked.
            if (rsp_mode == GET_FIRST_NHOP || rsp_mode == GET_ALL_NHOP)
            {
                corr.sendNHopRequest(req_id, tmp, request_msg, this);
            }
            else
            {
                corr.sendRequest(req_id, tmp, request_msg, rsp_mode == GET_NONE ? null : this);
            }
        }
        else
        {
            transport.send(request_msg);
        }
    }
    catch (Exception e)
    {
        getCacheLog().Error("GroupRequest.doExecute()", "exception=" + e.getMessage());
        if (corr != null)
        {
            corr.done(req_id);
        }
        return false;
    }
    long orig_timeout = timeout;
    if (timeout <= 0)
    {
        // No timeout: block until every required response/suspicion is in.
        while (true)
        {
            /*
             * Wait for responses:
             */
            adjustMembership(); // may not be necessary, just to make sure...
            if (getResponses())
            {
                if (corr != null)
                {
                    corr.done(req_id);
                }
                if (getCacheLog().getIsInfoEnabled())
                {
                    getCacheLog().Info("GroupRequest.doExecute()", "received all responses: " + toString());
                }
                return true;
            }
            try
            {
                // Woken by receiveResponse()/suspect()/viewChange().
                Monitor.wait(rsp_mutex);
            }
            catch (Exception e)
            {
                getCacheLog().Error("GroupRequest.doExecute():2", "exception=" + e.getMessage());
            }
        }
    }
    else
    {
        start_time = System.currentTimeMillis();
        long wakeuptime = timeout;
        int retries = s_requestEnquiryRetries;
        int enquiryFailure = 0;
        // With enquiry enabled we wake up at a fixed interval to probe the
        // status of outstanding requests instead of sleeping the full timeout.
        if (s_allowRequestEnquiry)
        {
            wakeuptime = s_requestEnquiryInterval * 1000;
        }
        while (timeout > 0)
        {
            /*
             * Wait for responses:
             */
            if (getResponses())
            {
                if (ServerMonitor.getMonitorActivity())
                {
                    ServerMonitor.LogClientActivity("GrpReq.doExec", "req_id :" + req_id + " completed");
                }
                if (corr != null)
                {
                    corr.done(req_id);
                }
                if (getCacheLog().getIsInfoEnabled())
                {
                    getCacheLog().Info("GroupRequest.doExecute()", "received all responses: " + toString());
                }
                return true;
            }
            // Recompute the remaining wait from wall-clock elapsed time.
            timeout = orig_timeout - (System.currentTimeMillis() - start_time);
            // Never sleep past the overall deadline.
            if (s_allowRequestEnquiry)
            {
                if (wakeuptime > timeout)
                {
                    wakeuptime = timeout;
                }
            }
            else
            {
                wakeuptime = timeout;
            }
            if (timeout > 0)
            {
                try
                {
                    timeout = orig_timeout - (System.currentTimeMillis() - start_time);
                    boolean reacquired = Monitor.wait(rsp_mutex, wakeuptime);
                    // Wait expired (or a sequence reset happened): probe status.
                    if ((!reacquired || _seqReset) && s_allowRequestEnquiry)
                    {
                        if (getResponses())
                        {
                            if (ServerMonitor.getMonitorActivity())
                            {
                                ServerMonitor.LogClientActivity("GrpReq.doExec", "req_id :" + req_id + " completed");
                            }
                            if (corr != null)
                            {
                                corr.done(req_id);
                            }
                            if (getCacheLog().getIsInfoEnabled())
                            {
                                getCacheLog().Info("GroupRequest.doExecute()", "received all responses: " + toString());
                            }
                            return true;
                        }
                        else
                        {
                            if (ServerMonitor.getMonitorActivity())
                            {
                                ServerMonitor.LogClientActivity("GrpReq.doExec", "req_id :" + req_id + " completed");
                            }
                            // Only enquire while time and retry budget remain.
                            if ((timeout > 0 && wakeuptime < timeout) && retries > 0)
                            {
                                if (_seqReset)
                                {
                                    _retriesAfteSeqReset++;
                                }
                                retries--;
                                boolean enquireAgain = GetRequestStatus();
                                if (!enquireAgain)
                                {
                                    enquiryFailure++;
                                }
                                //for debugging and tesing purpose only. will be removed after testing.
                                getCacheLog().CriticalInfo("GetRequestStatus, retries : " + retries + ", enquire again : " + ((Boolean) (enquireAgain)).toString()
                                        + ", enquiry failure : " + enquiryFailure);
                                // Give up after repeated enquiry failures or
                                // too many retries following a sequence reset.
                                if (enquiryFailure >= 3 || _retriesAfteSeqReset > 3)
                                {
                                    if (corr != null)
                                    {
                                        corr.done(req_id);
                                    }
                                    return false;
                                }
                            }
                        }
                    }
                }
                catch (Exception e)
                {
                    getCacheLog().Error("GroupRequest.doExecute():3", "exception=" + e);
                }
            }
        }
        if (timeout <= 0)
        {
            // Timed out: log which members never answered.
            RspList rspList = getResults();
            String failedNodes = "";
            if (rspList != null)
            {
                for (int i = 0; i < rspList.size(); i++)
                {
                    Object tempVar = rspList.elementAt(i);
                    Rsp rsp = (Rsp) ((tempVar instanceof Rsp) ? tempVar : null);
                    if (rsp != null && !rsp.wasReceived())
                    {
                        failedNodes += rsp.getSender();
                    }
                }
            }
            if (getCacheLog().getIsInfoEnabled())
            {
                getCacheLog().Info("GroupRequest.doExecute:", "[ " + req_id + " ] did not receive rsp from " + failedNodes + " [Timeout] " + timeout + " [timeout-val ="
                        + orig_timeout + "]");
            }
        }
        if (corr != null)
        {
            corr.done(req_id);
        }
        return false;
    }
}
/**
 * Probes members that have not yet responded, and resends the request to
 * nodes that never received it (or, for non-sequenced messages, already
 * processed it without us seeing the response).
 *
 * @return true when the caller should enquire again later, false when further
 *         enquiry is pointless (sequenced request already in progress with no
 *         unresponsive nodes)
 */
private boolean GetRequestStatus()
{
    java.util.HashMap statusResult = null;
    java.util.ArrayList failedNodes = new java.util.ArrayList();
    RspList rspList = getResults();
    boolean enquireStatusAgain = true;
    int suspectCount = 0;
    String notRecvNodes = "";
    // Collect every member whose response is still outstanding.
    if (rspList != null)
    {
        for (int i = 0; i < rspList.size(); i++)
        {
            Object tempVar = rspList.elementAt(i);
            Rsp rsp = (Rsp) ((tempVar instanceof Rsp) ? tempVar : null);
            if (rsp != null && !rsp.wasReceived())
            {
                notRecvNodes += rsp.sender + ",";
                failedNodes.add(rsp.getSender());
            }
            if (rsp != null && rsp.wasSuspected())
            {
                suspectCount++;
            }
        }
    }
    if (getCacheLog().getIsInfoEnabled())
    {
        getCacheLog().Info("GroupRequest.GetReqStatus", req_id + " rsp not received from " + failedNodes.size() + " nodes");
    }
    boolean resendReq = true;
    java.util.ArrayList resendList = new java.util.ArrayList();
    int notRespondingCount = 0;
    if (failedNodes.size() > 0)
    {
        if (ServerMonitor.getMonitorActivity())
        {
            ServerMonitor.LogClientActivity("GrpReq.GetReqStatus", " did not recv rsps from " + notRecvNodes + " nodes");
        }
        statusResult = corr.FetchRequestStatus(failedNodes, this.clusterMembership, req_id);
        StringBuilder sb = null;
        if (ServerMonitor.getMonitorActivity())
        {
            sb = new StringBuilder();
        }
        if (statusResult != null)
        {
            for (Iterator it = failedNodes.iterator(); it.hasNext();)
            {
                Address node = (Address) it.next();
                Object rawStatus = statusResult.get(node);
                RequestStatus status = (RequestStatus) ((rawStatus instanceof RequestStatus) ? rawStatus : null);
                // FIX: FetchRequestStatus may return no (or a non-RequestStatus)
                // entry for a node; the original dereferenced 'status' unguarded
                // here and threw a NullPointerException.
                if (status == null)
                {
                    continue;
                }
                if (status.getStatus() == RequestStatus.REQ_NOT_RECEIVED)
                {
                    if (sb != null)
                    {
                        sb.append("(" + node + ":" + "REQ_NOT_RECEIVED)");
                    }
                    // Node never saw the request: safe to resend.
                    resendList.add(node);
                }
                if (status.getStatus() == RequestStatus.NONE)
                {
                    if (sb != null)
                    {
                        sb.append("(" + node + ":" + "NONE)");
                    }
                    notRespondingCount++;
                }
                if (status.getStatus() == RequestStatus.REQ_PROCESSED)
                {
                    if (sb != null)
                    {
                        sb.append("(" + node + ":" + "REQ_PROCESSED)");
                    }
                    // Sequenced requests must not be replayed once processed.
                    if (!request_msg.getIsSeqRequired())
                    {
                        resendList.add(node);
                    }
                }
            }
            if (sb != null && ServerMonitor.getMonitorActivity())
            {
                ServerMonitor.LogClientActivity("GrpReq.GetReqStatus", "status of failed nodes " + sb.toString());
            }
            if (request_msg.getIsSeqRequired())
            {
                if (resendList.size() != rspList.size())
                {
                    if (getCacheLog().getIsInfoEnabled())
                    {
                        getCacheLog().Info("GroupRequest.GetReqStatus", req_id + "sequence message; no need to resend; resend_count " + resendList.size());
                    }
                    if (notRespondingCount > 0)
                    {
                        resendReq = false;
                    }
                    else
                    {
                        enquireStatusAgain = false;
                        resendReq = false;
                    }
                }
            }
            if (getCacheLog().getIsInfoEnabled())
            {
                getCacheLog().Info("GroupRequest.GetReqStatus", req_id + "received REQ_NOT_RECEIVED status from " + resendList.size() + " nodes");
            }
        }
        else
        {
            if (getCacheLog().getIsInfoEnabled())
            {
                getCacheLog().Info("GroupRequest.GetReqStatus", req_id + " status result is NULL");
            }
        }
        if (resendReq && resendList.size() > 0)
        {
            if (corr != null)
            {
                // Single destination uses the unicast field, multiple use dests.
                if (resendList.size() == 1)
                {
                    request_msg.setDest((Address) ((resendList.get(0) instanceof Address) ? resendList.get(0) : null));
                }
                else
                {
                    request_msg.setDests(resendList);
                }
                if (getCacheLog().getIsInfoEnabled())
                {
                    getCacheLog().Info("GroupRequest.GetReqStatus", req_id + " resending messages to " + resendList.size());
                }
                corr.sendRequest(req_id, resendList, request_msg, rsp_mode == GET_NONE ? null : this);
            }
        }
    }
    return enquireStatusAgain;
}
/**
 * Return number of elements of a certain type in array 'received'. Type can be
 * RECEIVED, NOT_RECEIVED or SUSPECTED.
 */
public int getNum(int type)
{
    int count = 0;
    for (byte status : received)
    {
        if (status == type)
        {
            count++;
        }
    }
    return count;
}
/**
 * Counts n-hop (replica) members whose recorded status equals the given type.
 * Replicas with no recorded status yet are not counted.
 */
public int getNumFromNHops(int type)
{
    int count = 0;
    synchronized (_nhopMutex)
    {
        for (Address replica : nHops)
        {
            if (receivedFromNHops.containsKey(replica))
            {
                byte status = receivedFromNHops.get(replica);
                if (status == type)
                {
                    count++;
                }
            }
        }
    }
    return count;
}
/**
 * Logs (at info level) each member's current response status.
 */
public void printReceived()
{
    for (int i = 0; i < received.length; i++)
    {
        if (!getCacheLog().getIsInfoEnabled())
        {
            continue;
        }
        String status = received[i] == NOT_RECEIVED ? "NOT_RECEIVED" : (received[i] == RECEIVED ? "RECEIVED" : "SUSPECTED");
        getCacheLog().Info(membership[i] + ": " + status);
    }
}
/**
 * Adjusts the 'received' array in the following way: <ul> <li>if a member P in 'membership' is not in 'members', P's entry in the 'received' array will be marked as SUSPECTED
 * <li>if P is in 'suspects', then P's entry in the 'received' array will be marked as SUSPECTED </ul> This call requires exclusive access to rsp_mutex (called by
 * getResponses() which has the rsp_mutex locked, so this should not be a problem).
 */
public void adjustMembership()
{
    if (membership == null || membership.length == 0)
    {
        return;
    }
    for (int i = 0; i < membership.length; i++)
    {
        Address current = membership[i];
        boolean missingFromMembers = this.members != null && !this.members.contains(current);
        if (missingFromMembers || suspects.contains(current))
        {
            addSuspect(current);
            responses[i] = null;
            received[i] = SUSPECTED;
        }
    }
}
/**
 * Adds a member to the 'suspects' list. Removes oldest elements from 'suspects'
 * list to keep the list bounded ('max_suspects' number of elements).
 */
public void addSuspect(Address suspected_mbr)
{
    if (suspects.contains(suspected_mbr))
    {
        return;
    }
    suspects.add(suspected_mbr);
    // Evict oldest entries so the queue stays bounded.
    while (suspects.size() >= max_suspects && suspects.size() > 0)
    {
        suspects.remove(0);
    }
}
}
| |
/*
* Copyright 2007 Sun Microsystems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rometools.propono.atom.client;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.methods.GetMethod;
import org.jdom2.Element;
import com.rometools.propono.atom.common.AtomService;
import com.rometools.propono.atom.common.Categories;
import com.rometools.propono.atom.common.Collection;
import com.rometools.propono.atom.common.Workspace;
import com.rometools.propono.utils.ProponoException;
import com.rometools.rome.feed.atom.Entry;
import com.rometools.rome.io.impl.Atom10Parser;
/**
 * Models an Atom collection; extends {@link Collection} and adds methods for adding, retrieving,
 * updating and deleting entries.
 */
public class ClientCollection extends Collection {

    // No read-only detection is implemented: collections are always writable.
    private final boolean writable = true;
    private HttpClient httpClient = null;
    private AuthStrategy authStrategy = null;
    private ClientWorkspace workspace = null;
    private ClientAtomService service = null;

    /**
     * Creates a collection bound to a workspace, sharing the workspace's HTTP
     * client and authentication strategy, and parses the collection element.
     */
    ClientCollection(final Element e, final ClientWorkspace workspace, final String baseURI) throws ProponoException {
        super(e, baseURI);
        this.workspace = workspace;
        service = workspace.getAtomService();
        httpClient = workspace.getAtomService().getHttpClient();
        authStrategy = workspace.getAtomService().getAuthStrategy();
        parseCollectionElement(e);
    }

    /**
     * Creates a standalone collection (not associated with any workspace) for
     * the given href, with its own HTTP client.
     */
    ClientCollection(final String href, final AuthStrategy authStrategy) throws ProponoException {
        super("Standalone connection", "text", href);
        this.authStrategy = authStrategy;
        try {
            httpClient = new HttpClient(new MultiThreadedHttpConnectionManager());
            // TODO: make connection timeout configurable
            httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(30000);
        } catch (final Throwable t) {
            throw new ProponoException("ERROR creating HTTPClient", t);
        }
    }

    /** Applies the collection's authentication strategy to an outgoing request. */
    void addAuthentication(final HttpMethodBase method) throws ProponoException {
        authStrategy.addAuthentication(httpClient, method);
    }

    /**
     * Package access to httpClient to allow use by ClientEntry and ClientMediaEntry.
     */
    HttpClient getHttpClient() {
        return httpClient;
    }

    /**
     * Get iterator over entries in this collection. Entries returned are considered to be partial
     * entries and cannot be saved/updated.
     */
    public Iterator<ClientEntry> getEntries() throws ProponoException {
        return new EntryIterator(this);
    }

    /**
     * Get full entry specified by entry edit URI. Note that entry may or may not be associated with
     * this collection.
     *
     * @return ClientEntry or ClientMediaEntry specified by URI.
     */
    public ClientEntry getEntry(final String uri) throws ProponoException {
        final GetMethod method = new GetMethod(uri);
        authStrategy.addAuthentication(httpClient, method);
        try {
            httpClient.executeMethod(method);
            if (method.getStatusCode() != 200) {
                throw new ProponoException("ERROR HTTP status code=" + method.getStatusCode());
            }
            final Entry romeEntry = Atom10Parser.parseEntry(new InputStreamReader(method.getResponseBodyAsStream()), uri, Locale.US);
            // Media-link entries get the richer media wrapper.
            if (!romeEntry.isMediaEntry()) {
                return new ClientEntry(service, this, romeEntry, false);
            } else {
                return new ClientMediaEntry(service, this, romeEntry, false);
            }
        } catch (final Exception e) {
            // FIX: the original message ended with a dangling "HTTP code: "
            // fragment that never included the code.
            throw new ProponoException("ERROR: getting or parsing entry/media", e);
        } finally {
            method.releaseConnection();
        }
    }

    /**
     * Get workspace or null if collection is not associated with a workspace.
     */
    public Workspace getWorkspace() {
        return workspace;
    }

    /**
     * Determines if collection is writable.
     */
    public boolean isWritable() {
        return writable;
    }

    /**
     * Create new entry associated with collection, but do not save to server.
     *
     * @throws ProponoException if collection is not writable.
     */
    public ClientEntry createEntry() throws ProponoException {
        if (!isWritable()) {
            throw new ProponoException("Collection is not writable");
        }
        return new ClientEntry(service, this);
    }

    /**
     * Create new media entry associated with collection, but do not save to server. Depending on
     * the Atom server, you may or may not be able to persist the properties of the entry that is
     * returned.
     *
     * @param title Title to used for uploaded file.
     * @param slug String to be used in file-name of stored file
     * @param contentType MIME content-type of file.
     * @param bytes Data to be uploaded as byte array.
     * @throws ProponoException if collection is not writable
     */
    public ClientMediaEntry createMediaEntry(final String title, final String slug, final String contentType, final byte[] bytes) throws ProponoException {
        if (!isWritable()) {
            throw new ProponoException("Collection is not writable");
        }
        return new ClientMediaEntry(service, this, title, slug, contentType, bytes);
    }

    /**
     * Create new media entry associated with collection, but do not save to server. Depending on
     * the Atom server, you may or may not be able to persist the properties of the entry that is
     * returned.
     *
     * @param title Title to used for uploaded file.
     * @param slug String to be used in file-name of stored file
     * @param contentType MIME content-type of file.
     * @param is Data to be uploaded as InputStream.
     * @throws ProponoException if collection is not writable
     */
    public ClientMediaEntry createMediaEntry(final String title, final String slug, final String contentType, final InputStream is) throws ProponoException {
        if (!isWritable()) {
            throw new ProponoException("Collection is not writable");
        }
        return new ClientMediaEntry(service, this, title, slug, contentType, is);
    }

    /**
     * Save to collection a new entry that was created by a createEntry() or createMediaEntry() and
     * save it to the server.
     *
     * @param entry Entry to be saved.
     * @throws ProponoException on error, if collection is not writable or if entry is partial.
     */
    public void addEntry(final ClientEntry entry) throws ProponoException {
        if (!isWritable()) {
            throw new ProponoException("Collection is not writable");
        }
        entry.addToCollection(this);
    }

    @Override
    protected void parseCollectionElement(final Element element) throws ProponoException {
        // Standalone collections (no workspace) have nothing to parse.
        if (workspace == null) {
            return;
        }
        setHref(element.getAttribute("href").getValue());
        final Element titleElem = element.getChild("title", AtomService.ATOM_FORMAT);
        if (titleElem != null) {
            setTitle(titleElem.getText());
            if (titleElem.getAttribute("type", AtomService.ATOM_FORMAT) != null) {
                setTitleType(titleElem.getAttribute("type", AtomService.ATOM_FORMAT).getValue());
            }
        }
        final List<Element> acceptElems = element.getChildren("accept", AtomService.ATOM_PROTOCOL);
        if (acceptElems != null && !acceptElems.isEmpty()) {
            for (final Element acceptElem : acceptElems) {
                addAccept(acceptElem.getTextTrim());
            }
        }
        // Loop to parse <app:categories> element to Categories objects
        final List<Element> catsElems = element.getChildren("categories", AtomService.ATOM_PROTOCOL);
        for (final Element catsElem : catsElems) {
            final Categories cats = new ClientCategories(catsElem, this);
            addCategories(cats);
        }
    }
}
| |
/*
* jcobyla
*
* The MIT License
*
* Copyright (c) 2012 Anders Gustafsson, Cureos AB.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
* FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Remarks:
*
* The original Fortran 77 version of this code was by Michael Powell (M.J.D.Powell @ damtp.cam.ac.uk)
* The Fortran 90 version was by Alan Miller (Alan.Miller @ vic.cmis.csiro.au). Latest revision - 30 October 1998
*/
package de.xypron.jcobyla;
import static org.junit.Assert.assertArrayEquals;
import org.junit.Test;
/**
* Test class for COBYLA2 employing tests from Report DAMTP 1992/NA5.
*
* @author Anders Gustafsson, Cureos AB.
*/
public class CobylaTest {
// FIELDS
// Shared COBYLA parameters used by every test below; all are handed straight
// to Cobyla.findMinimum. RHOBEG/RHOEND are the initial and final rho values
// (presumably the trust-region radii — see the COBYLA report), IPRINT the
// print verbosity, MAX_FUN the cap on objective evaluations.
private static final double RHOBEG = 0.5;
private static final double RHOEND = 1.0e-6;
private static final int IPRINT = 1;
private static final int MAX_FUN = 3500;
// TESTS
/**
 * Minimization of a simple quadratic function of two variables.
 */
@Test
public void test01FindMinimum() {
    System.out.format("%nOutput from test problem 1 (Simple quadratic)%n");
    // Objective 10*(x0 + 1)^2 + x1^2, unconstrained; expected minimum at (-1, 0).
    Calcfc objective = new Calcfc() {
        @Override
        public double compute(int n, int m, double[] x, double[] con) {
            return 10.0 * Math.pow(x[0] + 1.0, 2.0) + Math.pow(x[1], 2.0);
        }
    };
    double[] point = { 1.0, 1.0 };
    CobylaExitStatus status = Cobyla.findMinimum(objective, 2, 0, point, RHOBEG, RHOEND, IPRINT, MAX_FUN);
    assertArrayEquals(null, new double[] { -1.0, 0.0 }, point, 1.0e-5);
}
/**
 * Easy two dimensional minimization in unit circle.
 */
@Test
public void test02FindMinimum() {
    System.out.format("%nOutput from test problem 2 (2D unit circle calculation)%n");
    // Minimize x0*x1 subject to x0^2 + x1^2 <= 1 (one inequality constraint).
    Calcfc objective = new Calcfc() {
        @Override
        public double compute(int n, int m, double[] x, double[] con) {
            con[0] = 1.0 - x[0] * x[0] - x[1] * x[1];
            return x[0] * x[1];
        }
    };
    double[] point = { 1.0, 1.0 };
    CobylaExitStatus status = Cobyla.findMinimum(objective, 2, 1, point, RHOBEG, RHOEND, IPRINT, MAX_FUN);
    assertArrayEquals(null, new double[] { Math.sqrt(0.5), -Math.sqrt(0.5) }, point, 1.0e-5);
}
/**
 * Easy three dimensional minimization in ellipsoid.
 */
@Test
public void test03FindMinimum() {
    System.out.format("%nOutput from test problem 3 (3D ellipsoid calculation)%n");
    // Minimize x0*x1*x2 inside the ellipsoid x0^2 + 2*x1^2 + 3*x2^2 <= 1.
    Calcfc objective = new Calcfc() {
        @Override
        public double compute(int n, int m, double[] x, double[] con) {
            con[0] = 1.0 - x[0] * x[0] - 2.0 * x[1] * x[1] - 3.0 * x[2] * x[2];
            return x[0] * x[1] * x[2];
        }
    };
    double[] point = { 1.0, 1.0, 1.0 };
    CobylaExitStatus status = Cobyla.findMinimum(objective, 3, 1, point, RHOBEG, RHOEND, IPRINT, MAX_FUN);
    assertArrayEquals(null, new double[] { 1.0 / Math.sqrt(3.0), 1.0 / Math.sqrt(6.0), -1.0 / 3.0 }, point, 1.0e-5);
}
/**
* Weak version of Rosenbrock's problem.
*/
@Test
public void test04FindMinimum() {
System.out.format("%nOutput from test problem 4 (Weak Rosenbrock)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
return Math.pow(x[0] * x[0] - x[1], 2.0) + Math.pow(1.0 + x[0], 2.0);
}
};
double[] x = {1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 2, 0, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null, new double[] { -1.0, 1.0 }, x, 1.0e-4);
}
/**
* Intermediate version of Rosenbrock's problem.
*/
@Test
public void test05FindMinimum() {
System.out.format("%nOutput from test problem 5 (Intermediate Rosenbrock)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
return 10.0 * Math.pow(x[0] * x[0] - x[1], 2.0) + Math.pow(1.0 + x[0], 2.0);
}
};
double[] x = {1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 2, 0, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null, new double[] { -1.0, 1.0 }, x, 3.0e-4);
}
/**
* This problem is taken from Fletcher's book Practical Methods of
* Optimization and has the equation number (9.1.15).
*/
@Test
public void test06FindMinimum() {
System.out.format("%nOutput from test problem 6 (Equation (9.1.15) in Fletcher's book)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
con[0] = x[1] - x[0] * x[0];
con[1] = 1.0 - x[0] * x[0] - x[1] * x[1];
return -x[0] - x[1];
}
};
double[] x = {1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 2, 2, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null, new double[] { Math.sqrt(0.5), Math.sqrt(0.5) }, x, 1.0e-5);
}
/**
* This problem is taken from Fletcher's book Practical Methods of
* Optimization and has the equation number (14.4.2).
*/
@Test
public void test07FindMinimum() {
System.out.format("%nOutput from test problem 7 (Equation (14.4.2) in Fletcher)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
con[0] = 5.0 * x[0] - x[1] + x[2];
con[1] = x[2] - x[0] * x[0] - x[1] * x[1] - 4.0 * x[1];
con[2] = x[2] - 5.0 * x[0] - x[1];
return x[2];
}
};
double[] x = {1.0, 1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 3, 3, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null, new double[] { 0.0, -3.0, -3.0 }, x, 1.0e-5);
}
/**
* This problem is taken from page 66 of Hock and Schittkowski's book Test
* Examples for Nonlinear Programming Codes. It is their test problem Number
* 43, and has the name Rosen-Suzuki.
*/
@Test
public void test08FindMinimum() {
System.out.format("%nOutput from test problem 8 (Rosen-Suzuki)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
con[0] = 8.0 - x[0] * x[0] - x[1] * x[1] - x[2] * x[2] - x[3] * x[3] - x[0] + x[1] - x[2] + x[3];
con[1] = 10.0 - x[0] * x[0] - 2.0 * x[1] * x[1] - x[2] * x[2] - 2.0 * x[3] * x[3] + x[0] + x[3];
con[2] = 5.0 - 2.0 * x[0] * x[0] - x[1] * x[1] - x[2] * x[2] - 2.0 * x[0] + x[1] + x[3];
return x[0] * x[0] + x[1] * x[1] + 2.0 * x[2] * x[2] + x[3] * x[3] - 5.0 * x[0] -
5.0 * x[1] - 21.0 * x[2] + 7.0 * x[3];
}
};
double[] x = {1.0, 1.0, 1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 4, 3, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null, new double[] { 0.0, 1.0, 2.0, -1.0 }, x, 1.0e-5);
}
/**
* This problem is taken from page 111 of Hock and Schittkowski's
* book Test Examples for Nonlinear Programming Codes. It is their
* test problem Number 100.
*/
@Test
public void test09FindMinimum() {
System.out.format("%nOutput from test problem 9 (Hock and Schittkowski 100)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
con[0] = 127.0 - 2.0 * x[0] * x[0] - 3.0 * Math.pow(x[1], 4.0) - x[2] - 4.0 * x[3] * x[3] - 5.0 * x[4];
con[1] = 282.0 - 7.0 * x[0] - 3.0 * x[1] - 10.0 * x[2] * x[2] - x[3] + x[4];
con[2] = 196.0 - 23.0 * x[0] - x[1] * x[1] - 6.0 * x[5] * x[5] + 8.0 * x[6];
con[3] = -4.0 * x[0] * x[0] - x[1] * x[1] + 3.0 * x[0] * x[1] - 2.0 * x[2] * x[2] - 5.0 * x[5] + 11.0 * x[6];
return Math.pow(x[0] - 10.0, 2.0) + 5.0 * Math.pow(x[1] - 12.0, 2.0) + Math.pow(x[2], 4.0) +
3.0 * Math.pow(x[3] - 11.0, 2.0) + 10.0 * Math.pow(x[4], 6.0) + 7.0 * x[5] * x[5] + Math.pow(x[6], 4.0) -
4.0 * x[5] * x[6] - 10.0 * x[5] - 8.0 * x[6];
}
};
double[] x = {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 7, 4, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null,
new double[] { 2.330499, 1.951372, -0.4775414, 4.365726, -0.624487, 1.038131, 1.594227 }, x, 1.0e-5);
}
/**
* This problem is taken from page 415 of Luenberger's book Applied
* Nonlinear Programming. It is to maximize the area of a hexagon of
* unit diameter.
*/
@Test
public void test10FindMinimum() {
System.out.format("%nOutput from test problem 10 (Hexagon area)%n");
Calcfc calcfc = new Calcfc() {
@Override
public double compute(int n, int m, double[] x, double[] con) {
con[0] = 1.0 - x[2] * x[2] - x[3] * x[3];
con[1] = 1.0 - x[8] * x[8];
con[2] = 1.0 - x[4] * x[4] - x[5] * x[5];
con[3] = 1.0 - x[0] * x[0] - Math.pow(x[1] - x[8], 2.0);
con[4] = 1.0 - Math.pow(x[0] - x[4], 2.0) - Math.pow(x[1] - x[5], 2.0);
con[5] = 1.0 - Math.pow(x[0] - x[6], 2.0) - Math.pow(x[1] - x[7], 2.0);
con[6] = 1.0 - Math.pow(x[2] - x[4], 2.0) - Math.pow(x[3] - x[5], 2.0);
con[7] = 1.0 - Math.pow(x[2] - x[6], 2.0) - Math.pow(x[3] - x[7], 2.0);
con[8] = 1.0 - x[6] * x[6] - Math.pow(x[7] - x[8], 2.0);
con[9] = x[0] * x[3] - x[1] * x[2];
con[10] = x[2] * x[8];
con[11] = -x[4] * x[8];
con[12] = x[4] * x[7] - x[5] * x[6];
con[13] = x[8];
return -0.5 * (x[0] * x[3] - x[1] * x[2] + x[2] * x[8] - x[4] * x[8] + x[4] * x[7] - x[5] * x[6]);
}
};
double[] x = {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
CobylaExitStatus result = Cobyla.findMinimum(calcfc, 9, 14, x, RHOBEG, RHOEND, IPRINT, MAX_FUN);
assertArrayEquals(null,
new double[] { x[0], x[1], x[2], x[3], x[0], x[1], x[2], x[3], 0.0 }, x, 1.0e-4);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.contrib.streaming.state;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.memory.ByteArrayInputStreamWithPos;
import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.queryablestate.client.state.serialization.KvStateSerializer;
import org.apache.flink.runtime.state.KeyGroupRangeAssignment;
import org.apache.flink.runtime.state.internal.InternalMapState;
import org.apache.flink.util.Preconditions;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.RocksIterator;
import org.rocksdb.WriteOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
/**
* {@link MapState} implementation that stores state in RocksDB.
*
* <p>{@link RocksDBStateBackend} must ensure that we set the
* {@link org.rocksdb.StringAppendOperator} on the column family that we use for our state since
* we use the {@code merge()} call.
*
* @param <K> The type of the key.
* @param <N> The type of the namespace.
* @param <UK> The type of the keys in the map state.
* @param <UV> The type of the values in the map state.
*/
public class RocksDBMapState<K, N, UK, UV>
    extends AbstractRocksDBState<K, N, MapState<UK, UV>, MapStateDescriptor<UK, UV>, Map<UK, UV>>
    implements InternalMapState<N, UK, UV> {

    private static final Logger LOG = LoggerFactory.getLogger(RocksDBMapState.class);

    /** Serializer for the keys and values. */
    private final TypeSerializer<UK> userKeySerializer;
    private final TypeSerializer<UV> userValueSerializer;

    /**
     * We disable writes to the write-ahead-log here. We can't have these in the base class
     * because JNI segfaults for some reason if they are.
     */
    private final WriteOptions writeOptions;

    /**
     * Creates a new {@code RocksDBMapState}.
     *
     * @param columnFamily The RocksDB column family this state lives in.
     * @param namespaceSerializer The serializer for the namespace.
     * @param stateDesc The state identifier for the state.
     * @param backend The keyed backend that owns the RocksDB instance.
     */
    public RocksDBMapState(ColumnFamilyHandle columnFamily,
            TypeSerializer<N> namespaceSerializer,
            MapStateDescriptor<UK, UV> stateDesc,
            RocksDBKeyedStateBackend<K> backend) {

        super(columnFamily, namespaceSerializer, stateDesc, backend);

        this.userKeySerializer = stateDesc.getKeySerializer();
        this.userValueSerializer = stateDesc.getValueSerializer();

        writeOptions = new WriteOptions();
        writeOptions.setDisableWAL(true);
    }

    // ------------------------------------------------------------------------
    //  MapState Implementation
    // ------------------------------------------------------------------------

    @Override
    public UV get(UK userKey) throws IOException, RocksDBException {
        byte[] rawKeyBytes = serializeUserKeyWithCurrentKeyAndNamespace(userKey);
        byte[] rawValueBytes = backend.db.get(columnFamily, rawKeyBytes);

        return (rawValueBytes == null ? null : deserializeUserValue(rawValueBytes));
    }

    @Override
    public void put(UK userKey, UV userValue) throws IOException, RocksDBException {
        byte[] rawKeyBytes = serializeUserKeyWithCurrentKeyAndNamespace(userKey);
        byte[] rawValueBytes = serializeUserValue(userValue);

        backend.db.put(columnFamily, writeOptions, rawKeyBytes, rawValueBytes);
    }

    @Override
    public void putAll(Map<UK, UV> map) throws IOException, RocksDBException {
        if (map == null) {
            return;
        }

        for (Map.Entry<UK, UV> entry : map.entrySet()) {
            put(entry.getKey(), entry.getValue());
        }
    }

    @Override
    public void remove(UK userKey) throws IOException, RocksDBException {
        byte[] rawKeyBytes = serializeUserKeyWithCurrentKeyAndNamespace(userKey);

        backend.db.remove(columnFamily, writeOptions, rawKeyBytes);
    }

    @Override
    public boolean contains(UK userKey) throws IOException, RocksDBException {
        byte[] rawKeyBytes = serializeUserKeyWithCurrentKeyAndNamespace(userKey);
        byte[] rawValueBytes = backend.db.get(columnFamily, rawKeyBytes);

        return (rawValueBytes != null);
    }

    @Override
    public Iterable<Map.Entry<UK, UV>> entries() throws IOException, RocksDBException {
        final Iterator<Map.Entry<UK, UV>> iterator = iterator();

        // Return null to make the behavior consistent with other states.
        if (!iterator.hasNext()) {
            return null;
        } else {
            return new Iterable<Map.Entry<UK, UV>>() {
                @Override
                public Iterator<Map.Entry<UK, UV>> iterator() {
                    return iterator;
                }
            };
        }
    }

    @Override
    public Iterable<UK> keys() throws IOException, RocksDBException {
        final byte[] prefixBytes = serializeCurrentKeyAndNamespace();

        return new Iterable<UK>() {
            @Override
            public Iterator<UK> iterator() {
                return new RocksDBMapIterator<UK>(backend.db, prefixBytes) {
                    @Override
                    public UK next() {
                        RocksDBMapEntry entry = nextEntry();
                        return (entry == null ? null : entry.getKey());
                    }
                };
            }
        };
    }

    @Override
    public Iterable<UV> values() throws IOException, RocksDBException {
        final byte[] prefixBytes = serializeCurrentKeyAndNamespace();

        return new Iterable<UV>() {
            @Override
            public Iterator<UV> iterator() {
                return new RocksDBMapIterator<UV>(backend.db, prefixBytes) {
                    @Override
                    public UV next() {
                        RocksDBMapEntry entry = nextEntry();
                        return (entry == null ? null : entry.getValue());
                    }
                };
            }
        };
    }

    @Override
    public Iterator<Map.Entry<UK, UV>> iterator() throws IOException, RocksDBException {
        final byte[] prefixBytes = serializeCurrentKeyAndNamespace();

        return new RocksDBMapIterator<Map.Entry<UK, UV>>(backend.db, prefixBytes) {
            @Override
            public Map.Entry<UK, UV> next() {
                return nextEntry();
            }
        };
    }

    @Override
    public void clear() {
        try {
            // Remove every entry under the current key and namespace; individual
            // removals are issued through the entry objects returned by the iterator.
            Iterator<Map.Entry<UK, UV>> iterator = iterator();

            while (iterator.hasNext()) {
                iterator.next();
                iterator.remove();
            }
        } catch (Exception e) {
            LOG.warn("Error while cleaning the state.", e);
        }
    }

    @Override
    @SuppressWarnings("unchecked")
    public byte[] getSerializedValue(byte[] serializedKeyAndNamespace) throws Exception {
        Preconditions.checkNotNull(serializedKeyAndNamespace, "Serialized key and namespace");

        //TODO make KvStateSerializer key-group aware to save this round trip and key-group computation
        Tuple2<K, N> des = KvStateSerializer.deserializeKeyAndNamespace(
                serializedKeyAndNamespace,
                backend.getKeySerializer(),
                namespaceSerializer);

        int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(des.f0, backend.getNumberOfKeyGroups());

        ByteArrayOutputStreamWithPos outputStream = new ByteArrayOutputStreamWithPos(128);
        DataOutputViewStreamWrapper outputView = new DataOutputViewStreamWrapper(outputStream);
        writeKeyWithGroupAndNamespace(keyGroup, des.f0, des.f1, outputStream, outputView);
        final byte[] keyPrefixBytes = outputStream.toByteArray();

        final Iterator<Map.Entry<UK, UV>> iterator = new RocksDBMapIterator<Map.Entry<UK, UV>>(backend.db, keyPrefixBytes) {
            @Override
            public Map.Entry<UK, UV> next() {
                return nextEntry();
            }
        };

        // Return null to make the behavior consistent with other backends
        if (!iterator.hasNext()) {
            return null;
        }

        return KvStateSerializer.serializeMap(new Iterable<Map.Entry<UK, UV>>() {
            @Override
            public Iterator<Map.Entry<UK, UV>> iterator() {
                return iterator;
            }
        }, userKeySerializer, userValueSerializer);
    }

    // ------------------------------------------------------------------------
    //  Serialization Methods
    // ------------------------------------------------------------------------

    /** Serializes #KeyGroup#Key#Namespace as the common prefix for all user entries. */
    private byte[] serializeCurrentKeyAndNamespace() throws IOException {
        writeCurrentKeyWithGroupAndNamespace();

        return keySerializationStream.toByteArray();
    }

    /** Serializes #KeyGroup#Key#Namespace#UserKey, the full RocksDB key of one map entry. */
    private byte[] serializeUserKeyWithCurrentKeyAndNamespace(UK userKey) throws IOException {
        writeCurrentKeyWithGroupAndNamespace();
        userKeySerializer.serialize(userKey, keySerializationDataOutputView);

        return keySerializationStream.toByteArray();
    }

    /**
     * Serializes a user value, prefixing it with a null flag so that {@code null}
     * values can be stored and distinguished from absent keys.
     */
    private byte[] serializeUserValue(UV userValue) throws IOException {
        keySerializationStream.reset();

        if (userValue == null) {
            keySerializationDataOutputView.writeBoolean(true);
        } else {
            keySerializationDataOutputView.writeBoolean(false);
            userValueSerializer.serialize(userValue, keySerializationDataOutputView);
        }

        return keySerializationStream.toByteArray();
    }

    /** Reads the user key from a raw RocksDB key, skipping the key-group/key/namespace prefix. */
    private UK deserializeUserKey(byte[] rawKeyBytes) throws IOException {
        ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawKeyBytes);
        DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

        readKeyWithGroupAndNamespace(bais, in);

        return userKeySerializer.deserialize(in);
    }

    /** Reads a user value, honoring the null flag written by {@link #serializeUserValue}. */
    private UV deserializeUserValue(byte[] rawValueBytes) throws IOException {
        ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos(rawValueBytes);
        DataInputViewStreamWrapper in = new DataInputViewStreamWrapper(bais);

        boolean isNull = in.readBoolean();

        return isNull ? null : userValueSerializer.deserialize(in);
    }

    // ------------------------------------------------------------------------
    //  Internal Classes
    // ------------------------------------------------------------------------

    /** A map entry in RocksDBMapState. */
    private class RocksDBMapEntry implements Map.Entry<UK, UV> {
        private final RocksDB db;

        /** The raw bytes of the key stored in RocksDB. Each user key is stored in RocksDB
         * with the format #KeyGroup#Key#Namespace#UserKey. */
        private final byte[] rawKeyBytes;

        /** The raw bytes of the value stored in RocksDB. */
        private byte[] rawValueBytes;

        /** True if the entry has been deleted. */
        private boolean deleted;

        /** The user key and value. The deserialization is performed lazily, i.e. the key
         * and the value is deserialized only when they are accessed. */
        private UK userKey = null;
        private UV userValue = null;

        RocksDBMapEntry(final RocksDB db, final byte[] rawKeyBytes, final byte[] rawValueBytes) {
            this.db = db;

            this.rawKeyBytes = rawKeyBytes;
            this.rawValueBytes = rawValueBytes;
            this.deleted = false;
        }

        public void remove() {
            deleted = true;
            rawValueBytes = null;

            try {
                db.remove(columnFamily, writeOptions, rawKeyBytes);
            } catch (RocksDBException e) {
                throw new RuntimeException("Error while removing data from RocksDB.", e);
            }
        }

        @Override
        public UK getKey() {
            if (userKey == null) {
                try {
                    userKey = deserializeUserKey(rawKeyBytes);
                } catch (IOException e) {
                    // Preserve the original exception as the cause so that the
                    // failing serializer call is visible in the stack trace.
                    throw new RuntimeException("Error while deserializing the user key.", e);
                }
            }

            return userKey;
        }

        @Override
        public UV getValue() {
            if (deleted) {
                return null;
            } else {
                if (userValue == null) {
                    try {
                        userValue = deserializeUserValue(rawValueBytes);
                    } catch (IOException e) {
                        // Preserve the original exception as the cause (see getKey()).
                        throw new RuntimeException("Error while deserializing the user value.", e);
                    }
                }

                return userValue;
            }
        }

        @Override
        public UV setValue(UV value) {
            if (deleted) {
                throw new IllegalStateException("The value has already been deleted.");
            }

            UV oldValue = getValue();

            try {
                userValue = value;
                rawValueBytes = serializeUserValue(value);

                db.put(columnFamily, writeOptions, rawKeyBytes, rawValueBytes);
            } catch (IOException | RocksDBException e) {
                throw new RuntimeException("Error while putting data into RocksDB.", e);
            }

            return oldValue;
        }
    }

    /** An auxiliary utility to scan all entries under the given key. */
    private abstract class RocksDBMapIterator<T> implements Iterator<T> {

        static final int CACHE_SIZE_BASE = 1;
        static final int CACHE_SIZE_LIMIT = 128;

        /** The db where data resides. */
        private final RocksDB db;

        /**
         * The prefix bytes of the key being accessed. All entries under the same key
         * has the same prefix, hence we can stop the iterating once coming across an
         * entry with a different prefix.
         */
        private final byte[] keyPrefixBytes;

        /**
         * True if all entries have been accessed or the iterator has come across an
         * entry with a different prefix.
         */
        private boolean expired = false;

        /** A in-memory cache for the entries in the rocksdb. */
        private ArrayList<RocksDBMapEntry> cacheEntries = new ArrayList<>();
        private int cacheIndex = 0;

        RocksDBMapIterator(final RocksDB db, final byte[] keyPrefixBytes) {
            this.db = db;
            this.keyPrefixBytes = keyPrefixBytes;
        }

        @Override
        public boolean hasNext() {
            loadCache();

            return (cacheIndex < cacheEntries.size());
        }

        @Override
        public void remove() {
            if (cacheIndex == 0 || cacheIndex > cacheEntries.size()) {
                throw new IllegalStateException("The remove operation must be called after an valid next operation.");
            }

            RocksDBMapEntry lastEntry = cacheEntries.get(cacheIndex - 1);
            lastEntry.remove();
        }

        final RocksDBMapEntry nextEntry() {
            loadCache();

            if (cacheIndex == cacheEntries.size()) {
                if (!expired) {
                    throw new IllegalStateException();
                }

                return null;
            }

            RocksDBMapEntry entry = cacheEntries.get(cacheIndex);
            cacheIndex++;

            return entry;
        }

        private void loadCache() {
            if (cacheIndex > cacheEntries.size()) {
                throw new IllegalStateException();
            }

            // Load cache entries only when the cache is empty and there still exist unread entries
            if (cacheIndex < cacheEntries.size() || expired) {
                return;
            }

            RocksIterator iterator = db.newIterator(columnFamily);

            try {
                /*
                 * The iteration starts from the prefix bytes at the first loading. The cache then is
                 * reloaded when the next entry to return is the last one in the cache. At that time,
                 * we will start the iterating from the last returned entry.
                 */
                RocksDBMapEntry lastEntry = cacheEntries.size() == 0 ? null : cacheEntries.get(cacheEntries.size() - 1);
                byte[] startBytes = (lastEntry == null ? keyPrefixBytes : lastEntry.rawKeyBytes);
                int numEntries = (lastEntry == null ? CACHE_SIZE_BASE : Math.min(cacheEntries.size() * 2, CACHE_SIZE_LIMIT));

                cacheEntries.clear();
                cacheIndex = 0;

                iterator.seek(startBytes);

                /*
                 * If the last returned entry is not deleted, it will be the first entry in the
                 * iterating. Skip it to avoid redundant access in such cases.
                 */
                if (lastEntry != null && !lastEntry.deleted) {
                    iterator.next();
                }

                while (true) {
                    if (!iterator.isValid() || !underSameKey(iterator.key())) {
                        expired = true;
                        break;
                    }

                    if (cacheEntries.size() >= numEntries) {
                        break;
                    }

                    RocksDBMapEntry entry = new RocksDBMapEntry(db, iterator.key(), iterator.value());
                    cacheEntries.add(entry);

                    iterator.next();
                }
            } finally {
                // Always release the native iterator, even if RocksDB access or
                // prefix comparison throws; otherwise native memory is leaked.
                iterator.close();
            }
        }

        private boolean underSameKey(byte[] rawKeyBytes) {
            if (rawKeyBytes.length < keyPrefixBytes.length) {
                return false;
            }

            for (int i = 0; i < keyPrefixBytes.length; ++i) {
                if (rawKeyBytes[i] != keyPrefixBytes[i]) {
                    return false;
                }
            }

            return true;
        }
    }
}
| |
// $ANTLR 3.1.3 Mar 18, 2009 10:09:25 /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g 2010-08-18 11:16:21
/**
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.flume.shell.antlr;
import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
import org.antlr.runtime.tree.*;
/**
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class FlumeShellParser extends Parser {
// Generated token-name table: the index of each name corresponds to the token
// type constants declared below. Produced by ANTLR from FlumeShell.g.
public static final String[] tokenNames = new String[] {
    "<invalid>", "<EOR>", "<DOWN>", "<UP>", "CMD", "DQUOTE", "SQUOTE", "STRING", "DQuoteLiteral", "SQuoteLiteral", "Argument", "HexDigit", "EscapeSequence", "UnicodeEscape", "OctalEscape", "Letter", "JavaIDDigit", "WS", "LINE_COMMENT", "';'"
};
// Token type constants generated by ANTLR; each value must match the index of
// the corresponding entry in tokenNames above. Do not edit by hand — regenerate
// from FlumeShell.g instead.
public static final int DQuoteLiteral=8;
public static final int LINE_COMMENT=18;
public static final int CMD=4;
public static final int DQUOTE=5;
public static final int SQUOTE=6;
public static final int EOF=-1;
public static final int HexDigit=11;
public static final int T__19=19; // anonymous token for the ';' literal
public static final int WS=17;
public static final int UnicodeEscape=13;
public static final int SQuoteLiteral=9;
public static final int JavaIDDigit=16;
public static final int Argument=10;
public static final int EscapeSequence=12;
public static final int OctalEscape=14;
public static final int Letter=15;
public static final int STRING=7;
// delegates
// delegators
// Convenience constructor: builds the parser with a fresh shared recognizer state.
public FlumeShellParser(TokenStream input) {
    this(input, new RecognizerSharedState());
}
// Primary constructor: used when the recognizer state is shared, e.g. by
// composed/imported grammars (standard ANTLR-generated shape).
public FlumeShellParser(TokenStream input, RecognizerSharedState state) {
    super(input, state);
}
// Adaptor used to construct AST nodes during rule rewrites; defaults to
// ANTLR's CommonTreeAdaptor.
protected TreeAdaptor adaptor = new CommonTreeAdaptor();

// Installs a custom tree adaptor (invoked by ANTLR tooling or callers that
// want a different AST node type).
public void setTreeAdaptor(TreeAdaptor adaptor) {
    this.adaptor = adaptor;
}
// Returns the adaptor currently used to build AST nodes.
public TreeAdaptor getTreeAdaptor() {
    return adaptor;
}
// Exposes the generated token-name table (used by the runtime for error messages).
public String[] getTokenNames() { return FlumeShellParser.tokenNames; }
// Path of the grammar file this parser was generated from (embedded by ANTLR).
public String getGrammarFileName() { return "/home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g"; }
/**
 * Fails fast on any parse error instead of letting ANTLR attempt recovery:
 * the recognition exception is rethrown as an unchecked exception so the
 * shell can surface it to the user.
 *
 * @param re the recognition failure reported by the generated rule code
 * @throws RuntimeException always; wraps {@code re} and preserves it as the
 *         cause so the original stack trace is not lost
 */
public void reportError(RecognitionException re) {
    throw new RuntimeException("Parser Error: " + re, re);
    // throw re; // TODO (jon) provide more info on a parser fail
}
// Return scope for the "lines" rule; carries the rewritten AST produced by lines().
public static class lines_return extends ParserRuleReturnScope {
    Object tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "lines"
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:86:1: lines : command ( ';' command )* EOF -> ( command )+ ;
/**
 * Generated rule "lines": {@code command ( ';' command )* EOF -> ( command )+ }.
 * Parses an entire input of semicolon-separated commands and rewrites the
 * result into a flat sequence of command subtrees. Machine-generated by
 * ANTLR from FlumeShell.g — do not edit by hand; regenerate instead.
 */
public final FlumeShellParser.lines_return lines() throws RecognitionException {
    FlumeShellParser.lines_return retval = new FlumeShellParser.lines_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    Token char_literal2=null;
    Token EOF4=null;
    FlumeShellParser.command_return command1 = null;
    FlumeShellParser.command_return command3 = null;
    Object char_literal2_tree=null;
    Object EOF4_tree=null;
    // Rewrite streams buffer matched tokens/subtrees for the "-> ( command )+" rewrite.
    RewriteRuleTokenStream stream_19=new RewriteRuleTokenStream(adaptor,"token 19");
    RewriteRuleTokenStream stream_EOF=new RewriteRuleTokenStream(adaptor,"token EOF");
    RewriteRuleSubtreeStream stream_command=new RewriteRuleSubtreeStream(adaptor,"rule command");
    try {
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:86:7: ( command ( ';' command )* EOF -> ( command )+ )
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:86:9: command ( ';' command )* EOF
        {
        // Match the mandatory first command.
        pushFollow(FOLLOW_command_in_lines85);
        command1=command();
        state._fsp--;
        stream_command.add(command1.getTree());
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:86:17: ( ';' command )*
        // Loop over any further ";"-separated commands (token type 19 is ';').
        loop1:
        do {
            int alt1=2;
            int LA1_0 = input.LA(1);
            if ( (LA1_0==19) ) {
                alt1=1;
            }
            switch (alt1) {
                case 1 :
                    // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:86:18: ';' command
                    {
                    char_literal2=(Token)match(input,19,FOLLOW_19_in_lines88);
                    stream_19.add(char_literal2);
                    pushFollow(FOLLOW_command_in_lines90);
                    command3=command();
                    state._fsp--;
                    stream_command.add(command3.getTree());
                    }
                    break;
                default :
                    break loop1;
            }
        } while (true);
        EOF4=(Token)match(input,EOF,FOLLOW_EOF_in_lines94);
        stream_EOF.add(EOF4);
        // AST REWRITE
        // elements: command
        // token labels:
        // rule labels: retval
        // token list labels:
        // rule list labels:
        // wildcard labels:
        retval.tree = root_0;
        RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
        root_0 = (Object)adaptor.nil();
        // 86:36: -> ( command )+
        {
            // Emit each buffered command subtree as a child of the flat root.
            if ( !(stream_command.hasNext()) ) {
                throw new RewriteEarlyExitException();
            }
            while ( stream_command.hasNext() ) {
                adaptor.addChild(root_0, stream_command.nextTree());
            }
            stream_command.reset();
        }
        retval.tree = root_0;
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // NOTE: reportError in this class rethrows, so recover/errorNode below
        // are effectively unreachable; kept as generated.
        reportError(re);
        recover(input,re);
        retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
    }
    return retval;
}
// $ANTLR end "lines"
// Return scope for the "line" rule; carries the AST of a single parsed command.
public static class line_return extends ParserRuleReturnScope {
    Object tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "line"
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:88:1: line : command EOF -> command ;
/**
 * Generated rule "line": {@code command EOF -> command}. Parses exactly one
 * command terminated by end-of-input and returns its subtree. Machine-generated
 * by ANTLR from FlumeShell.g — do not edit by hand; regenerate instead.
 */
public final FlumeShellParser.line_return line() throws RecognitionException {
    FlumeShellParser.line_return retval = new FlumeShellParser.line_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    Token EOF6=null;
    FlumeShellParser.command_return command5 = null;
    Object EOF6_tree=null;
    // Rewrite streams buffer the matched EOF token and command subtree.
    RewriteRuleTokenStream stream_EOF=new RewriteRuleTokenStream(adaptor,"token EOF");
    RewriteRuleSubtreeStream stream_command=new RewriteRuleSubtreeStream(adaptor,"rule command");
    try {
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:88:6: ( command EOF -> command )
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:88:8: command EOF
        {
        pushFollow(FOLLOW_command_in_line107);
        command5=command();
        state._fsp--;
        stream_command.add(command5.getTree());
        EOF6=(Token)match(input,EOF,FOLLOW_EOF_in_line109);
        stream_EOF.add(EOF6);
        // AST REWRITE
        // elements: command
        // token labels:
        // rule labels: retval
        // token list labels:
        // rule list labels:
        // wildcard labels:
        retval.tree = root_0;
        RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
        root_0 = (Object)adaptor.nil();
        // 88:20: -> command
        {
            // The rewrite drops the EOF token and keeps only the command subtree.
            adaptor.addChild(root_0, stream_command.nextTree());
        }
        retval.tree = root_0;
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // NOTE: reportError in this class rethrows, so recovery below is
        // effectively unreachable; kept as generated.
        reportError(re);
        recover(input,re);
        retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
    }
    return retval;
}
// $ANTLR end "line"
// Return scope for the "command" rule; carries the ^(CMD literal+) subtree.
public static class command_return extends ParserRuleReturnScope {
    Object tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "command"
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:1: command : ( literal )+ -> ^( CMD ( literal )+ ) ;
/**
 * Generated rule "command": {@code ( literal )+ -> ^( CMD ( literal )+ ) }.
 * Matches one or more literals (arguments) and rewrites them under a single
 * CMD root node. Machine-generated by ANTLR from FlumeShell.g — do not edit
 * by hand; regenerate instead.
 */
public final FlumeShellParser.command_return command() throws RecognitionException {
    FlumeShellParser.command_return retval = new FlumeShellParser.command_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    FlumeShellParser.literal_return literal7 = null;
    // Buffers each matched literal subtree for the "^(CMD literal+)" rewrite.
    RewriteRuleSubtreeStream stream_literal=new RewriteRuleSubtreeStream(adaptor,"rule literal");
    try {
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:9: ( ( literal )+ -> ^( CMD ( literal )+ ) )
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:11: ( literal )+
        {
        // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:11: ( literal )+
        // (...)+ loop: at least one literal is required (cnt2 tracks the count).
        int cnt2=0;
        loop2:
        do {
            int alt2=2;
            int LA2_0 = input.LA(1);
            // Lookahead: token types DQuoteLiteral(8)..Argument(10) start a literal.
            if ( ((LA2_0>=DQuoteLiteral && LA2_0<=Argument)) ) {
                alt2=1;
            }
            switch (alt2) {
                case 1 :
                    // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:11: literal
                    {
                    pushFollow(FOLLOW_literal_in_command122);
                    literal7=literal();
                    state._fsp--;
                    stream_literal.add(literal7.getTree());
                    }
                    break;
                default :
                    // Zero literals matched so far means the (...)+ requirement failed.
                    if ( cnt2 >= 1 ) break loop2;
                    EarlyExitException eee =
                        new EarlyExitException(2, input);
                    throw eee;
            }
            cnt2++;
        } while (true);
        // AST REWRITE
        // elements: literal
        // token labels:
        // rule labels: retval
        // token list labels:
        // rule list labels:
        // wildcard labels:
        retval.tree = root_0;
        RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
        root_0 = (Object)adaptor.nil();
        // 90:20: -> ^( CMD ( literal )+ )
        {
            // /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:90:23: ^( CMD ( literal )+ )
            {
            // Create the imaginary CMD root and hang every literal subtree beneath it.
            Object root_1 = (Object)adaptor.nil();
            root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(CMD, "CMD"), root_1);
            if ( !(stream_literal.hasNext()) ) {
                throw new RewriteEarlyExitException();
            }
            while ( stream_literal.hasNext() ) {
                adaptor.addChild(root_1, stream_literal.nextTree());
            }
            stream_literal.reset();
            adaptor.addChild(root_0, root_1);
            }
        }
        retval.tree = root_0;
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch (RecognitionException re) {
        // NOTE: reportError in this class rethrows, so recovery below is
        // effectively unreachable; kept as generated.
        reportError(re);
        recover(input,re);
        retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
    }
    return retval;
}
// $ANTLR end "command"
/** Return scope for the {@code literal} parser rule, carrying the rewritten AST. */
public static class literal_return extends ParserRuleReturnScope {
    /** Root node of the AST produced by the rule's rewrite. */
    Object tree;

    /** @return the AST root built while matching {@code literal} */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "literal"
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:93:1: literal : ( DQuoteLiteral -> ^( DQUOTE DQuoteLiteral ) | SQuoteLiteral -> ^( SQUOTE SQuoteLiteral ) | Argument -> ^( STRING Argument ) );
// NOTE(review): ANTLR-generated code -- regenerate from FlumeShell.g rather than hand-editing.
/**
 * Parses the {@code literal} rule: a double-quoted literal, single-quoted
 * literal, or bare argument, each rewritten under an imaginary root token
 * (DQUOTE, SQUOTE, or STRING respectively).
 *
 * @return the rule return scope whose {@code tree} is the rewritten node
 * @throws RecognitionException if none of the three alternatives matches
 *         (reported and recovered from; an error node is returned)
 */
public final FlumeShellParser.literal_return literal() throws RecognitionException {
FlumeShellParser.literal_return retval = new FlumeShellParser.literal_return();
retval.start = input.LT(1);
Object root_0 = null;
Token DQuoteLiteral8=null;
Token SQuoteLiteral9=null;
Token Argument10=null;
Object DQuoteLiteral8_tree=null;
Object SQuoteLiteral9_tree=null;
Object Argument10_tree=null;
RewriteRuleTokenStream stream_Argument=new RewriteRuleTokenStream(adaptor,"token Argument");
RewriteRuleTokenStream stream_DQuoteLiteral=new RewriteRuleTokenStream(adaptor,"token DQuoteLiteral");
RewriteRuleTokenStream stream_SQuoteLiteral=new RewriteRuleTokenStream(adaptor,"token SQuoteLiteral");
try {
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:94:5: ( DQuoteLiteral -> ^( DQUOTE DQuoteLiteral ) | SQuoteLiteral -> ^( SQUOTE SQuoteLiteral ) | Argument -> ^( STRING Argument ) )
// One token of lookahead selects the alternative (alt3).
int alt3=3;
switch ( input.LA(1) ) {
case DQuoteLiteral:
{
alt3=1;
}
break;
case SQuoteLiteral:
{
alt3=2;
}
break;
case Argument:
{
alt3=3;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 3, 0, input);
throw nvae;
}
switch (alt3) {
case 1 :
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:94:9: DQuoteLiteral
{
DQuoteLiteral8=(Token)match(input,DQuoteLiteral,FOLLOW_DQuoteLiteral_in_literal150);
stream_DQuoteLiteral.add(DQuoteLiteral8);
// AST REWRITE
// elements: DQuoteLiteral
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 94:23: -> ^( DQUOTE DQuoteLiteral )
{
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:94:26: ^( DQUOTE DQuoteLiteral )
{
// Wrap the matched token under an imaginary DQUOTE root.
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(DQUOTE, "DQUOTE"), root_1);
adaptor.addChild(root_1, stream_DQuoteLiteral.nextNode());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
case 2 :
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:95:9: SQuoteLiteral
{
SQuoteLiteral9=(Token)match(input,SQuoteLiteral,FOLLOW_SQuoteLiteral_in_literal170);
stream_SQuoteLiteral.add(SQuoteLiteral9);
// AST REWRITE
// elements: SQuoteLiteral
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 95:23: -> ^( SQUOTE SQuoteLiteral )
{
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:95:26: ^( SQUOTE SQuoteLiteral )
{
// Wrap the matched token under an imaginary SQUOTE root.
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(SQUOTE, "SQUOTE"), root_1);
adaptor.addChild(root_1, stream_SQuoteLiteral.nextNode());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
case 3 :
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:96:9: Argument
{
Argument10=(Token)match(input,Argument,FOLLOW_Argument_in_literal188);
stream_Argument.add(Argument10);
// AST REWRITE
// elements: Argument
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 96:21: -> ^( STRING Argument )
{
// /home/patrick/Documents/flume-git2/flume/src/antlr/FlumeShell.g:96:24: ^( STRING Argument )
{
// Wrap the matched token under an imaginary STRING root.
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(STRING, "STRING"), root_1);
adaptor.addChild(root_1, stream_Argument.nextNode());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch (RecognitionException re) {
// Standard ANTLR recovery: report, resync, and return an error node
// spanning the consumed tokens instead of propagating the exception.
reportError(re);
recover(input,re);
retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
}
finally {
}
return retval;
}
// $ANTLR end "literal"
// Delegated rules
// FOLLOW sets computed by ANTLR: for each rule/token reference site, the bit
// set of token types that may legally follow it. Used by the generated
// match()/recovery machinery for single-token error resynchronization.
// The trailing number in each name is the grammar element's position index.
public static final BitSet FOLLOW_command_in_lines85 = new BitSet(new long[]{0x0000000000080000L});
public static final BitSet FOLLOW_19_in_lines88 = new BitSet(new long[]{0x0000000000000700L});
public static final BitSet FOLLOW_command_in_lines90 = new BitSet(new long[]{0x0000000000080000L});
public static final BitSet FOLLOW_EOF_in_lines94 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_command_in_line107 = new BitSet(new long[]{0x0000000000000000L});
public static final BitSet FOLLOW_EOF_in_line109 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_literal_in_command122 = new BitSet(new long[]{0x0000000000000702L});
public static final BitSet FOLLOW_DQuoteLiteral_in_literal150 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_SQuoteLiteral_in_literal170 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_Argument_in_literal188 = new BitSet(new long[]{0x0000000000000002L});
}
| |
package com.kii.thing_if.query;
import com.kii.thing_if.clause.query.EqualsClauseInQuery;
import com.kii.thing_if.clause.query.NotEqualsClauseInQuery;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
@RunWith(RobolectricTestRunner.class)
public class HistoryStatesQueryTest {

    /** Asserts that the two queries are equal and agree on hashCode. */
    private static void assertEqualWithSameHash(HistoryStatesQuery target, HistoryStatesQuery same) {
        Assert.assertTrue(target.equals(same));
        Assert.assertEquals(target.hashCode(), same.hashCode());
    }

    /**
     * Asserts that the two queries are unequal and have differing hash codes.
     *
     * FIX(review): the original used
     * {@code Assert.assertNotSame(a.hashCode(), b.hashCode())}, which autoboxes
     * both primitive ints to Integer and compares object *references* -- for
     * typical hash values that assertion always passes and verified nothing.
     * We compare the int values instead.
     */
    private static void assertNotEqualWithDifferentHash(HistoryStatesQuery target, HistoryStatesQuery different) {
        Assert.assertFalse(target.equals(different));
        Assert.assertFalse(target.hashCode() == different.hashCode());
    }

    @Test
    public void baseTest() {
        EqualsClauseInQuery query = new EqualsClauseInQuery("dummy", "value");
        HistoryStatesQuery target = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .build();
        HistoryStatesQuery sameOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .build();
        HistoryStatesQuery differentOne = HistoryStatesQuery.Builder
                .newBuilder(
                        "alias",
                        new NotEqualsClauseInQuery(query))
                .build();
        assertEqualWithSameHash(target, sameOne);
        assertNotEqualWithDifferentHash(target, differentOne);
        // equals must be null-safe and reject foreign types.
        Assert.assertFalse(target.equals(null));
        Assert.assertFalse(target.equals((Object)query));
    }

    @Test
    public void equals_hashCode_FirmwareVersionTest() {
        EqualsClauseInQuery query = new EqualsClauseInQuery("dummy", "value");
        HistoryStatesQuery target = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .build();
        HistoryStatesQuery sameOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .build();
        HistoryStatesQuery differentOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("different")
                .build();
        // firmwareVersion left unset: equality must also distinguish null.
        HistoryStatesQuery differentNull = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .build();
        assertEqualWithSameHash(target, sameOne);
        assertNotEqualWithDifferentHash(target, differentOne);
        assertNotEqualWithDifferentHash(target, differentNull);
        Assert.assertFalse(target.equals(null));
        Assert.assertFalse(target.equals((Object)query));
    }

    @Test
    public void equals_hashCode_BestEffortLimitTest() {
        EqualsClauseInQuery query = new EqualsClauseInQuery("dummy", "value");
        HistoryStatesQuery target = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .build();
        HistoryStatesQuery sameOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .build();
        HistoryStatesQuery differentOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(200)
                .build();
        // bestEffortLimit left unset: equality must also distinguish null.
        HistoryStatesQuery differentNull = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .build();
        assertEqualWithSameHash(target, sameOne);
        assertNotEqualWithDifferentHash(target, differentOne);
        assertNotEqualWithDifferentHash(target, differentNull);
        Assert.assertFalse(target.equals(null));
        Assert.assertFalse(target.equals((Object)query));
    }

    @Test
    public void equals_hashCode_NextPaginationKeyTest() {
        EqualsClauseInQuery query = new EqualsClauseInQuery("dummy", "value");
        HistoryStatesQuery target = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .setNextPaginationKey("key")
                .build();
        HistoryStatesQuery sameOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .setNextPaginationKey("key")
                .build();
        HistoryStatesQuery differentOne = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .setNextPaginationKey("different")
                .build();
        // nextPaginationKey left unset: equality must also distinguish null.
        HistoryStatesQuery differentNull = HistoryStatesQuery.Builder.newBuilder("alias", query)
                .setFirmwareVersion("version")
                .setBestEffortLimit(10)
                .build();
        assertEqualWithSameHash(target, sameOne);
        assertNotEqualWithDifferentHash(target, differentOne);
        assertNotEqualWithDifferentHash(target, differentNull);
        Assert.assertFalse(target.equals(null));
        Assert.assertFalse(target.equals((Object)query));
    }

    @Test
    public void basicBuilderTest() {
        EqualsClauseInQuery clause = new EqualsClauseInQuery("power", true);
        // only has required ones
        HistoryStatesQuery query = HistoryStatesQuery.Builder.newBuilder("alias1", clause).build();
        Assert.assertEquals("alias1", query.getAlias());
        Assert.assertTrue(clause.equals(query.getClause()));
        // only has one optional field (firmwareVersion, bestEffortLimit or nextPaginationKey)
        HistoryStatesQuery query1 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setFirmwareVersion("v1").build();
        Assert.assertEquals("alias1", query1.getAlias());
        Assert.assertTrue(clause.equals(query1.getClause()));
        Assert.assertEquals("v1", query1.getFirmwareVersion());
        HistoryStatesQuery query2 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setBestEffortLimit(5).build();
        Assert.assertEquals("alias1", query2.getAlias());
        Assert.assertTrue(clause.equals(query2.getClause()));
        Assert.assertNotNull(query2.getBestEffortLimit());
        Assert.assertEquals(5, query2.getBestEffortLimit().intValue());
        HistoryStatesQuery query3 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setNextPaginationKey("100/2").build();
        Assert.assertEquals("alias1", query3.getAlias());
        Assert.assertTrue(clause.equals(query3.getClause()));
        Assert.assertEquals("100/2", query3.getNextPaginationKey());
        // has 2 optional fields
        HistoryStatesQuery query4 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setFirmwareVersion("v1")
                .setBestEffortLimit(5).build();
        Assert.assertEquals("alias1", query4.getAlias());
        Assert.assertTrue(clause.equals(query4.getClause()));
        Assert.assertEquals("v1", query4.getFirmwareVersion());
        Assert.assertNotNull(query4.getBestEffortLimit());
        Assert.assertEquals(5, query4.getBestEffortLimit().intValue());
        HistoryStatesQuery query5 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setBestEffortLimit(5)
                .setNextPaginationKey("100/2").build();
        Assert.assertEquals("alias1", query5.getAlias());
        Assert.assertTrue(clause.equals(query5.getClause()));
        Assert.assertNotNull(query5.getBestEffortLimit());
        Assert.assertEquals(5, query5.getBestEffortLimit().intValue());
        Assert.assertEquals("100/2", query5.getNextPaginationKey());
        HistoryStatesQuery query6 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setNextPaginationKey("100/2")
                .setFirmwareVersion("v1").build();
        Assert.assertEquals("alias1", query6.getAlias());
        Assert.assertTrue(clause.equals(query6.getClause()));
        Assert.assertEquals("100/2", query6.getNextPaginationKey());
        Assert.assertEquals("v1", query6.getFirmwareVersion());
        // has all 3 optional fields
        HistoryStatesQuery query7 = HistoryStatesQuery.Builder.newBuilder("alias1", clause)
                .setNextPaginationKey("100/2")
                .setBestEffortLimit(5)
                .setFirmwareVersion("v1").build();
        Assert.assertEquals("alias1", query7.getAlias());
        Assert.assertTrue(clause.equals(query7.getClause()));
        Assert.assertEquals("100/2", query7.getNextPaginationKey());
        Assert.assertEquals("v1", query7.getFirmwareVersion());
        Assert.assertNotNull(query7.getBestEffortLimit());
        Assert.assertEquals(5, query7.getBestEffortLimit().intValue());
    }

    @Test(expected = IllegalArgumentException.class)
    public void builder_build_with_nullAlias_Test() {
        EqualsClauseInQuery clause = new EqualsClauseInQuery("power", true);
        HistoryStatesQuery.Builder.newBuilder(null, clause).build();
    }

    @Test(expected = IllegalArgumentException.class)
    public void builder_build_with_emptyAlias_Test() {
        EqualsClauseInQuery clause = new EqualsClauseInQuery("power", true);
        HistoryStatesQuery.Builder.newBuilder("", clause).build();
    }

    @Test(expected = IllegalArgumentException.class)
    public void builder_build_with_nullClause_Test() {
        HistoryStatesQuery.Builder.newBuilder("alias1", null).build();
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.services.persondir.support;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.apereo.services.persondir.IPersonAttributeDaoFilter;
import org.apereo.services.persondir.IPersonAttributes;
import org.apereo.services.persondir.AbstractPersonAttributeDaoTest;
import org.apereo.services.persondir.IPersonAttributeDao;
import org.apereo.services.persondir.util.CaseCanonicalizationMode;
import org.apereo.services.persondir.util.Util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author Eric Dalquist
*/
/**
 * Tests for {@code AbstractQueryPersonAttributeDao}: query-attribute mapping,
 * result-attribute mapping, and case-insensitive result canonicalization.
 *
 * @author Eric Dalquist
 */
public class AbstractQueryPersonAttributeDaoTest extends AbstractPersonAttributeDaoTest {
    private TestQueryPersonAttributeDao testQueryPersonAttributeDao;

    /**
     * @see junit.framework.TestCase#setUp()
     */
    @Override
    protected void setUp() throws Exception {
        this.testQueryPersonAttributeDao = new TestQueryPersonAttributeDao();
    }

    /**
     * @see junit.framework.TestCase#tearDown()
     */
    @Override
    protected void tearDown() throws Exception {
        this.testQueryPersonAttributeDao = null;
    }

    /** A plain username query should be passed through as a single query argument. */
    public void testDefaultAttributeNameUsage() {
        this.testQueryPersonAttributeDao.getUserAttributes("eric", IPersonAttributeDaoFilter.alwaysChoose());
        final List<List<Object>> args = this.testQueryPersonAttributeDao.getArgs();
        //Do asList for an easy comparison
        assertEquals(Collections.singletonList(Collections.singletonList("eric")), args);
    }

    /** With useAllQueryAttributes disabled and no mapping, no query should be built. */
    public void testNoQueryAttributeMapping() {
        this.testQueryPersonAttributeDao.getUserAttributes("eric", IPersonAttributeDaoFilter.alwaysChoose());
        final List<List<Object>> args1 = this.testQueryPersonAttributeDao.getArgs();
        assertEquals(Arrays.asList(Arrays.asList("eric")), args1);
        this.testQueryPersonAttributeDao.setUseAllQueryAttributes(false);
        this.testQueryPersonAttributeDao.getUserAttributes("eric", IPersonAttributeDaoFilter.alwaysChoose());
        final List<List<Object>> args2 = this.testQueryPersonAttributeDao.getArgs();
        assertNull(args2);
    }

    /**
     * A seed that does not satisfy the configured query-attribute mapping must
     * produce no query at all.
     * (Method name typo "Insuffcient" kept for test-report continuity.)
     */
    public void testInsuffcientSeed() {
        final Map<String, String> queryAttributes = new LinkedHashMap<>();
        queryAttributes.put("userid", null);
        this.testQueryPersonAttributeDao.setQueryAttributeMapping(queryAttributes);
        this.testQueryPersonAttributeDao.getUserAttributes("eric", IPersonAttributeDaoFilter.alwaysChoose());
        final List<List<Object>> args = this.testQueryPersonAttributeDao.getArgs();
        assertNull(args);
    }

    /** Multiple mapped query attributes should each contribute a query argument. */
    public void testCustomAttributes() {
        final Map<String, String> queryAttributes = new LinkedHashMap<>();
        queryAttributes.put("name.first", null);
        queryAttributes.put("name.last", null);
        this.testQueryPersonAttributeDao.setQueryAttributeMapping(queryAttributes);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("name.first", Collections.singletonList((Object) "eric"));
        seed.put("name.last", Collections.singletonList((Object) "dalquist"));
        this.testQueryPersonAttributeDao.getMultivaluedUserAttributes(seed, IPersonAttributeDaoFilter.alwaysChoose());
        final List<List<Object>> args = this.testQueryPersonAttributeDao.getArgs();
        final Object[] expectedArgs = new Object[]{Collections.singletonList("eric"), Collections.singletonList("dalquist")};
        //Do asList for an easy comparison
        assertTrue(Arrays.asList(expectedArgs).containsAll(args));
    }

    public void testMapPersonAttributes_AsIs() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("edalquist"));
        storedAttrs.put("name.first", Util.list("eric"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        // By default should just echo attribs from data layer as-is
        assertEquals("edalquist", result.getName());
        assertEquals(Util.genList("edalquist"), result.getAttributeValues("username"));
        assertEquals(Util.genList("eric"), result.getAttributeValues("name.first"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("name.last"));
    }

    public void testMapPersonAttributes_Mapped() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("edalquist"));
        storedAttrs.put("name.first", Util.list("eric"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        final Map<String, String> resultAttributeMappings = new LinkedHashMap<>();
        resultAttributeMappings.put("name.first", "fname");
        resultAttributeMappings.put("name.last", "lname");
        dao.setResultAttributeMapping(resultAttributeMappings);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        assertEquals("edalquist", result.getName());
        // Don't actually get a username attribute in this case because it's
        // not in the result attribute mappings. But it *is* successfully mapped
        // into the special "name" property on the IPersonAttributes as asserted
        // above
        assertEquals(Util.genList("eric"), result.getAttributeValues("fname"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("lname"));
    }

    public void testMapPersonAttributes_CaseInsensitive() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("edalquist"));
        storedAttrs.put("name.first", Util.list("eric"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        final Map<String, CaseCanonicalizationMode> caseInsensitiveAttributes = new HashMap<>();
        caseInsensitiveAttributes.put("name.first", CaseCanonicalizationMode.UPPER);
        dao.setCaseInsensitiveResultAttributes(caseInsensitiveAttributes);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        // Only the configured attribute is canonicalized (to upper case here);
        // everything else echoes the data layer as-is
        assertEquals("edalquist", result.getName());
        assertEquals(Util.genList("edalquist"), result.getAttributeValues("username"));
        assertEquals(Util.genList("ERIC"), result.getAttributeValues("name.first"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("name.last"));
    }

    public void testMapPersonAttributes_MappedCaseInsensitive() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("edalquist"));
        storedAttrs.put("name.first", Util.list("eric"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        // Note: canonicalization is keyed by the *mapped* attribute name ("fname")
        final Map<String, CaseCanonicalizationMode> caseInsensitiveAttributes = new HashMap<>();
        caseInsensitiveAttributes.put("fname", CaseCanonicalizationMode.UPPER);
        dao.setCaseInsensitiveResultAttributes(caseInsensitiveAttributes);
        final Map<String, String> resultAttributeMappings = new LinkedHashMap<>();
        resultAttributeMappings.put("name.first", "fname");
        resultAttributeMappings.put("name.last", "lname");
        dao.setResultAttributeMapping(resultAttributeMappings);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        assertEquals("edalquist", result.getName());
        // Don't actually get a username attribute in this case because it's
        // not in the result attribute mappings. But it *is* successfully mapped
        // into the special "name" property on the IPersonAttributes as asserted
        // above
        assertEquals(Util.genList("ERIC"), result.getAttributeValues("fname"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("lname"));
    }

    public void testMapPersonAttributes_CaseInsensitiveDefaultCanonicalization() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("EDALQUIST"));
        storedAttrs.put("name.first", Util.list("ERIC"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        // Not setting the CaseCanonicalizationMode here nor with an explicit
        // setter
        final Collection<String> caseInsensitiveAttributes = new HashSet<>();
        caseInsensitiveAttributes.add("username");
        caseInsensitiveAttributes.add("name.first");
        dao.setCaseInsensitiveResultAttributesAsCollection(caseInsensitiveAttributes);
        // Without this the username *attribute* will be canonicalized correctly
        // but the special username ("name", actually) *property* on
        // IPersonAttributes won't be. See test below
        dao.setUsernameCaseCanonicalizationMode(CaseCanonicalizationMode.LOWER);
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        // Default canonicalization mode lower-cases both listed attributes
        assertEquals("edalquist", result.getName());
        assertEquals(Util.genList("edalquist"), result.getAttributeValues("username"));
        assertEquals(Util.genList("eric"), result.getAttributeValues("name.first"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("name.last"));
    }

    public void testMapPersonAttributes_IndependentUsernameCanonicalization() {
        final Map<String, List<Object>> storedAttrs = new HashMap<>();
        storedAttrs.put("username", Util.list("EDALQUIST"));
        storedAttrs.put("name.first", Util.list("ERIC"));
        storedAttrs.put("name.last", Util.list("dalquist"));
        final InMemoryAbstractQueryPersonAttributeDao dao = new InMemoryAbstractQueryPersonAttributeDao(storedAttrs);
        // Not setting the CaseCanonicalizationMode here nor with an explicit
        // setter
        final Collection<String> caseInsensitiveAttributes = new HashSet<>();
        caseInsensitiveAttributes.add("username");
        caseInsensitiveAttributes.add("name.first");
        dao.setCaseInsensitiveResultAttributesAsCollection(caseInsensitiveAttributes);
        // Intentionally *not* calling setUsernameCaseCanonicalizationMode()
        final Map<String, List<Object>> seed = new HashMap<>();
        seed.put("username", Collections.singletonList((Object) "edalquist"));
        final Set<IPersonAttributes> allResults = dao.getPeopleWithMultivaluedAttributes(seed,
            IPersonAttributeDaoFilter.alwaysChoose());
        assertEquals(1, allResults.size());
        final IPersonAttributes result = allResults.iterator().next();
        // Username canonicalization always independent, for better or worse,
        // of attribute canonicalization. See setUsernameCaseCanonicalizationMode()
        assertEquals("EDALQUIST", result.getName());
        assertEquals(Util.genList("edalquist"), result.getAttributeValues("username"));
        assertEquals(Util.genList("eric"), result.getAttributeValues("name.first"));
        assertEquals(Util.genList("dalquist"), result.getAttributeValues("name.last"));
    }

    @Override
    protected IPersonAttributeDao getPersonAttributeDaoInstance() {
        return testQueryPersonAttributeDao;
    }

    /** DAO backed by an in-memory stub; the query itself is ignored, the stub's data is always returned. */
    private static class InMemoryAbstractQueryPersonAttributeDao extends AbstractQueryPersonAttributeDao<List<List<Object>>> {
        private final StubPersonAttributeDao storage;

        InMemoryAbstractQueryPersonAttributeDao(final Map<String, List<Object>> backingMap) {
            storage = new StubPersonAttributeDao(backingMap);
        }

        @Override
        protected List<IPersonAttributes> getPeopleForQuery(final List<List<Object>> queryBuilder, final String queryUserName) {
            // FIX(review): was a raw "new ArrayList(...)" (unchecked warning);
            // use the diamond so the element type is inferred.
            return new ArrayList<>(storage.getPeopleWithMultivaluedAttributes(new HashMap<String, List<Object>>(),
                IPersonAttributeDaoFilter.alwaysChoose()));
        }

        @Override
        protected List<List<Object>> appendAttributeToQuery(List<List<Object>> queryBuilder, final String dataAttribute, final List<Object> queryValues) {
            // copy/paste from TestQueryPersonAttributeDao. Don't really care what this does, though
            if (queryBuilder == null) {
                queryBuilder = new LinkedList<>();
            }
            queryBuilder.add(queryValues);
            return queryBuilder;
        }
    }

    /** DAO that records the query arguments it was asked to run and returns no people. */
    public static class TestQueryPersonAttributeDao extends AbstractQueryPersonAttributeDao<List<List<Object>>> {
        private List<List<Object>> args = null;

        @JsonCreator
        public TestQueryPersonAttributeDao() {
            super();
        }

        /**
         * @return the args recorded by the last query, or null if no query was built
         */
        public List<List<Object>> getArgs() {
            return this.args;
        }

        /* (non-Javadoc)
         * @see org.jasig.services.persondir.support.AbstractQueryPersonAttributeDao#appendAttributeToQuery(java.lang.Object, java.lang.String, java.util.List)
         */
        @Override
        protected List<List<Object>> appendAttributeToQuery(List<List<Object>> queryBuilder, final String dataAttribute, final List<Object> queryValues) {
            if (queryBuilder == null) {
                queryBuilder = new LinkedList<>();
            }
            queryBuilder.add(queryValues);
            return queryBuilder;
        }

        /* (non-Javadoc)
         * @see org.jasig.services.persondir.support.AbstractQueryPersonAttributeDao#getPeopleForQuery(java.lang.Object, java.lang.String)
         */
        @Override
        protected List<IPersonAttributes> getPeopleForQuery(final List<List<Object>> queryBuilder, final String queryUserName) {
            this.args = queryBuilder;
            return null;
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.history;
import org.apache.commons.lang.time.DateUtils;
import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.history.HistoricProcessInstance;
import org.camunda.bpm.engine.impl.history.event.HistoricProcessInstanceEventEntity;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.Deployment;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
/**
* @author Tom Baeyens
* @author Joram Barrez
*/
public class HistoricProcessInstanceTest extends PluggableProcessEngineTestCase {
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricDataCreatedForProcessExecution() {
    // Pin the engine clock to a fixed instant so start/end/duration are deterministic.
    Calendar calendar = new GregorianCalendar();
    calendar.set(Calendar.YEAR, 2010);
    calendar.set(Calendar.MONTH, 8);
    calendar.set(Calendar.DAY_OF_MONTH, 30);
    calendar.set(Calendar.HOUR_OF_DAY, 12);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    Date noon = calendar.getTime();
    ClockUtil.setCurrentTime(noon);
    final ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", "myBusinessKey");
    assertEquals(1, historyService.createHistoricProcessInstanceQuery().unfinished().count());
    assertEquals(0, historyService.createHistoricProcessInstanceQuery().finished().count());
    HistoricProcessInstance historicProcessInstance = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
    assertNotNull(historicProcessInstance);
    assertEquals(processInstance.getId(), historicProcessInstance.getId());
    assertEquals(processInstance.getBusinessKey(), historicProcessInstance.getBusinessKey());
    assertEquals(processInstance.getProcessDefinitionId(), historicProcessInstance.getProcessDefinitionId());
    assertEquals(noon, historicProcessInstance.getStartTime());
    // Still running: no end time and no duration yet.
    assertNull(historicProcessInstance.getEndTime());
    assertNull(historicProcessInstance.getDurationInMillis());
    List<Task> tasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list();
    assertEquals(1, tasks.size());
    // in this test scenario we assume that 25 seconds after the process start, the
    // user completes the task (yes! he must be almost as fast as me)
    Date twentyFiveSecsAfterNoon = new Date(noon.getTime() + 25 * 1000);
    ClockUtil.setCurrentTime(twentyFiveSecsAfterNoon);
    taskService.complete(tasks.get(0).getId());
    historicProcessInstance = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
    assertNotNull(historicProcessInstance);
    assertEquals(processInstance.getId(), historicProcessInstance.getId());
    assertEquals(processInstance.getProcessDefinitionId(), historicProcessInstance.getProcessDefinitionId());
    assertEquals(noon, historicProcessInstance.getStartTime());
    assertEquals(twentyFiveSecsAfterNoon, historicProcessInstance.getEndTime());
    // FIX(review): "new Long(...)" is a deprecated boxing constructor; use
    // Long.valueOf (value-equality via assertEquals is unchanged).
    assertEquals(Long.valueOf(25 * 1000L), historicProcessInstance.getDurationInMillis());
    assertTrue(((HistoricProcessInstanceEventEntity) historicProcessInstance).getDurationRaw() >= 25000);
    assertEquals(0, historyService.createHistoricProcessInstanceQuery().unfinished().count());
    assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testLongRunningHistoricDataCreatedForProcessExecution() {
final long ONE_YEAR = 1000 * 60 * 60 * 24 * 365;
Calendar cal = Calendar.getInstance();
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
Date now = cal.getTime();
ClockUtil.setCurrentTime(now);
final ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", "myBusinessKey");
assertEquals(1, historyService.createHistoricProcessInstanceQuery().unfinished().count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finished().count());
HistoricProcessInstance historicProcessInstance = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertEquals(now, historicProcessInstance.getStartTime());
List<Task> tasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list();
assertEquals(1, tasks.size());
// in this test scenario we assume that one year after the process start, the
// user completes the task (incredible speedy!)
cal.add(Calendar.YEAR, 1);
Date oneYearLater = cal.getTime();
ClockUtil.setCurrentTime(oneYearLater);
taskService.complete(tasks.get(0).getId());
historicProcessInstance = historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertEquals(now, historicProcessInstance.getStartTime());
assertEquals(oneYearLater, historicProcessInstance.getEndTime());
assertTrue(historicProcessInstance.getDurationInMillis() >= ONE_YEAR);
assertTrue(((HistoricProcessInstanceEventEntity)historicProcessInstance).getDurationRaw() >= ONE_YEAR);
assertEquals(0, historyService.createHistoricProcessInstanceQuery().unfinished().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testDeleteProcessInstanceHistoryCreated() {
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess");
assertNotNull(processInstance);
// delete process instance should not delete the history
runtimeService.deleteProcessInstance(processInstance.getId(), "cancel");
HistoricProcessInstance historicProcessInstance =
historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).singleResult();
assertNotNull(historicProcessInstance.getEndTime());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceStartDate() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Date date = new Date();
assertEquals(1, historyService.createHistoricProcessInstanceQuery().startDateOn(date).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().startDateBy(date).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().startDateBy(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().startDateBy(DateUtils.addDays(date, 1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().startDateOn(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().startDateOn(DateUtils.addDays(date, 1)).count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceFinishDateUnfinished() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
Date date = new Date();
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateOn(date).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateBy(date).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateBy(DateUtils.addDays(date, 1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateBy(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateOn(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateOn(DateUtils.addDays(date, 1)).count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceFinishDateFinished() {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("oneTaskProcess");
Date date = new Date();
runtimeService.deleteProcessInstance(pi.getId(), "cancel");
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finishDateOn(date).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finishDateBy(date).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finishDateBy(DateUtils.addDays(date, 1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateBy(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateOn(DateUtils.addDays(date, -1)).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishDateOn(DateUtils.addDays(date, 1)).count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceDelete() {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("oneTaskProcess");
runtimeService.deleteProcessInstance(pi.getId(), "cancel");
HistoricProcessInstance historicProcessInstance = historyService.createHistoricProcessInstanceQuery().singleResult();
assertNotNull(historicProcessInstance.getDeleteReason());
assertEquals("cancel", historicProcessInstance.getDeleteReason());
assertNotNull(historicProcessInstance.getEndTime());
}
/** See: https://app.camunda.com/jira/browse/CAM-1324 */
@Deployment
public void testHistoricProcessInstanceDeleteAsync() {
ProcessInstance pi = runtimeService.startProcessInstanceByKey("failing");
runtimeService.deleteProcessInstance(pi.getId(), "cancel");
HistoricProcessInstance historicProcessInstance = historyService.createHistoricProcessInstanceQuery().singleResult();
assertNotNull(historicProcessInstance.getDeleteReason());
assertEquals("cancel", historicProcessInstance.getDeleteReason());
assertNotNull(historicProcessInstance.getEndTime());
}
/*@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceVariables() {
Map<String,Object> vars = new HashMap<String,Object>();
vars.put("foo", "bar");
vars.put("baz", "boo");
runtimeService.startProcessInstanceByKey("oneTaskProcess", vars);
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processVariableEquals("foo", "bar").count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processVariableEquals("baz", "boo").count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processVariableEquals("foo", "bar").processVariableEquals("baz", "boo").count());
}*/
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceQuery() {
Calendar startTime = Calendar.getInstance();
ClockUtil.setCurrentTime(startTime.getTime());
ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", "businessKey123");
Calendar hourAgo = Calendar.getInstance();
hourAgo.add(Calendar.HOUR_OF_DAY, -1);
Calendar hourFromNow = Calendar.getInstance();
hourFromNow.add(Calendar.HOUR_OF_DAY, 1);
// Start/end dates
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedBefore(hourAgo.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedBefore(hourFromNow.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedAfter(hourAgo.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedAfter(hourFromNow.getTime()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().startedBefore(hourFromNow.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().startedBefore(hourAgo.getTime()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().startedAfter(hourAgo.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().startedAfter(hourFromNow.getTime()).count());
// General fields
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finished().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processInstanceId(processInstance.getId()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processDefinitionId(processInstance.getProcessDefinitionId()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processDefinitionKey("oneTaskProcess").count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processInstanceBusinessKey("businessKey123").count());
List<String> exludeIds = new ArrayList<String>();
exludeIds.add("unexistingProcessDefinition");
assertEquals(1, historyService.createHistoricProcessInstanceQuery().processDefinitionKeyNotIn(exludeIds).count());
exludeIds.add("oneTaskProcess");
assertEquals(0, historyService.createHistoricProcessInstanceQuery().processDefinitionKeyNotIn(exludeIds).count());
// After finishing process
taskService.complete(taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult().getId());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finished().count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedBefore(hourAgo.getTime()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finishedBefore(hourFromNow.getTime()).count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().finishedAfter(hourAgo.getTime()).count());
assertEquals(0, historyService.createHistoricProcessInstanceQuery().finishedAfter(hourFromNow.getTime()).count());
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
public void testHistoricProcessInstanceSorting() {
runtimeService.startProcessInstanceByKey("oneTaskProcess");
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceId().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceStartTime().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceEndTime().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceDuration().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessDefinitionId().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceBusinessKey().asc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceId().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceStartTime().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceEndTime().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceDuration().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessDefinitionId().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceBusinessKey().desc().list().size());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceId().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceStartTime().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceEndTime().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceDuration().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessDefinitionId().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceBusinessKey().asc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceId().desc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceStartTime().desc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceEndTime().desc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceDuration().desc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessDefinitionId().desc().count());
assertEquals(1, historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceBusinessKey().desc().count());
}
public void testInvalidSorting() {
try {
historyService.createHistoricProcessInstanceQuery().asc();
fail();
} catch (ProcessEngineException e) {
}
try {
historyService.createHistoricProcessInstanceQuery().desc();
fail();
} catch (ProcessEngineException e) {
}
try {
historyService.createHistoricProcessInstanceQuery().orderByProcessInstanceId().list();
fail();
} catch (ProcessEngineException e) {
}
}
@Deployment(resources = {"org/camunda/bpm/engine/test/history/oneTaskProcess.bpmn20.xml"})
// ACT-1098
public void testDeleteReason() {
if(!ProcessEngineConfiguration.HISTORY_NONE.equals(processEngineConfiguration.getHistory())) {
final String deleteReason = "some delete reason";
ProcessInstance pi = runtimeService.startProcessInstanceByKey("oneTaskProcess");
runtimeService.deleteProcessInstance(pi.getId(), deleteReason);
HistoricProcessInstance hpi = historyService.createHistoricProcessInstanceQuery().processInstanceId(pi.getId()).singleResult();
assertEquals(deleteReason, hpi.getDeleteReason());
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.base.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.BlockWrite;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeFaultInjector;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.datanode.ReplicaInPipeline;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.hdfs.tools.DFSAdmin;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This tests pipeline recovery related client protocol works correct or not.
*/
public class TestClientProtocolForPipelineRecovery {
private static final Logger LOG =
LoggerFactory.getLogger(TestClientProtocolForPipelineRecovery.class);
/**
 * Exercises NamenodeProtocols#updateBlockForPipeline precondition checks:
 * a new generation stamp is only issued for an existing, under-construction
 * (RBW) block, and only to the current lease holder.
 */
@Test public void testGetNewStamp() throws IOException {
int numDataNodes = 1;
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
try {
cluster.waitActive();
FileSystem fileSys = cluster.getFileSystem();
NamenodeProtocols namenode = cluster.getNameNodeRpc();
/* Test writing to finalized replicas */
Path file = new Path("dataprotocol.dat");
DFSTestUtil.createFile(fileSys, file, 1L, (short)numDataNodes, 0L);
// get the first blockid for the file
ExtendedBlock firstBlock = DFSTestUtil.getFirstBlock(fileSys, file);
// test getNewStampAndToken on a finalized block
try {
namenode.updateBlockForPipeline(firstBlock, "");
Assert.fail("Can not get a new GS from a finalized block");
} catch (IOException e) {
// expected: the block is finalized, i.e. not UNDER_CONSTRUCTION
Assert.assertTrue(e.getMessage().contains(
"not " + BlockUCState.UNDER_CONSTRUCTION));
}
// test getNewStampAndToken on a non-existent block
try {
long newBlockId = firstBlock.getBlockId() + 1;
ExtendedBlock newBlock = new ExtendedBlock(firstBlock.getBlockPoolId(),
newBlockId, 0, firstBlock.getGenerationStamp());
namenode.updateBlockForPipeline(newBlock, "");
Assert.fail("Cannot get a new GS from a non-existent block");
} catch (IOException e) {
// expected: the NN does not know this block id
Assert.assertTrue(e.getMessage().contains("does not exist"));
}
/* Test RBW replicas */
// change first block to a RBW (append re-opens the last block)
DFSOutputStream out = null;
try {
out = (DFSOutputStream)(fileSys.append(file).
getWrappedStream());
out.write(1);
out.hflush();
FSDataInputStream in = null;
try {
in = fileSys.open(file);
// re-fetch the block descriptor now that the block is RBW
firstBlock = DFSTestUtil.getAllBlocks(in).get(0).getBlock();
} finally {
IOUtils.closeStream(in);
}
// test non-lease holder: wrong client name must be rejected
DFSClient dfs = ((DistributedFileSystem)fileSys).dfs;
try {
namenode.updateBlockForPipeline(firstBlock, "test" + dfs.clientName);
Assert.fail("Cannot get a new GS for a non lease holder");
} catch (LeaseExpiredException e) {
Assert.assertTrue(e.getMessage().startsWith("Lease mismatch"));
}
// test null lease holder
try {
namenode.updateBlockForPipeline(firstBlock, null);
Assert.fail("Cannot get a new GS for a null lease holder");
} catch (LeaseExpiredException e) {
Assert.assertTrue(e.getMessage().startsWith("Lease mismatch"));
}
// test getNewStampAndToken on a rbw block: the real lease holder succeeds
namenode.updateBlockForPipeline(firstBlock, dfs.clientName);
} finally {
IOUtils.closeStream(out);
}
} finally {
cluster.shutdown();
}
}
/** Test whether corrupt replicas are detected correctly during pipeline
 * recoveries. A client fault injector fails a packet mid-write, so some
 * replicas end up short/corrupt; the NN must only accept valid replicas
 * when closing the file, and a subsequent read must succeed.
 */
@Test
public void testPipelineRecoveryForLastBlock() throws IOException {
DFSClientFaultInjector faultInjector
= Mockito.mock(DFSClientFaultInjector.class);
DFSClientFaultInjector oldInjector = DFSClientFaultInjector.get();
DFSClientFaultInjector.set(faultInjector);
Configuration conf = new HdfsConfiguration();
// allow a few retries when locating the following block after the staged failure
conf.setInt(HdfsClientConfigKeys.BlockWrite.LOCATEFOLLOWINGBLOCK_RETRIES_KEY, 3);
MiniDFSCluster cluster = null;
try {
int numDataNodes = 3;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
cluster.waitActive();
FileSystem fileSys = cluster.getFileSystem();
Path file = new Path("dataprotocol1.dat");
// make the client fail a packet, forcing pipeline recovery during the write
Mockito.when(faultInjector.failPacket()).thenReturn(true);
DFSTestUtil.createFile(fileSys, file, 68000000L, (short)numDataNodes, 0L);
// At this point, NN should have accepted only valid replicas.
// Read should succeed.
FSDataInputStream in = fileSys.open(file);
try {
in.read();
// Test will fail with BlockMissingException if NN does not update the
// replica state based on the latest report.
} catch (org.apache.hadoop.hdfs.BlockMissingException bme) {
Assert.fail("Block is missing because the file was closed with"
+ " corrupt replicas.");
}
} finally {
// restore the shared injector so other tests are unaffected
DFSClientFaultInjector.set(oldInjector);
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Heartbeat-loss recovery: the first datanode drops heartbeat packets, so
 * with a 3s client socket timeout the downstream node times out and the
 * pipeline must be rebuilt with the first node replaced while the second
 * node is retained.
 */
@Test
public void testPacketTransmissionDelay() throws Exception {
// Make the first datanode to not relay heartbeat packet.
DataNodeFaultInjector dnFaultInjector = new DataNodeFaultInjector() {
@Override
public boolean dropHeartbeatPacket() {
return true;
}
};
DataNodeFaultInjector oldDnInjector = DataNodeFaultInjector.get();
DataNodeFaultInjector.set(dnFaultInjector);
// Setting the timeout to be 3 seconds. Normally heartbeat packet
// would be sent every 1.5 seconds if there is no data traffic.
Configuration conf = new HdfsConfiguration();
conf.set(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, "3000");
MiniDFSCluster cluster = null;
try {
int numDataNodes = 2;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
cluster.waitActive();
FileSystem fs = cluster.getFileSystem();
FSDataOutputStream out = fs.create(new Path("noheartbeat.dat"), (short)2);
out.write(0x31);
out.hflush();
DFSOutputStream dfsOut = (DFSOutputStream)out.getWrappedStream();
// original pipeline
DatanodeInfo[] orgNodes = dfsOut.getPipeline();
// Cause the second datanode to timeout on reading packet
// (sleep longer than the 3s socket timeout with no data traffic)
Thread.sleep(3500);
out.write(0x32);
out.hflush();
// new pipeline
DatanodeInfo[] newNodes = dfsOut.getPipeline();
out.close();
// the faulty first node must be gone; the healthy second node should remain
boolean contains = false;
for (int i = 0; i < newNodes.length; i++) {
if (orgNodes[0].getXferAddr().equals(newNodes[i].getXferAddr())) {
throw new IOException("The first datanode should have been replaced.");
}
if (orgNodes[1].getXferAddr().equals(newNodes[i].getXferAddr())) {
contains = true;
}
}
Assert.assertTrue(contains);
} finally {
// restore the shared injector so other tests are unaffected
DataNodeFaultInjector.set(oldDnInjector);
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Test recovery on restart OOB message. It also tests the delivery of
 * OOB ack originating from the primary datanode. Since there is only
 * one node in the cluster, failure of restart-recovery will fail the
 * test.
 */
@Test
public void testPipelineRecoveryOnOOB() throws Exception {
Configuration conf = new HdfsConfiguration();
// give the datanode 15 seconds to come back before giving up on OOB recovery
conf.set(HdfsClientConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY, "15");
MiniDFSCluster cluster = null;
try {
int numDataNodes = 1;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
cluster.waitActive();
FileSystem fileSys = cluster.getFileSystem();
Path file = new Path("dataprotocol2.dat");
DFSTestUtil.createFile(fileSys, file, 10240L, (short)1, 0L);
DFSOutputStream out = (DFSOutputStream)(fileSys.append(file).
getWrappedStream());
out.write(1);
out.hflush();
DFSAdmin dfsadmin = new DFSAdmin(conf);
DataNode dn = cluster.getDataNodes().get(0);
final String dnAddr = dn.getDatanodeId().getIpcAddr(false);
// issue shutdown to the datanode.
final String[] args1 = {"-shutdownDatanode", dnAddr, "upgrade" };
Assert.assertEquals(0, dfsadmin.run(args1));
// Wait long enough to receive an OOB ack before closing the file.
GenericTestUtils.waitForThreadTermination(
"Async datanode shutdown thread", 100, 10000);
// Restart the datanode
cluster.restartDataNode(0, true);
// The following forces a data packet and end of block packets to be sent.
out.close();
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Test that the writer is kicked out of a node: after "-evictWriters" is
 * issued against the second pipeline node, the next write must trigger a
 * pipeline recovery that excludes the evicted node.
 */
@Test
public void testEvictWriter() throws Exception {
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes((int)3)
.build();
cluster.waitActive();
FileSystem fs = cluster.getFileSystem();
Path file = new Path("testEvictWriter.dat");
FSDataOutputStream out = fs.create(file, (short)2);
out.write(0x31);
out.hflush();
// get nodes in the pipeline
DFSOutputStream dfsOut = (DFSOutputStream)out.getWrappedStream();
DatanodeInfo[] nodes = dfsOut.getPipeline();
Assert.assertEquals(2, nodes.length);
String dnAddr = nodes[1].getIpcAddr(false);
// evict the writer from the second datanode and wait until
// the pipeline is rebuilt.
DFSAdmin dfsadmin = new DFSAdmin(conf);
final String[] args1 = {"-evictWriters", dnAddr };
Assert.assertEquals(0, dfsadmin.run(args1));
// this write triggers the pipeline recovery after eviction
out.write(0x31);
out.hflush();
// get the new pipeline and check the evicted node is not in there.
nodes = dfsOut.getPipeline();
try {
Assert.assertTrue(nodes.length > 0 );
for (int i = 0; i < nodes.length; i++) {
Assert.assertFalse(dnAddr.equals(nodes[i].getIpcAddr(false)));
}
} finally {
out.close();
}
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Test restart timeout: when the "datanode restart" OOB recovery window (5s
 * here) expires without the node returning, the client must fall back to
 * regular pipeline recovery. The first close() succeeds via the remaining
 * datanode; after the last node is shut down too, close() must fail.
 */
@Test
public void testPipelineRecoveryOnRestartFailure() throws Exception {
  Configuration conf = new HdfsConfiguration();
  // short restart timeout (seconds) so the test does not wait long
  conf.set(HdfsClientConfigKeys.DFS_CLIENT_DATANODE_RESTART_TIMEOUT_KEY, "5");
  MiniDFSCluster cluster = null;
  try {
    int numDataNodes = 2;
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDataNodes).build();
    cluster.waitActive();
    FileSystem fileSys = cluster.getFileSystem();

    Path file = new Path("dataprotocol3.dat");
    DFSTestUtil.createFile(fileSys, file, 10240L, (short)2, 0L);
    DFSOutputStream out = (DFSOutputStream)(fileSys.append(file).
        getWrappedStream());
    out.write(1);
    out.hflush();

    DFSAdmin dfsadmin = new DFSAdmin(conf);
    DataNode dn = cluster.getDataNodes().get(0);
    final String dnAddr1 = dn.getDatanodeId().getIpcAddr(false);
    // issue shutdown to the datanode.
    final String[] args1 = {"-shutdownDatanode", dnAddr1, "upgrade" };
    Assert.assertEquals(0, dfsadmin.run(args1));
    GenericTestUtils.waitForThreadTermination(
        "Async datanode shutdown thread", 100, 10000);
    // This should succeed without restarting the node. The restart will
    // expire and regular pipeline recovery will kick in.
    out.close();

    // At this point there is only one node in the cluster.
    out = (DFSOutputStream)(fileSys.append(file).
        getWrappedStream());
    out.write(1);
    out.hflush();

    dn = cluster.getDataNodes().get(1);
    final String dnAddr2 = dn.getDatanodeId().getIpcAddr(false);
    // issue shutdown to the datanode.
    final String[] args2 = {"-shutdownDatanode", dnAddr2, "upgrade" };
    Assert.assertEquals(0, dfsadmin.run(args2));
    GenericTestUtils.waitForThreadTermination(
        "Async datanode shutdown thread", 100, 10000);
    try {
      // close should fail: no datanode is left to recover the pipeline.
      out.close();
      // BUG FIX: the original used a bare `assert false;`, which is a no-op
      // unless the JVM runs with -ea, silently passing when close() wrongly
      // succeeds. Assert.fail() always fails the test.
      Assert.fail("close() should have thrown an IOException");
    } catch (IOException ioe) {
      // expected
    }
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * HDFS-9752. The client keeps sending heartbeat packets during datanode
 * rolling upgrades. The client should be able to retry pipeline recovery
 * more times than the default.
 * (in a row for the same packet, including the heartbeat packet)
 * (See{@link DataStreamer#pipelineRecoveryCount})
 */
@Test(timeout = 60000)
public void testPipelineRecoveryOnDatanodeUpgrade() throws Exception {
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
FileSystem fileSys = cluster.getFileSystem();
Path file = new Path("/testPipelineRecoveryOnDatanodeUpgrade");
DFSTestUtil.createFile(fileSys, file, 10240L, (short) 2, 0L);
final DFSOutputStream out = (DFSOutputStream) (fileSys.append(file).
getWrappedStream());
out.write(1);
out.hflush();
// remember the old generation stamp; a bumped GS signals recovery finished
final long oldGs = out.getBlock().getGenerationStamp();
MiniDFSCluster.DataNodeProperties dnProps =
cluster.stopDataNodeForUpgrade(0);
GenericTestUtils.waitForThreadTermination(
"Async datanode shutdown thread", 100, 10000);
cluster.restartDataNode(dnProps, true);
cluster.waitActive();
// wait pipeline to be recovered
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return out.getBlock().getGenerationStamp() > oldGs;
}
}, 100, 10000);
Assert.assertEquals("The pipeline recovery count shouldn't increase",
0, out.getStreamer().getPipelineRecoveryCount());
out.write(1);
out.close();
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Rolling-upgrade restart of every node in the write pipeline, one by one,
 * while a background thread keeps writing. With BEST_EFFORT datanode
 * replacement and skipRollingRestartWait() forced to true (all restarting
 * nodes treated as remote), each recovery must complete without increasing
 * the per-packet pipeline recovery count, and the write must never fail.
 */
@Test
public void testPipelineRecoveryOnRemoteDatanodeUpgrade() throws Exception {
Configuration conf = new HdfsConfiguration();
conf.setBoolean(BlockWrite.ReplaceDatanodeOnFailure.BEST_EFFORT_KEY, true);
MiniDFSCluster cluster = null;
DFSClientFaultInjector old = DFSClientFaultInjector.get();
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
FileSystem fileSys = cluster.getFileSystem();
Path file = new Path("/testPipelineRecoveryOnDatanodeUpgrade");
DFSTestUtil.createFile(fileSys, file, 10240L, (short) 3, 0L);
// treat all restarting nodes as remote for test.
DFSClientFaultInjector.set(new DFSClientFaultInjector() {
public boolean skipRollingRestartWait() {
return true;
}
});
final DFSOutputStream out = (DFSOutputStream) fileSys.append(file)
.getWrappedStream();
// writer thread control / failure flags shared with the main thread
final AtomicBoolean running = new AtomicBoolean(true);
final AtomicBoolean failed = new AtomicBoolean(false);
Thread t = new Thread() {
public void run() {
while (running.get()) {
try {
out.write("test".getBytes());
out.hflush();
// Keep writing data every one second
Thread.sleep(1000);
} catch (IOException | InterruptedException e) {
LOG.error("Exception during write", e);
failed.set(true);
break;
}
}
running.set(false);
}
};
t.start();
// Let write start
Thread.sleep(1000);
DatanodeInfo[] pipeline = out.getPipeline();
// restart each pipeline node in turn and wait for recovery
for (DatanodeInfo node : pipeline) {
assertFalse("Write should be going on", failed.get());
ArrayList<DataNode> dataNodes = cluster.getDataNodes();
int indexToShutdown = 0;
for (int i = 0; i < dataNodes.size(); i++) {
if (dataNodes.get(i).getIpcPort() == node.getIpcPort()) {
indexToShutdown = i;
break;
}
}
// Note old genstamp to findout pipeline recovery
final long oldGs = out.getBlock().getGenerationStamp();
MiniDFSCluster.DataNodeProperties dnProps = cluster
.stopDataNodeForUpgrade(indexToShutdown);
GenericTestUtils.waitForThreadTermination(
"Async datanode shutdown thread", 100, 10000);
cluster.restartDataNode(dnProps, true);
cluster.waitActive();
// wait pipeline to be recovered (a bumped generation stamp signals it)
GenericTestUtils.waitFor(new Supplier<Boolean>() {
@Override
public Boolean get() {
return out.getBlock().getGenerationStamp() > oldGs;
}
}, 100, 10000);
Assert.assertEquals("The pipeline recovery count shouldn't increase", 0,
out.getStreamer().getPipelineRecoveryCount());
}
assertFalse("Write should be going on", failed.get());
// stop the writer thread before the final verification write
running.set(false);
t.join();
out.write("testagain".getBytes());
assertTrue("There should be atleast 2 nodes in pipeline still", out
.getPipeline().length >= 2);
out.close();
} finally {
// restore the shared injector so other tests are unaffected
DFSClientFaultInjector.set(old);
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
* Test to make sure the checksum is set correctly after pipeline
* recovery transfers 0 byte partial block. If fails the test case
* will say "java.io.IOException: Failed to replace a bad datanode
* on the existing pipeline due to no more good datanodes being
* available to try." This indicates there was a real failure
* after the staged failure.
*/
@Test
public void testZeroByteBlockRecovery() throws Exception {
  // Make the first datanode fail once. With 3 nodes and a block being
  // created with 2 replicas, anything more than this planned failure
  // will cause a test failure.
  DataNodeFaultInjector dnFaultInjector = new DataNodeFaultInjector() {
    // Remaining injected failures; only the first downstream send stalls.
    int tries = 1;
    @Override
    public void stopSendingPacketDownstream(final String mirrAddr)
        throws IOException {
      if (tries > 0) {
        tries--;
        try {
          // Stall for 60s, far longer than the 1s client socket timeout
          // configured below, so the client declares the datanode bad and
          // enters pipeline recovery.
          Thread.sleep(60000);
        } catch (InterruptedException ie) {
          throw new IOException("Interrupted while sleeping. Bailing out.");
        }
      }
    }
  };
  DataNodeFaultInjector oldDnInjector = DataNodeFaultInjector.get();
  DataNodeFaultInjector.set(dnFaultInjector);
  Configuration conf = new HdfsConfiguration();
  // Short socket timeout so the stalled datanode is detected quickly.
  conf.set(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, "1000");
  // Always replace a failed datanode during pipeline recovery; with only a
  // single staged failure there is always a good node left to pick.
  conf.set(HdfsClientConfigKeys.
      BlockWrite.ReplaceDatanodeOnFailure.POLICY_KEY, "ALWAYS");
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
    cluster.waitActive();
    FileSystem fs = cluster.getFileSystem();
    // Write and flush a single byte, so pipeline recovery has to transfer a
    // zero-byte partial block; close() then verifies the checksum state.
    FSDataOutputStream out = fs.create(new Path("noheartbeat.dat"), (short)2);
    out.write(0x31);
    out.hflush();
    out.close();
  } finally {
    // Always tear down the cluster and restore the global injector so later
    // tests are not affected by the staged failure.
    if (cluster != null) {
      cluster.shutdown();
    }
    DataNodeFaultInjector.set(oldDnInjector);
  }
}
// Test to verify that blocks are no longer corrupted after HDFS-4660.
// Revert HDFS-4660 and the other related ones (HDFS-9220, HDFS-8722), this
// test would fail.
// Scenario: Prior to the fix, block get corrupted when the transferBlock
// happens during pipeline recovery with extra bytes to make up the end of
// chunk.
// For verification, Need to fail the pipeline for last datanode when the
// second datanode have more bytes on disk than already acked bytes.
// This will enable to transfer extra bytes to the newNode to makeup
// end-of-chunk during pipeline recovery. This is achieved by the customized
// DataNodeFaultInjector class in this test.
// For detailed info, please refer to HDFS-4660 and HDFS-10587. HDFS-9220
// fixes an issue in HDFS-4660 patch, and HDFS-8722 is an optimization.
@Test
public void testPipelineRecoveryWithTransferBlock() throws Exception {
  final int chunkSize = 512;
  final int oneWriteSize = 5000;
  final int totalSize = 1024 * 1024;
  final int errorInjectionPos = 512;
  Configuration conf = new HdfsConfiguration();
  // Need 4 datanodes to verify the replaceDatanode during pipeline recovery
  final MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
  DataNodeFaultInjector old = DataNodeFaultInjector.get();
  try {
    DistributedFileSystem fs = cluster.getFileSystem();
    Path fileName = new Path("/f");
    FSDataOutputStream o = fs.create(fileName);
    // Total bytes written so far (distinct from the injector's spin counter).
    int count = 0;
    // Flush to get the pipeline created.
    o.writeBytes("hello");
    o.hflush();
    DFSOutputStream dfsO = (DFSOutputStream) o.getWrappedStream();
    final DatanodeInfo[] pipeline = dfsO.getStreamer().getNodes();
    final String lastDn = pipeline[2].getXferAddr(false);
    final AtomicBoolean failed = new AtomicBoolean(false);
    DataNodeFaultInjector.set(new DataNodeFaultInjector() {
      @Override
      public void failPipeline(ReplicaInPipeline replicaInfo,
          String mirror) throws IOException {
        if (!lastDn.equals(mirror)) {
          // Only fail for second DN
          return;
        }
        if (!failed.get() &&
            (replicaInfo.getBytesAcked() > errorInjectionPos) &&
            (replicaInfo.getBytesAcked() % chunkSize != 0)) {
          // Spin counter, renamed from "count" so it cannot be confused
          // with the byte counter in the enclosing test method.
          int spins = 0;
          while (spins < 10) {
            // Fail the pipeline (Throw exception) when:
            // 1. bytesAcked is not at chunk boundary (checked in the if
            // statement above)
            // 2. bytesOnDisk is bigger than bytesAcked and at least
            // reaches (or goes beyond) the end of the chunk that
            // bytesAcked is in (checked in the if statement below).
            // At this condition, transferBlock that happens during
            // pipeline recovery would transfer extra bytes to make up to the
            // end of the chunk. And this is when the block corruption
            // described in HDFS-4660 would occur.
            if ((replicaInfo.getBytesOnDisk() / chunkSize) -
                (replicaInfo.getBytesAcked() / chunkSize) >= 1) {
              failed.set(true);
              throw new IOException(
                  "Failing Pipeline " + replicaInfo.getBytesAcked() + " : "
                      + replicaInfo.getBytesOnDisk());
            }
            try {
              Thread.sleep(200);
            } catch (InterruptedException e) {
              // Restore the interrupt status instead of swallowing it, and
              // stop waiting so an external cancel takes effect promptly.
              Thread.currentThread().interrupt();
              return;
            }
            spins++;
          }
        }
      }
    });
    Random r = new Random();
    byte[] b = new byte[oneWriteSize];
    // Keep writing until the injector has had a chance to fire.
    while (count < totalSize) {
      r.nextBytes(b);
      o.write(b);
      count += oneWriteSize;
      o.hflush();
    }
    assertTrue("Expected a failure in the pipeline", failed.get());
    DatanodeInfo[] newNodes = dfsO.getStreamer().getNodes();
    o.close();
    // Trigger block report to NN
    for (DataNode d: cluster.getDataNodes()) {
      DataNodeTestUtils.triggerBlockReport(d);
    }
    // Read from the replaced datanode to verify the corruption. So shutdown
    // all other nodes in the pipeline.
    List<DatanodeInfo> pipelineList = Arrays.asList(pipeline);
    DatanodeInfo newNode = null;
    for (DatanodeInfo node : newNodes) {
      if (!pipelineList.contains(node)) {
        newNode = node;
        break;
      }
    }
    // Fail fast (instead of NPE below) if recovery did not actually add a
    // replacement node outside the original pipeline.
    Assert.assertNotNull(
        "Expected a replacement datanode not present in the original pipeline",
        newNode);
    LOG.info("Number of nodes in pipeline: {} newNode {}",
        newNodes.length, newNode.getName());
    // shutdown old 2 nodes
    for (int i = 0; i < newNodes.length; i++) {
      if (newNodes[i].getName().equals(newNode.getName())) {
        continue;
      }
      LOG.info("shutdown {}", newNodes[i].getName());
      cluster.stopDataNode(newNodes[i].getName());
    }
    // Read should be successful from only the newNode. There should not be
    // any corruption reported.
    DFSTestUtil.readFile(fs, fileName);
  } finally {
    DataNodeFaultInjector.set(old);
    cluster.shutdown();
  }
}
}
| |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.waveprotocol.wave.model.document.indexed;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import org.waveprotocol.wave.model.document.AnnotationSetTestBase;
import org.waveprotocol.wave.model.operation.OperationException;
import org.waveprotocol.wave.model.util.CollectionFactory;
import org.waveprotocol.wave.model.util.CollectionUtils;
/**
* Some basic tests for AnnotationTree.
*
* @author ohler@google.com (Christian Ohler)
*/
public class AnnotationTreeTest extends AnnotationSetTestBase {

  /**
   * Creates the annotation set under test. The first two constructor
   * arguments are the tree's two internal sentinel objects; they must be
   * non-null and distinct (see testConstructorChecksArguments below).
   */
  @Override
  protected AnnotationTree<Object> getNewSet(AnnotationSetListener<Object> listener) {
    return new AnnotationTree<Object>(new Object(), new Object(), listener);
  }

  // Collection factory used by the tests.
  // NOTE(review): looks like this overrides a base-class method but lacks
  // @Override — confirm against AnnotationSetTestBase.
  CollectionFactory getFactory() {
    return CollectionUtils.getCollectionFactory();
  }

  /**
   * Sets annotation {@code key=value} over the range [start, end) using the
   * begin / skip / startAnnotation / endAnnotation / finish protocol.
   */
  void setAnnotation(RawAnnotationSet<Object> a, int start, int end, String key, Object value)
      throws OperationException {
    a.begin();
    if (start > 0) {
      a.skip(start);
    }
    a.startAnnotation(key, value);
    if (end - start > 0) {
      a.skip(end - start);
    }
    a.endAnnotation(key);
    a.finish();
  }

  /**
   * Inserts {@code length} items such that the first pre-existing item to be
   * shifted right is at {@code firstShiftedIndex}.
   */
  void insert(RawAnnotationSet<Object> a, int firstShiftedIndex, int length)
      throws OperationException {
    a.begin();
    if (firstShiftedIndex > 0) {
      a.skip(firstShiftedIndex);
    }
    if (length > 0) {
      a.insert(length);
    }
    a.finish();
  }

  /** Deletes {@code length} items starting at index {@code start}. */
  void delete(RawAnnotationSet<Object> a, int start, int length) throws OperationException {
    a.begin();
    if (start > 0) {
      a.skip(start);
    }
    if (length > 0) {
      a.delete(length);
    }
    a.finish();
  }

  // Annotates every single-item range with a distinct value, checking the
  // tree's internal invariants after each mutation (rebalancing regression).
  public void testOffBalance() throws OperationException {
    AnnotationTree<Object> tree = new AnnotationTree<Object>(new Object(),
        new Object(), null);
    final int size = 15;
    insert(tree, 0, size);
    for (int i = 0; i < size - 1; i++) {
      setAnnotation(tree, i, i + 1, "a", "" + i);
      tree.checkSomeInvariants();
    }
  }

  // Set an annotation, clear it again, then delete the only item; none of
  // this should throw.
  public void testRemoveAll() throws OperationException {
    AnnotationTree<Object> tree = new AnnotationTree<Object>(new Object(),
        new Object(), null);
    insert(tree, 0, 1);
    setAnnotation(tree, 0, 1, "a", "0");
    setAnnotation(tree, 0, 1, "a", null);
    delete(tree, 0, 1);
  }

  // Overwriting a range that spans existing annotation boundaries must merge
  // the overwritten pieces correctly — both when the overwrite is left-aligned
  // and when it is right-aligned with the existing runs.
  public void testEraseMergeDuringSetAnnotation() throws OperationException {
    {
      AnnotationTree<Object> tree = new AnnotationTree<Object>(new Object(),
          new Object(), null);
      insert(tree, 0, 3);
      setAnnotation(tree, 0, 1, "a", "1");
      setAnnotation(tree, 1, 2, "a", "2");
      setAnnotation(tree, 2, 3, "a", "3");
      // overwrite covering the first two runs
      setAnnotation(tree, 0, 2, "a", "5");
    }
    {
      AnnotationTree<Object> tree = new AnnotationTree<Object>(new Object(),
          new Object(), null);
      insert(tree, 0, 3);
      setAnnotation(tree, 0, 1, "a", "1");
      setAnnotation(tree, 1, 2, "a", "2");
      setAnnotation(tree, 2, 3, "a", "3");
      // overwrite covering the last two runs
      setAnnotation(tree, 1, 3, "a", "5");
    }
  }

  // Repeatedly re-annotates sub-ranges so that existing annotation runs must
  // be split at various positions.
  public void testSplitAnnotations() throws OperationException {
    AnnotationTree<Object> tree = new AnnotationTree<Object>(new Object(),
        new Object(), null);
    // The test is that none of this throws an exception.
    tree.begin();
    tree.startAnnotation("a", "1");
    tree.insert(1);
    tree.startAnnotation("a", "2");
    tree.insert(5);
    tree.startAnnotation("a", "1");
    tree.insert(1);
    tree.endAnnotation("a");
    tree.finish();
    tree.checkSomeInvariants();
    // cut off one item on the left
    tree.begin();
    tree.skip(2);
    tree.startAnnotation("a", "3");
    tree.skip(4);
    tree.endAnnotation("a");
    tree.finish();
    tree.checkSomeInvariants();
    // cut off one item on the right
    tree.begin();
    tree.skip(2);
    tree.startAnnotation("a", "4");
    tree.skip(3);
    tree.endAnnotation("a");
    tree.finish();
    tree.checkSomeInvariants();
    // cut off one item on the left and one on the right
    tree.begin();
    tree.skip(3);
    tree.startAnnotation("a", "5");
    tree.skip(1);
    tree.endAnnotation("a");
    tree.finish();
    tree.checkSomeInvariants();
  }

  // Builds an alternating "1"/"2" annotation pattern, then overwrites the
  // whole range with "1" and verifies the listener saw the expected change
  // notifications for both passes.
  @SuppressWarnings("unchecked")
  public void testListenerBasics() throws OperationException {
    final AnnotationSetListener<Object> listener = mock(AnnotationSetListener.class);
    RawAnnotationSet<Object> m = getNewSet(listener);
    m.begin();
    m.startAnnotation("a", "1");
    m.insert(1);
    m.endAnnotation("a");
    m.insert(1);
    m.startAnnotation("a", "2");
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.startAnnotation("a", "2");
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.endAnnotation("a");
    m.finish();
    m.begin();
    m.startAnnotation("a", "1");
    m.skip(6);
    m.endAnnotation("a");
    m.finish();
    // Notifications from the first (insert) pass.
    verify(listener).onAnnotationChange(0, 1, "a", "1");
    verify(listener).onAnnotationChange(2, 3, "a", "2");
    verify(listener).onAnnotationChange(3, 4, "a", "1");
    verify(listener).onAnnotationChange(4, 5, "a", "2");
    verify(listener).onAnnotationChange(5, 6, "a", "1");
    // These assertions are too strict; the way the AnnotationSet splits its
    // notifications is actually undefined, and there would be several
    // alternatives here.
    verify(listener).onAnnotationChange(0, 6, "a", "1");
  }

  // Same as testListenerBasics but alternating between "1" and an explicit
  // null value, so null-valued runs also produce notifications.
  @SuppressWarnings("unchecked")
  public void testListenerBasics2() throws OperationException {
    final AnnotationSetListener<Object> listener = mock(AnnotationSetListener.class);
    RawAnnotationSet<Object> m = getNewSet(listener);
    m.begin();
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.startAnnotation("a", null);
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.startAnnotation("a", null);
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.endAnnotation("a");
    m.finish();
    m.begin();
    m.startAnnotation("a", "1");
    m.skip(6);
    m.endAnnotation("a");
    m.finish();
    verify(listener).onAnnotationChange(1, 2, "a", "1");
    verify(listener).onAnnotationChange(2, 3, "a", null);
    verify(listener).onAnnotationChange(3, 4, "a", "1");
    verify(listener).onAnnotationChange(4, 5, "a", null);
    verify(listener).onAnnotationChange(5, 6, "a", "1");
    // These assertions are too strict; the way the AnnotationSet splits its
    // notifications is actually undefined, and there would be several
    // alternatives here.
    verify(listener).onAnnotationChange(0, 6, "a", "1");
  }

  // Verifies that a listener may issue a new mutation on the same set from
  // inside its onAnnotationChange callback (re-entrancy), and that the
  // callbacks arrive in the expected order with the expected arguments.
  @SuppressWarnings("unchecked")
  public void testModificationFromListener() throws OperationException {
    // Counts callbacks; also selects which expectation applies (array used
    // so the anonymous class can mutate it).
    final int callCounter[] = new int[] { 0 };
    // Chicken-and-egg problem: listener needs a reference to m in a final local
    // variable declared before it, m's constructor needs listener.
    final RawAnnotationSet<Object> m1[] = new RawAnnotationSet[1];
    AnnotationSetListener<Object> listener = new AnnotationSetListener<Object>() {
      @Override
      public void onAnnotationChange(int start, int end, String key, Object newValue) {
        switch (callCounter[0]) {
          case 0:
            assertEquals(1, start);
            assertEquals(2, end);
            assertEquals("a", key);
            assertEquals("1", newValue);
            break;
          case 1:
            assertEquals(2, start);
            assertEquals(3, end);
            assertEquals("a", key);
            assertEquals(null, newValue);
            break;
          case 2:
            assertEquals(0, start);
            assertEquals(3, end);
            assertEquals("a", key);
            assertEquals("1", newValue);
            // Re-entrant mutation: annotate "b" from within the callback;
            // this must trigger the case-3 notification below.
            m1[0].begin();
            m1[0].startAnnotation("b", "1");
            m1[0].skip(2);
            m1[0].endAnnotation("b");
            m1[0].finish();
            break;
          case 3:
            assertEquals(0, start);
            assertEquals(2, end);
            assertEquals("b", key);
            assertEquals("1", newValue);
            break;
          default:
            fail();
        }
        callCounter[0]++;
      }
    };
    RawAnnotationSet<Object> m = getNewSet(listener);
    m1[0] = m;
    m.begin();
    m.insert(1);
    m.startAnnotation("a", "1");
    m.insert(1);
    m.startAnnotation("a", null);
    m.insert(1);
    m.endAnnotation("a");
    m.finish();
    m.begin();
    m.startAnnotation("a", "1");
    m.skip(3);
    m.endAnnotation("a");
    m.finish();
    // All four expected callbacks (including the re-entrant one) happened.
    assertEquals(4, callCounter[0]);
  }

  // The behavior tested here is not currently implemented in
  // SimpleAnnotationSet.
  public void testDoubleBeginFailsHard() {
    RawAnnotationSet<Object> m = getNew();
    m.begin();
    try {
      // begin() without an intervening finish() must be rejected.
      m.begin();
      fail();
    } catch (IllegalStateException e) {
      // ok
    }
  }

  // The behavior tested here is not currently implemented in
  // SimpleAnnotationSet.
  public void testUnmatchedFinishFailsHard() {
    {
      // finish() before any begin()
      RawAnnotationSet<Object> m = getNew();
      try {
        m.finish();
        fail();
      } catch (IllegalStateException e) {
        // ok
      }
    }
    {
      // second finish() after a balanced begin()/finish() pair
      RawAnnotationSet<Object> m = getNew();
      m.begin();
      m.finish();
      try {
        m.finish();
        fail();
      } catch (IllegalStateException e) {
        // ok
      }
    }
  }

  // The two sentinel values passed to the constructor must be non-null and
  // distinct from each other.
  public void testConstructorChecksArguments() {
    try {
      new AnnotationTree<String>(null, "a", null);
      fail();
    } catch (NullPointerException e) {
      // ok
    }
    try {
      new AnnotationTree<String>("a", null, null);
      fail();
    } catch (NullPointerException e) {
      // ok
    }
    try {
      new AnnotationTree<String>("a", "a", null);
      fail();
    } catch (IllegalArgumentException e) {
      // ok
    }
    try {
      new AnnotationTree<String>("b", "b", null);
      fail();
    } catch (IllegalArgumentException e) {
      // ok
    }
    // Should not throw.
    new AnnotationTree<String>("a", "b", null);
  }

  // knownKeys() must track exactly the keys that still have a non-null value
  // somewhere in the tree, dropping keys when their last value is cleared or
  // the annotated items are deleted.
  public void testCleanupKnownKeys() {
    AnnotationTree<Object> t = getNewSet(null);
    t.begin();
    t.startAnnotation("a", "1");
    t.insert(10);
    t.endAnnotation("a");
    t.finish();
    assertEquals(1, t.knownKeys().countEntries());
    // clearing the whole run removes the key
    t.begin();
    t.startAnnotation("a", null);
    t.skip(10);
    t.endAnnotation("a");
    t.finish();
    assertEquals(0, t.knownKeys().countEntries());
    // clearing an already-clear range keeps it removed
    t.begin();
    t.startAnnotation("a", null);
    t.skip(4);
    t.endAnnotation("a");
    t.finish();
    assertEquals(0, t.knownKeys().countEntries());
    // two different values of the same key still count as one known key
    t.begin();
    t.startAnnotation("a", "1");
    t.insert(10);
    t.startAnnotation("a", "2");
    t.skip(10);
    t.endAnnotation("a");
    t.finish();
    assertEquals(1, t.knownKeys().countEntries());
    t.begin();
    t.skip(2);
    t.startAnnotation("b", "1");
    t.skip(18);
    t.endAnnotation("b");
    t.finish();
    assertEquals(2, t.knownKeys().countEntries());
    // clearing part of "b" and deleting the rest removes "b"
    t.begin();
    t.startAnnotation("b", null);
    t.skip(5);
    t.endAnnotation("b");
    t.delete(15);
    t.finish();
    assertEquals(1, t.knownKeys().countEntries());
    // deleting and clearing the remaining items removes "a" too
    t.begin();
    t.delete(2);
    t.startAnnotation("a", null);
    t.skip(3);
    t.endAnnotation("a");
    t.finish();
    assertEquals(0, t.knownKeys().countEntries());
  }
}
| |
/*
* To change this template, choose Tools | Templates and open the template in the editor.
*/
package com.blazebit.security.web.bean.base;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
import org.primefaces.model.DefaultTreeNode;
import org.primefaces.model.TreeNode;
import com.blazebit.security.PermissionUtils;
import com.blazebit.security.data.PermissionDataAccess;
import com.blazebit.security.data.PermissionHandling;
import com.blazebit.security.data.PermissionManager;
import com.blazebit.security.entity.EntityActionFactory;
import com.blazebit.security.entity.EntityPermissionUtils;
import com.blazebit.security.entity.EntityResourceFactory;
import com.blazebit.security.entity.EntityResourceMetamodel;
import com.blazebit.security.entity.UserContext;
import com.blazebit.security.model.Permission;
import com.blazebit.security.model.Role;
import com.blazebit.security.model.Subject;
import com.blazebit.security.service.PermissionService;
import com.blazebit.security.spi.ActionFactory;
import com.blazebit.security.spi.PermissionFactory;
import com.blazebit.security.spi.ResourceFactory;
import com.blazebit.security.web.bean.main.DialogBean;
import com.blazebit.security.web.bean.model.TreeNodeModel.Marking;
import com.blazebit.security.web.context.UserSession;
/**
*
* @author cuszk
*/
@ViewScoped
@ManagedBean(name = "permissionHandlingBaseBean")
@Named
public class PermissionHandlingBaseBean extends PermissionTreeHandlingBaseBean {

    private static final long serialVersionUID = 1L;

    @Inject
    protected PermissionFactory permissionFactory;
    @Inject
    protected UserSession userSession;
    @Inject
    protected UserContext userContext;
    @Inject
    protected PermissionDataAccess permissionDataAccess;
    @Inject
    protected PermissionManager permissionManager;
    @Inject
    protected EntityResourceFactory entityFieldFactory;
    @Inject
    protected ResourceFactory resourceFactory;
    @Inject
    protected ActionFactory actionFactory;
    @Inject
    protected PermissionService permissionService;
    @Inject
    protected EntityActionFactory actionUtils;
    @Inject
    protected EntityResourceMetamodel resourceMetamodel;
    @Inject
    protected PermissionHandling permissionHandling;
    @Inject
    protected DialogBean dialogBean;

    /**
     * Rebuilds the "current permissions" tree under a fresh root node.
     *
     * @param allPermissions all permissions currently held
     * @param selectedPermissions permissions selected in the UI (may be mutated:
     *            previously replaced permissions are added back)
     * @param prevRevoked permissions revoked in a previous step
     * @param prevReplaced permissions replaced in a previous step
     * @param hideFieldLevel whether to hide field-level entries in the tree
     * @return the rebuilt tree root
     */
    protected TreeNode rebuildCurrentTree(List<Permission> allPermissions, Set<Permission> selectedPermissions, Set<Permission> prevRevoked, Set<Permission> prevReplaced, boolean hideFieldLevel) {
        DefaultTreeNode root = new DefaultTreeNode();
        return rebuildCurrentTree(root, allPermissions, selectedPermissions, prevRevoked, prevReplaced, hideFieldLevel);
    }

    /**
     * Rebuilds the "current permissions" tree under the given node, marking
     * permissions that would be revoked or replaced as {@link Marking#REMOVED}.
     *
     * @param node tree node to attach the rebuilt tree to
     * @param allPermissions all permissions currently held
     * @param selectedPermissions permissions selected in the UI (may be mutated)
     * @param prevRevoked permissions revoked in a previous step
     * @param prevReplaced permissions replaced in a previous step
     * @param hideFieldLevel whether to hide field-level entries in the tree
     * @return the rebuilt tree
     */
    protected TreeNode rebuildCurrentTree(TreeNode node, List<Permission> allPermissions, Set<Permission> selectedPermissions, Set<Permission> prevRevoked, Set<Permission> prevReplaced, boolean hideFieldLevel) {
        List<Permission> userPermissions = EntityPermissionUtils.getSeparatedPermissionsByResource(allPermissions).get(0);
        List<Permission> userDataPermissions = EntityPermissionUtils.getSeparatedPermissionsByResource(allPermissions).get(1);
        // add back previously replaced permissions that are not implied by the selection
        for (Permission replacedPermission : prevReplaced) {
            if (!PermissionUtils.implies(selectedPermissions, replacedPermission)) {
                selectedPermissions.add(replacedPermission);
            }
        }
        Set<Permission> revoked = new HashSet<Permission>();
        // add back previously revoked permissions that are not implied by the selection
        for (Permission revokedPermission : prevRevoked) {
            if (!PermissionUtils.implies(selectedPermissions, revokedPermission)) {
                revoked.add(revokedPermission);
            }
        }
        Set<Permission> replaced = permissionHandling.getReplacedByGranting(allPermissions, selectedPermissions);
        List<Set<Permission>> revoke = permissionHandling.getRevokableFromSelected(allPermissions, concat(allPermissions, selectedPermissions));
        revoked.addAll(revoke.get(0));
        dialogBean.setNotRevoked(revoke.get(1));
        Set<Permission> removablePermissions = new HashSet<Permission>();
        removablePermissions.addAll(revoked);
        removablePermissions.addAll(replaced);
        // current permission tree with the removable entries marked
        return getImmutablePermissionTree(node, userPermissions, userDataPermissions, removablePermissions, new HashSet<Permission>(), Marking.REMOVED, Marking.NONE,
                                          hideFieldLevel);
    }

    /**
     * Returns the union of the two collections as a new set; neither input is
     * modified.
     */
    protected Set<Permission> concat(Collection<Permission> current, Collection<Permission> added) {
        Set<Permission> ret = new HashSet<Permission>();
        ret.addAll(current);
        ret.addAll(added);
        return ret;
    }

    /**
     * Common preparation step shared by both executeRevokeAndGrant overloads:
     * collects previously revoked permissions that are no longer implied by the
     * selection, adds previously replaced permissions back into
     * {@code selected} (mutating it) when no overriding permission is selected,
     * and finally adds everything revokable from the selection.
     *
     * @return the set of permissions to revoke
     */
    private Set<Permission> restoreReplacedAndCollectRevoked(Collection<Permission> current, Set<Permission> selected, Set<Permission> prevRevoked, Set<Permission> prevReplaced) {
        Set<Permission> revoked = new HashSet<Permission>();
        // add back previously revoked permissions not implied by the selection
        for (Permission permission : prevRevoked) {
            if (!PermissionUtils.implies(selected, permission)) {
                revoked.add(permission);
            }
        }
        // add back previously replaced permissions if no overriding permission exists in the current selected ones
        for (Permission permission : prevReplaced) {
            if (!PermissionUtils.implies(selected, permission)) {
                selected.add(permission);
            }
        }
        revoked.addAll(permissionHandling.getRevokableFromSelected(current, concat(current, selected)).get(0));
        return revoked;
    }

    /** Wraps the revoked and granted sets into the conventional two-element result list. */
    private List<Set<Permission>> asResultList(Set<Permission> revoked, Set<Permission> granted) {
        List<Set<Permission>> ret = new ArrayList<Set<Permission>>();
        ret.add(revoked);
        ret.add(granted);
        return ret;
    }

    /** Revokes and grants permissions for a role; see the simulate overload. */
    protected List<Set<Permission>> executeRevokeAndGrant(Role role, Collection<Permission> current, Set<Permission> selected, Set<Permission> prevRevoked, Set<Permission> prevReplaced) {
        return executeRevokeAndGrant(role, current, selected, prevRevoked, prevReplaced, false);
    }

    /**
     * Computes the revoked/granted sets from the selection and applies (or, when
     * {@code simulate} is true, only computes) them for the given role.
     *
     * @return a two-element list: [finalRevoked, finalGranted]
     */
    protected List<Set<Permission>> executeRevokeAndGrant(Role role, Collection<Permission> current, Set<Permission> selected, Set<Permission> prevRevoked, Set<Permission> prevReplaced, boolean simulate) {
        Set<Permission> revoked = restoreReplacedAndCollectRevoked(current, selected, prevRevoked, prevReplaced);
        Set<Permission> granted = permissionHandling.getGrantable(PermissionUtils.removeAll(current, revoked), selected).get(0);
        return performOperations(role, current, revoked, granted, simulate);
    }

    /** Revokes and grants permissions for a subject; see the simulate overload. */
    protected List<Set<Permission>> executeRevokeAndGrant(Subject subject, Collection<Permission> current, Set<Permission> selected, Set<Permission> prevRevoked, Set<Permission> prevReplaced) {
        return executeRevokeAndGrant(subject, current, selected, prevRevoked, prevReplaced, false);
    }

    /**
     * Computes the revoked/granted sets from the selection and applies (or, when
     * {@code simulate} is true, only computes) them for the given subject.
     *
     * @return a two-element list: [finalRevoked, finalGranted]
     */
    protected List<Set<Permission>> executeRevokeAndGrant(Subject subject, Collection<Permission> current, Set<Permission> selected, Set<Permission> prevRevoked, Set<Permission> prevReplaced, boolean simulate) {
        Set<Permission> revoked = restoreReplacedAndCollectRevoked(current, selected, prevRevoked, prevReplaced);
        Set<Permission> granted = permissionHandling.getGrantable(PermissionUtils.removeAll(current, revoked), selected).get(0);
        return performOperations(subject, current, revoked, granted, simulate);
    }

    protected List<Set<Permission>> performOperations(Subject subject, Collection<Permission> current, Set<Permission> revoked, Set<Permission> granted) {
        return performOperations(subject, current, revoked, granted, false);
    }

    /**
     * Merges the revoked/granted sets against the current permissions and
     * applies the result for the subject, authorized by the current user.
     */
    protected List<Set<Permission>> performOperations(Subject subject, Collection<Permission> current, Set<Permission> revoked, Set<Permission> granted, boolean simulate) {
        List<Set<Permission>> permissions = permissionHandling.getRevokedAndGrantedAfterMerge(current, revoked, granted);
        Set<Permission> finalRevoked = permissions.get(0);
        Set<Permission> finalGranted = permissions.get(1);
        return revokeAndGrant(userContext.getUser(), subject, finalRevoked, finalGranted, simulate);
    }

    protected List<Set<Permission>> revokeAndGrant(Subject subject, Set<Permission> finalRevoked, Set<Permission> finalGranted) {
        return revokeAndGrant(userContext.getUser(), subject, finalRevoked, finalGranted, false);
    }

    protected List<Set<Permission>> revokeAndGrant(Subject authorizer, Subject subject, Set<Permission> finalRevoked, Set<Permission> finalGranted) {
        return revokeAndGrant(authorizer, subject, finalRevoked, finalGranted, false);
    }

    /**
     * Applies the final revoke/grant sets for a subject unless simulating.
     * When the subject would authorize changes to itself, the admin user is
     * used as authorizer instead.
     *
     * @return a two-element list: [finalRevoked, finalGranted]
     */
    protected List<Set<Permission>> revokeAndGrant(Subject authorizer, Subject subject, Set<Permission> finalRevoked, Set<Permission> finalGranted, boolean simulate) {
        if (!simulate) {
            if (subject.equals(authorizer)) {
                authorizer = userSession.getAdmin();
            }
            permissionService.revokeAndGrant(authorizer, subject, finalRevoked, finalGranted);
        }
        return asResultList(finalRevoked, finalGranted);
    }

    protected List<Set<Permission>> performOperations(Role role, Collection<Permission> current, Set<Permission> revoked, Set<Permission> granted) {
        return performOperations(role, current, revoked, granted, false);
    }

    /**
     * Merges the revoked/granted sets against the current permissions and
     * applies the result for the role.
     */
    protected List<Set<Permission>> performOperations(Role role, Collection<Permission> current, Set<Permission> revoked, Set<Permission> granted, boolean simulate) {
        List<Set<Permission>> permissions = permissionHandling.getRevokedAndGrantedAfterMerge(current, revoked, granted);
        Set<Permission> finalRevoked = permissions.get(0);
        Set<Permission> finalGranted = permissions.get(1);
        return revokeAndGrant(role, finalRevoked, finalGranted, simulate);
    }

    /**
     * Applies the final revoke/grant sets for a role unless simulating,
     * authorized by the current user.
     *
     * @return a two-element list: [finalRevoked, finalGranted]
     */
    protected List<Set<Permission>> revokeAndGrant(Role role, Set<Permission> finalRevoked, Set<Permission> finalGranted, boolean simulate) {
        if (!simulate) {
            permissionService.revokeAndGrant(userContext.getUser(), role, finalRevoked, finalGranted);
        }
        return asResultList(finalRevoked, finalGranted);
    }
}
| |
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.kubernetes.client.mock;
import io.fabric8.kubernetes.api.model.KubernetesListBuilder;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerBuilder;
import io.fabric8.kubernetes.api.model.ReplicationControllerList;
import io.fabric8.kubernetes.api.model.ReplicationControllerListBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.server.mock.KubernetesServer;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class ReplicationControllerTest {
@Rule
public KubernetesServer server = new KubernetesServer();
@Test
public void testList() {
  // Stub an empty list for the default ("test") namespace and a two-item
  // list for "ns1"; each stub is consumed at most once.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers")
      .andReturn(200, new ReplicationControllerListBuilder().build()).once();
  server.expect().withPath("/api/v1/namespaces/ns1/replicationcontrollers")
      .andReturn(200, new ReplicationControllerListBuilder()
          .addNewItem().and()
          .addNewItem().and().build())
      .once();
  KubernetesClient client = server.getClient();

  // Default namespace: the list object exists but holds no items.
  ReplicationControllerList defaultNsList = client.replicationControllers().list();
  assertNotNull(defaultNsList);
  assertEquals(0, defaultNsList.getItems().size());

  // ns1: both stubbed items are returned.
  ReplicationControllerList ns1List = client.replicationControllers().inNamespace("ns1").list();
  assertNotNull(ns1List);
  assertEquals(2, ns1List.getItems().size());
}
@Test
public void testGet() {
  // repl1 lives in the default namespace, repl2 only in ns1.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1")
      .andReturn(200, new ReplicationControllerBuilder().build()).once();
  server.expect().withPath("/api/v1/namespaces/ns1/replicationcontrollers/repl2")
      .andReturn(200, new ReplicationControllerBuilder().build()).once();
  KubernetesClient client = server.getClient();

  // Found in the default namespace.
  ReplicationController result = client.replicationControllers().withName("repl1").get();
  assertNotNull(result);
  // repl2 is not stubbed in the default namespace, so the lookup misses.
  result = client.replicationControllers().withName("repl2").get();
  assertNull(result);
  // An explicit namespace finds it.
  result = client.replicationControllers().inNamespace("ns1").withName("repl2").get();
  assertNotNull(result);
}
@Test
public void testDelete() {
  // repl1 ("test" namespace): first GET reports status.replicas=1, the next
  // (up to 5) GETs report 0. NOTE(review): presumably the client scales the
  // controller down and polls until status reaches 0 before deleting —
  // confirm against the fabric8 delete implementation.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, new ReplicationControllerBuilder() .withNewMetadata()
      .withName("repl1")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .endStatus()
      .build()).once();
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, new ReplicationControllerBuilder() .withNewMetadata()
      .withName("repl1")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(0)
      .endStatus()
      .build()).times(5);
  // repl2 is stubbed only in namespace "ns1", with the same 1-then-0
  // status sequence.
  server.expect().withPath("/api/v1/namespaces/ns1/replicationcontrollers/repl2").andReturn(200, new ReplicationControllerBuilder() .withNewMetadata()
      .withName("repl2")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .endStatus()
      .build()).once();
  server.expect().withPath("/api/v1/namespaces/ns1/replicationcontrollers/repl2").andReturn(200, new ReplicationControllerBuilder() .withNewMetadata()
      .withName("repl2")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(0)
      .endSpec()
      .withNewStatus()
      .withReplicas(0)
      .endStatus()
      .build()).times(5);
  KubernetesClient client = server.getClient();
  // Delete in the default namespace succeeds.
  Boolean deleted = client.replicationControllers().withName("repl1").delete();
  assertNotNull(deleted);
  // repl2 is not stubbed in the default namespace: delete reports failure.
  deleted = client.replicationControllers().withName("repl2").delete();
  assertFalse(deleted);
  // With the correct namespace the delete succeeds.
  deleted = client.replicationControllers().inNamespace("ns1").withName("repl2").delete();
  assertTrue(deleted);
}
@Test
public void testScale() {
  // Every GET of repl1 reports spec.replicas=5 but status.replicas=1,
  // i.e. the desired count has been set but not yet reached.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1")
      .andReturn(200, new ReplicationControllerBuilder()
          .withNewMetadata()
          .withName("repl1")
          .withResourceVersion("1")
          .endMetadata()
          .withNewSpec()
          .withReplicas(5)
          .endSpec()
          .withNewStatus()
          .withReplicas(1)
          .endStatus()
          .build()).always();
  KubernetesClient client = server.getClient();

  // scale(5) without waiting returns immediately with the stubbed state:
  // desired 5, observed 1.
  ReplicationController scaled = client.replicationControllers().withName("repl1").scale(5);
  assertNotNull(scaled);
  assertNotNull(scaled.getSpec());
  assertEquals(5, scaled.getSpec().getReplicas().intValue());
  assertEquals(1, scaled.getStatus().getReplicas().intValue());
}
@Test
public void testScaleAndWait() {
  // First GET: desired 5 replicas but only 1 observed — not yet scaled.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, new ReplicationControllerBuilder()
      .withNewMetadata()
      .withName("repl1")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(5)
      .endSpec()
      .withNewStatus()
      .withReplicas(1)
      .endStatus()
      .build()).once();
  // All subsequent GETs: status has caught up with spec (5 == 5), so a
  // waiting scale call can complete.
  server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, new ReplicationControllerBuilder()
      .withNewMetadata()
      .withName("repl1")
      .withResourceVersion("1")
      .endMetadata()
      .withNewSpec()
      .withReplicas(5)
      .endSpec()
      .withNewStatus()
      .withReplicas(5)
      .endStatus()
      .build()).always();
  KubernetesClient client = server.getClient();
  // scale(5, wait=true) polls until status.replicas matches the request.
  ReplicationController repl = client.replicationControllers().withName("repl1").scale(5, true);
  assertNotNull(repl);
  assertNotNull(repl.getSpec());
  assertEquals(5, repl.getSpec().getReplicas().intValue());
  assertEquals(5, repl.getStatus().getReplicas().intValue());
}
@Ignore
@Test
public void testUpdate() {
ReplicationController repl1 = new ReplicationControllerBuilder()
.withNewMetadata()
.withName("repl1")
.withNamespace("test")
.endMetadata()
.withNewSpec()
.withReplicas(1)
.withNewTemplate()
.withNewMetadata().withLabels(new HashMap<String, String>()).endMetadata()
.withNewSpec()
.addNewContainer()
.withImage("img1")
.endContainer()
.endSpec()
.endTemplate()
.endSpec()
.withNewStatus().withReplicas(1).endStatus()
.build();
server.expect().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, repl1).once();
server.expect().put().withPath("/api/v1/namespaces/test/replicationcontrollers/repl1").andReturn(200, repl1).once();
server.expect().get().withPath("/api/v1/namespaces/test/replicationcontrollers").andReturn(200, new ReplicationControllerListBuilder().withItems(repl1).build()).once();
server.expect().post().withPath("/api/v1/namespaces/test/replicationcontrollers").andReturn(201, repl1).once();
server.expect().withPath("/api/v1/namespaces/test/pods").andReturn(200, new KubernetesListBuilder().build()).once();
KubernetesClient client = server.getClient();
repl1 = client.replicationControllers().withName("repl1")
.rolling()
.withTimeout(5, TimeUnit.MINUTES)
.updateImage("");
assertNotNull(repl1);
}
}
| |
/*
* Copyright (C) 2013 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import okhttp3.RealCall.AsyncCall;
import okhttp3.internal.Util;
/**
* Policy on when async requests are executed.
*
* <p>Each dispatcher uses an {@link ExecutorService} to run calls internally. If you supply your
* own executor, it should be able to run {@linkplain #getMaxRequests the configured maximum} number
* of calls concurrently.
*/
public final class Dispatcher {
  /** Maximum async calls running concurrently across all hosts. */
  private int maxRequests = 64;
  /** Maximum async calls running concurrently against any single host. */
  private int maxRequestsPerHost = 5;
  /** Fired (outside the dispatcher lock) whenever the running-call count drops to zero. */
  private @Nullable Runnable idleCallback;

  /** Executes calls. Created lazily. */
  private @Nullable ExecutorService executorService;

  /** Ready async calls in the order they'll be run. */
  private final Deque<AsyncCall> readyAsyncCalls = new ArrayDeque<>();

  /** Running asynchronous calls. Includes canceled calls that haven't finished yet. */
  private final Deque<AsyncCall> runningAsyncCalls = new ArrayDeque<>();

  /** Running synchronous calls. Includes canceled calls that haven't finished yet. */
  private final Deque<RealCall> runningSyncCalls = new ArrayDeque<>();

  /** Creates a dispatcher that runs async calls on the caller-supplied executor. */
  public Dispatcher(ExecutorService executorService) {
    this.executorService = executorService;
  }

  /** Creates a dispatcher whose executor is created lazily on first use. */
  public Dispatcher() {
  }

  /** Returns the executor, creating the default one on first use. */
  public synchronized ExecutorService executorService() {
    if (executorService == null) {
      // Unbounded thread pool: concurrency is limited by maxRequests /
      // maxRequestsPerHost, not by the executor. Idle threads die after 60s.
      executorService = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 60, TimeUnit.SECONDS,
          new SynchronousQueue<Runnable>(), Util.threadFactory("OkHttp Dispatcher", false));
    }
    return executorService;
  }

  /**
   * Set the maximum number of requests to execute concurrently. Above this requests queue in
   * memory, waiting for the running calls to complete.
   *
   * <p>If more than {@code maxRequests} requests are in flight when this is invoked, those requests
   * will remain in flight.
   */
  public synchronized void setMaxRequests(int maxRequests) {
    if (maxRequests < 1) {
      throw new IllegalArgumentException("max < 1: " + maxRequests);
    }
    this.maxRequests = maxRequests;
    // Raising the limit may let queued calls run immediately.
    promoteCalls();
  }

  public synchronized int getMaxRequests() {
    return maxRequests;
  }

  /**
   * Set the maximum number of requests for each host to execute concurrently. This limits requests
   * by the URL's host name. Note that concurrent requests to a single IP address may still exceed
   * this limit: multiple hostnames may share an IP address or be routed through the same HTTP
   * proxy.
   *
   * <p>If more than {@code maxRequestsPerHost} requests are in flight when this is invoked, those
   * requests will remain in flight.
   */
  public synchronized void setMaxRequestsPerHost(int maxRequestsPerHost) {
    if (maxRequestsPerHost < 1) {
      throw new IllegalArgumentException("max < 1: " + maxRequestsPerHost);
    }
    this.maxRequestsPerHost = maxRequestsPerHost;
    // Raising the limit may let queued calls run immediately.
    promoteCalls();
  }

  public synchronized int getMaxRequestsPerHost() {
    return maxRequestsPerHost;
  }

  /**
   * Set a callback to be invoked each time the dispatcher becomes idle (when the number of running
   * calls returns to zero).
   *
   * <p>Note: The time at which a {@linkplain Call call} is considered idle is different depending
   * on whether it was run {@linkplain Call#enqueue(Callback) asynchronously} or
   * {@linkplain Call#execute() synchronously}. Asynchronous calls become idle after the
   * {@link Callback#onResponse onResponse} or {@link Callback#onFailure onFailure} callback has
   * returned. Synchronous calls become idle once {@link Call#execute() execute()} returns. This
   * means that if you are doing synchronous calls the network layer will not truly be idle until
   * every returned {@link Response} has been closed.
   */
  public synchronized void setIdleCallback(@Nullable Runnable idleCallback) {
    this.idleCallback = idleCallback;
  }

  /** Runs {@code call} immediately if capacity allows; otherwise parks it in the ready queue. */
  synchronized void enqueue(AsyncCall call) {
    if (runningAsyncCalls.size() < maxRequests && runningCallsForHost(call) < maxRequestsPerHost) {
      runningAsyncCalls.add(call);
      executorService().execute(call);
    } else {
      readyAsyncCalls.add(call);
    }
  }

  /**
   * Cancel all calls currently enqueued or executing. Includes calls executed both {@linkplain
   * Call#execute() synchronously} and {@linkplain Call#enqueue asynchronously}.
   */
  public synchronized void cancelAll() {
    for (AsyncCall call : readyAsyncCalls) {
      call.get().cancel();
    }
    for (AsyncCall call : runningAsyncCalls) {
      call.get().cancel();
    }
    for (RealCall call : runningSyncCalls) {
      call.cancel();
    }
  }

  /**
   * Moves eligible calls from the ready queue to the running queue and submits them.
   * Every caller holds the dispatcher lock (all call sites are synchronized on this).
   */
  private void promoteCalls() {
    if (runningAsyncCalls.size() >= maxRequests) return; // Already running max capacity.
    if (readyAsyncCalls.isEmpty()) return; // No ready calls to promote.

    for (Iterator<AsyncCall> i = readyAsyncCalls.iterator(); i.hasNext(); ) {
      AsyncCall call = i.next();

      // Skip calls whose host is already at its per-host limit; later calls may still qualify.
      if (runningCallsForHost(call) < maxRequestsPerHost) {
        i.remove();
        runningAsyncCalls.add(call);
        executorService().execute(call);
      }

      if (runningAsyncCalls.size() >= maxRequests) return; // Reached max capacity.
    }
  }

  /** Returns the number of running calls that share a host with {@code call}. */
  private int runningCallsForHost(AsyncCall call) {
    int result = 0;
    for (AsyncCall c : runningAsyncCalls) {
      if (c.host().equals(call.host())) result++;
    }
    return result;
  }

  /** Used by {@code Call#execute} to signal it is in-flight. */
  synchronized void executed(RealCall call) {
    runningSyncCalls.add(call);
  }

  /** Used by {@code AsyncCall#run} to signal completion. */
  void finished(AsyncCall call) {
    finished(runningAsyncCalls, call, true);
  }

  /** Used by {@code Call#execute} to signal completion. */
  void finished(RealCall call) {
    finished(runningSyncCalls, call, false);
  }

  /**
   * Removes {@code call} from {@code calls}, optionally promotes queued async calls,
   * and fires the idle callback outside the lock to avoid running alien code while
   * synchronized.
   */
  private <T> void finished(Deque<T> calls, T call, boolean promoteCalls) {
    int runningCallsCount;
    Runnable idleCallback;
    synchronized (this) {
      if (!calls.remove(call)) throw new AssertionError("Call wasn't in-flight!");
      if (promoteCalls) promoteCalls();
      runningCallsCount = runningCallsCount();
      idleCallback = this.idleCallback;
    }

    if (runningCallsCount == 0 && idleCallback != null) {
      idleCallback.run();
    }
  }

  /** Returns a snapshot of the calls currently awaiting execution. */
  public synchronized List<Call> queuedCalls() {
    List<Call> result = new ArrayList<>();
    for (AsyncCall asyncCall : readyAsyncCalls) {
      result.add(asyncCall.get());
    }
    return Collections.unmodifiableList(result);
  }

  /** Returns a snapshot of the calls currently being executed. */
  public synchronized List<Call> runningCalls() {
    List<Call> result = new ArrayList<>();
    result.addAll(runningSyncCalls);
    for (AsyncCall asyncCall : runningAsyncCalls) {
      result.add(asyncCall.get());
    }
    return Collections.unmodifiableList(result);
  }

  public synchronized int queuedCallsCount() {
    return readyAsyncCalls.size();
  }

  public synchronized int runningCallsCount() {
    return runningAsyncCalls.size() + runningSyncCalls.size();
  }
}
| |
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.jcublas.buffer.factory;
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.FloatPointer;
import org.bytedeco.javacpp.IntPointer;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.indexer.*;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.factory.DataBufferFactory;
import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.jcublas.buffer.CudaDoubleDataBuffer;
import org.nd4j.linalg.jcublas.buffer.CudaFloatDataBuffer;
import org.nd4j.linalg.jcublas.buffer.CudaHalfDataBuffer;
import org.nd4j.linalg.jcublas.buffer.CudaIntDataBuffer;
import org.nd4j.linalg.util.ArrayUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
/**
* Creates cuda buffers
*
* @author Adam Gibson
*/
@Slf4j
public class CudaDataBufferFactory implements DataBufferFactory {
    protected DataBuffer.AllocationMode allocationMode;

    @Override
    public void setAllocationMode(DataBuffer.AllocationMode allocationMode) {
        this.allocationMode = allocationMode;
    }

    /**
     * Returns the allocation mode, lazily read from the {@code alloc} system
     * property ("heap", "direct" or "javacpp") on first call.
     *
     * <p>FIX: previously this dereferenced the property value directly and threw
     * a NullPointerException whenever {@code -Dalloc} was not set. The
     * constant-first comparisons below are null-safe; if the property is unset,
     * the mode simply stays unconfigured (null), as before for unknown values.
     */
    @Override
    public DataBuffer.AllocationMode allocationMode() {
        if (allocationMode == null) {
            String otherAlloc = System.getProperty("alloc");
            if ("heap".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.HEAP);
            else if ("direct".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.DIRECT);
            else if ("javacpp".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.JAVACPP);
        }
        return allocationMode;
    }

    /**
     * Creates a view over {@code underlyingBuffer} with the same data type,
     * or null when the type is not one of DOUBLE/FLOAT/INT/HALF.
     */
    @Override
    public DataBuffer create(DataBuffer underlyingBuffer, long offset, long length) {
        if (underlyingBuffer.dataType() == DataBuffer.Type.DOUBLE) {
            return new CudaDoubleDataBuffer(underlyingBuffer, length, offset);
        } else if (underlyingBuffer.dataType() == DataBuffer.Type.FLOAT) {
            return new CudaFloatDataBuffer(underlyingBuffer, length, offset);
        } else if (underlyingBuffer.dataType() == DataBuffer.Type.INT) {
            return new CudaIntDataBuffer(underlyingBuffer, length, offset);
        } else if (underlyingBuffer.dataType() == DataBuffer.Type.HALF) {
            return new CudaHalfDataBuffer(underlyingBuffer, length, offset);
        }
        return null;
    }

    /** Creates a new buffer of the same data type and length as {@code buffer}. */
    @Override
    public DataBuffer createSame(DataBuffer buffer, boolean init) {
        switch (buffer.dataType()) {
            case INT:
                return createInt(buffer.length(), init);
            case FLOAT:
                return createFloat(buffer.length(), init);
            case DOUBLE:
                return createDouble(buffer.length(), init);
            case HALF:
                return createHalf(buffer.length(), init);
            default:
                throw new UnsupportedOperationException("Unknown dataType: " + buffer.dataType());
        }
    }

    /** Same as {@link #createSame(DataBuffer, boolean)}, attached to {@code workspace}. */
    @Override
    public DataBuffer createSame(DataBuffer buffer, boolean init, MemoryWorkspace workspace) {
        switch (buffer.dataType()) {
            case INT:
                return createInt(buffer.length(), init, workspace);
            case FLOAT:
                return createFloat(buffer.length(), init, workspace);
            case DOUBLE:
                return createDouble(buffer.length(), init, workspace);
            case HALF:
                return createHalf(buffer.length(), init, workspace);
            default:
                throw new UnsupportedOperationException("Unknown dataType: " + buffer.dataType());
        }
    }

    // ---------------------------------------------------------------------
    // Workspace-attached creators
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createFloat(float[] data, MemoryWorkspace workspace) {
        return createFloat(data, true, workspace);
    }

    @Override
    public DataBuffer createFloat(float[] data, boolean copy, MemoryWorkspace workspace) {
        return new CudaFloatDataBuffer(data, copy, workspace);
    }

    @Override
    public DataBuffer createInt(int[] data, MemoryWorkspace workspace) {
        return new CudaIntDataBuffer(data, workspace);
    }

    @Override
    public DataBuffer createInt(int[] data, boolean copy, MemoryWorkspace workspace) {
        return new CudaIntDataBuffer(data, copy, workspace);
    }

    // ---------------------------------------------------------------------
    // Offset-based creators (views into larger allocations)
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createInt(long offset, ByteBuffer buffer, int length) {
        return new CudaIntDataBuffer(buffer, length, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, ByteBuffer buffer, int length) {
        return new CudaFloatDataBuffer(buffer, length, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, ByteBuffer buffer, int length) {
        return new CudaDoubleDataBuffer(buffer, length, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, int length) {
        return new CudaDoubleDataBuffer(length, 8, offset);
    }

    /**
     * Creates a float buffer of {@code length} elements (4 bytes each) at {@code offset}.
     *
     * <p>FIX: previously passed {@code length} as the third (offset) argument,
     * silently ignoring the requested offset — compare the sibling
     * {@code createDouble(long, int)} and {@code createInt(long, int)} overloads.
     */
    @Override
    public DataBuffer createFloat(long offset, int length) {
        return new CudaFloatDataBuffer(length, 4, offset);
    }

    @Override
    public DataBuffer createInt(long offset, int length) {
        return new CudaIntDataBuffer(length, 4, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, int[] data) {
        return new CudaDoubleDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, int[] data) {
        return new CudaFloatDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createInt(long offset, int[] data) {
        return new CudaIntDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, double[] data) {
        return new CudaDoubleDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, double[] data, MemoryWorkspace workspace) {
        return new CudaDoubleDataBuffer(data, true, offset, workspace);
    }

    @Override
    public DataBuffer createDouble(long offset, byte[] data, int length) {
        return new CudaDoubleDataBuffer(ArrayUtil.toDoubleArray(data), true, offset);
    }

    // NOTE(review): unlike the other float creators this decodes the bytes as
    // doubles (toDoubleArray) and ignores {@code length}; preserved as-is but
    // looks suspicious — confirm against the CudaFloatDataBuffer constructors.
    @Override
    public DataBuffer createFloat(long offset, byte[] data, int length) {
        return new CudaFloatDataBuffer(ArrayUtil.toDoubleArray(data), true, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, double[] data) {
        return new CudaFloatDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createInt(long offset, double[] data) {
        return new CudaIntDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, float[] data) {
        return new CudaDoubleDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, float[] data) {
        return new CudaFloatDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, float[] data, MemoryWorkspace workspace) {
        return new CudaFloatDataBuffer(data, true, offset, workspace);
    }

    @Override
    public DataBuffer createInt(long offset, float[] data) {
        return new CudaIntDataBuffer(data, true, offset);
    }

    /**
     * FIX: previously hard-coded {@code true} and ignored the {@code copy} flag,
     * unlike every other {@code (offset, data, copy)} overload in this class.
     */
    @Override
    public DataBuffer createDouble(long offset, int[] data, boolean copy) {
        return new CudaDoubleDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, int[] data, boolean copy) {
        return new CudaFloatDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createInt(long offset, int[] data, boolean copy) {
        return new CudaIntDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, double[] data, boolean copy) {
        return new CudaDoubleDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, double[] data, boolean copy) {
        return new CudaFloatDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createInt(long offset, double[] data, boolean copy) {
        return new CudaIntDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createDouble(long offset, float[] data, boolean copy) {
        return new CudaDoubleDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createFloat(long offset, float[] data, boolean copy) {
        return new CudaFloatDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createInt(long offset, float[] data, boolean copy) {
        return new CudaIntDataBuffer(data, copy, offset);
    }

    // ---------------------------------------------------------------------
    // NIO buffer-backed creators
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createInt(ByteBuffer buffer, int length) {
        return new CudaIntDataBuffer(buffer, length);
    }

    @Override
    public DataBuffer createFloat(ByteBuffer buffer, int length) {
        return new CudaFloatDataBuffer(buffer, length);
    }

    @Override
    public DataBuffer createDouble(ByteBuffer buffer, int length) {
        return new CudaDoubleDataBuffer(buffer, length);
    }

    // ---------------------------------------------------------------------
    // Length-based creators
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createDouble(long length) {
        return new CudaDoubleDataBuffer(length);
    }

    @Override
    public DataBuffer createDouble(long length, boolean initialize) {
        return new CudaDoubleDataBuffer(length, initialize);
    }

    @Override
    public DataBuffer createFloat(long length) {
        return new CudaFloatDataBuffer(length);
    }

    @Override
    public DataBuffer createFloat(long length, boolean initialize) {
        return new CudaFloatDataBuffer(length, initialize);
    }

    @Override
    public DataBuffer createFloat(long length, boolean initialize, MemoryWorkspace workspace) {
        return new CudaFloatDataBuffer(length, initialize, workspace);
    }

    @Override
    public DataBuffer createInt(long length) {
        return new CudaIntDataBuffer(length);
    }

    @Override
    public DataBuffer createInt(long length, boolean initialize) {
        return new CudaIntDataBuffer(length, initialize);
    }

    @Override
    public DataBuffer createInt(long length, boolean initialize, MemoryWorkspace workspace) {
        return new CudaIntDataBuffer(length, initialize, workspace);
    }

    // ---------------------------------------------------------------------
    // Array-backed creators (converting between element types via ArrayUtil)
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createDouble(int[] data) {
        return new CudaDoubleDataBuffer(ArrayUtil.toDoubles(data));
    }

    @Override
    public DataBuffer createFloat(int[] data) {
        return new CudaFloatDataBuffer(ArrayUtil.toFloats(data));
    }

    @Override
    public DataBuffer createInt(int[] data) {
        return new CudaIntDataBuffer(data);
    }

    @Override
    public DataBuffer createDouble(double[] data) {
        return new CudaDoubleDataBuffer(data);
    }

    @Override
    public DataBuffer createDouble(byte[] data, int length) {
        return new CudaDoubleDataBuffer(data, length);
    }

    @Override
    public DataBuffer createFloat(byte[] data, int length) {
        return new CudaFloatDataBuffer(data, length);
    }

    @Override
    public DataBuffer createFloat(double[] data) {
        return new CudaFloatDataBuffer(ArrayUtil.toFloats(data));
    }

    @Override
    public DataBuffer createInt(double[] data) {
        return new CudaIntDataBuffer(ArrayUtil.toInts(data));
    }

    @Override
    public DataBuffer createDouble(float[] data) {
        return new CudaDoubleDataBuffer(ArrayUtil.toDoubles(data));
    }

    @Override
    public DataBuffer createFloat(float[] data) {
        return new CudaFloatDataBuffer(data);
    }

    @Override
    public DataBuffer createInt(float[] data) {
        return new CudaIntDataBuffer(ArrayUtil.toInts(data));
    }

    // The int[]/double[]/float[] + copy overloads below always copy during the
    // type conversion, so the copy flag has no further effect.
    @Override
    public DataBuffer createDouble(int[] data, boolean copy) {
        return new CudaDoubleDataBuffer(ArrayUtil.toDouble(data));
    }

    @Override
    public DataBuffer createFloat(int[] data, boolean copy) {
        return new CudaFloatDataBuffer(ArrayUtil.toFloats(data));
    }

    @Override
    public DataBuffer createInt(int[] data, boolean copy) {
        return new CudaIntDataBuffer(data);
    }

    @Override
    public DataBuffer createDouble(double[] data, boolean copy) {
        return new CudaDoubleDataBuffer(data);
    }

    @Override
    public DataBuffer createFloat(double[] data, boolean copy) {
        return new CudaFloatDataBuffer(ArrayUtil.toFloats(data));
    }

    @Override
    public DataBuffer createInt(double[] data, boolean copy) {
        return new CudaIntDataBuffer(ArrayUtil.toInts(data));
    }

    @Override
    public DataBuffer createDouble(float[] data, boolean copy) {
        return new CudaDoubleDataBuffer(ArrayUtil.toDoubles(data));
    }

    @Override
    public DataBuffer createFloat(float[] data, boolean copy) {
        return new CudaFloatDataBuffer(data);
    }

    @Override
    public DataBuffer createInt(float[] data, boolean copy) {
        return new CudaIntDataBuffer(ArrayUtil.toInts(data));
    }

    // ---------------------------------------------------------------------
    // Pointer-backed creators
    // ---------------------------------------------------------------------

    /**
     * Create a data buffer based on the
     * given pointer, data buffer type,
     * and length of the buffer
     *
     * @param pointer the pointer to use
     * @param type the type of buffer
     * @param length the length of the buffer
     * @param indexer
     * @return the data buffer
     * backed by this pointer with the given
     * type and length.
     */
    @Override
    public DataBuffer create(Pointer pointer, DataBuffer.Type type, long length, Indexer indexer) {
        switch (type) {
            case INT:
                return new CudaIntDataBuffer(pointer, indexer, length);
            case DOUBLE:
                return new CudaDoubleDataBuffer(pointer, indexer, length);
            case FLOAT:
                return new CudaFloatDataBuffer(pointer, indexer, length);
            case HALF:
                return new CudaHalfDataBuffer(pointer, indexer, length);
        }

        throw new IllegalArgumentException("Illegal type " + type);
    }

    @Override
    public DataBuffer create(DoublePointer doublePointer, long length) {
        return new CudaDoubleDataBuffer(doublePointer, DoubleIndexer.create(doublePointer), length);
    }

    @Override
    public DataBuffer create(IntPointer intPointer, long length) {
        return new CudaIntDataBuffer(intPointer, IntIndexer.create(intPointer), length);
    }

    @Override
    public DataBuffer create(FloatPointer floatPointer, long length) {
        return new CudaFloatDataBuffer(floatPointer, FloatIndexer.create(floatPointer), length);
    }

    // ---------------------------------------------------------------------
    // Half-precision creators
    // ---------------------------------------------------------------------

    @Override
    public DataBuffer createHalf(long length) {
        return new CudaHalfDataBuffer(length);
    }

    @Override
    public DataBuffer createHalf(long length, boolean initialize) {
        return new CudaHalfDataBuffer(length, initialize);
    }

    @Override
    public DataBuffer createHalf(float[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy);
    }

    @Override
    public DataBuffer createHalf(double[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy);
    }

    @Override
    public DataBuffer createHalf(long offset, double[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, float[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, int[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, double[] data) {
        return new CudaHalfDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, float[] data) {
        return new CudaHalfDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, float[] data, MemoryWorkspace workspace) {
        return new CudaHalfDataBuffer(data, true, offset, workspace);
    }

    @Override
    public DataBuffer createHalf(long offset, int[] data) {
        return new CudaHalfDataBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, byte[] data, boolean copy) {
        return new CudaHalfDataBuffer(ArrayUtil.toFloatArray(data), copy, offset);
    }

    @Override
    public DataBuffer createHalf(int[] data, boolean copy) {
        return new CudaHalfDataBuffer(data, copy);
    }

    @Override
    public DataBuffer createHalf(float[] data) {
        return new CudaHalfDataBuffer(data);
    }

    @Override
    public DataBuffer createHalf(double[] data) {
        return new CudaHalfDataBuffer(data);
    }

    @Override
    public DataBuffer createHalf(int[] data) {
        return new CudaHalfDataBuffer(data);
    }

    // NOTE(review): like createFloat(long, byte[], int), the length argument is
    // ignored here; preserved as-is.
    @Override
    public DataBuffer createHalf(long offset, byte[] data, int length) {
        return new CudaHalfDataBuffer(ArrayUtil.toFloatArray(data), true, offset);
    }

    @Override
    public DataBuffer createHalf(long offset, int length) {
        return new CudaHalfDataBuffer(length);
    }

    @Override
    public DataBuffer createHalf(ByteBuffer buffer, int length) {
        return new CudaHalfDataBuffer(buffer, length);
    }

    @Override
    public DataBuffer createHalf(byte[] data, int length) {
        return new CudaHalfDataBuffer(data, length);
    }

    @Override
    public DataBuffer createDouble(long length, boolean initialize, MemoryWorkspace workspace) {
        return new CudaDoubleDataBuffer(length, initialize, workspace);
    }

    @Override
    public DataBuffer createDouble(double[] data, MemoryWorkspace workspace) {
        return createDouble(data, true, workspace);
    }

    @Override
    public DataBuffer createDouble(double[] data, boolean copy, MemoryWorkspace workspace) {
        return new CudaDoubleDataBuffer(data, copy, workspace);
    }

    @Override
    public DataBuffer createHalf(long length, boolean initialize, MemoryWorkspace workspace) {
        return new CudaHalfDataBuffer(length, initialize, workspace);
    }

    @Override
    public DataBuffer createHalf(float[] data, MemoryWorkspace workspace) {
        return createHalf(data, true, workspace);
    }

    @Override
    public DataBuffer createHalf(float[] data, boolean copy, MemoryWorkspace workspace) {
        return new CudaHalfDataBuffer(data, copy, workspace);
    }
}
| |
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.oss.driver.internal.core.config.typesafe;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.config.DriverConfigLoader;
import com.datastax.oss.driver.api.core.config.DriverExecutionProfile;
import com.datastax.oss.driver.api.core.config.DriverOption;
import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import net.jcip.annotations.NotThreadSafe;
@NotThreadSafe
public class DefaultProgrammaticDriverConfigLoaderBuilder
implements ProgrammaticDriverConfigLoaderBuilder {
  /**
   * Default fallback: the application's classpath configuration backed by the driver's
   * reference configuration (loaded with {@code CqlSession}'s class loader).
   */
  public static final Supplier<Config> DEFAULT_FALLBACK_SUPPLIER =
      () ->
          ConfigFactory.defaultApplication()
              // Do not remove root path here, it must be done after merging configs
              .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader()));

  // Programmatically-set options, keyed by their fully-qualified config path.
  private final Map<String, Object> values = new HashMap<>();
  // Supplies fallback configuration for options not set programmatically.
  private final Supplier<Config> fallbackSupplier;
  // Prefix prepended to every option path; may be empty.
  private final String rootPath;
  // Profile that subsequent with* calls write to; the default profile initially.
  private String currentProfileName = DriverExecutionProfile.DEFAULT_NAME;
  /**
   * Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} with default
   * settings.
   *
   * <p>Fallback configuration for options that haven't been specified programmatically will be
   * obtained from standard classpath resources. Application-specific classpath resources will be
   * located using the {@linkplain Thread#getContextClassLoader() the current thread's context class
   * loader}. This might not be suitable for OSGi deployments, which should use {@link
   * #DefaultProgrammaticDriverConfigLoaderBuilder(ClassLoader)} instead.
   */
  public DefaultProgrammaticDriverConfigLoaderBuilder() {
    // Delegates with the standard classpath fallback and the driver's default root path.
    this(DEFAULT_FALLBACK_SUPPLIER, DefaultDriverConfigLoader.DEFAULT_ROOT_PATH);
  }
  /**
   * Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} with default
   * settings but a custom class loader.
   *
   * <p>Fallback configuration for options that haven't been specified programmatically will be
   * obtained from standard classpath resources. Application-specific classpath resources will be
   * located using the provided {@link ClassLoader} instead of {@linkplain
   * Thread#getContextClassLoader() the current thread's context class loader}.
   */
  public DefaultProgrammaticDriverConfigLoaderBuilder(@NonNull ClassLoader appClassLoader) {
    this(
        // Application config resolved via the caller's class loader; the driver's
        // reference config still loads via CqlSession's own class loader.
        () ->
            ConfigFactory.defaultApplication(appClassLoader)
                .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())),
        DefaultDriverConfigLoader.DEFAULT_ROOT_PATH);
  }
/**
* Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} using a custom
* fallback config supplier.
*
* @param fallbackSupplier the supplier that will provide fallback configuration for options that
* haven't been specified programmatically.
* @param rootPath the root path used in non-programmatic sources (fallback reference.conf and
* system properties). In most cases it should be {@link
* DefaultDriverConfigLoader#DEFAULT_ROOT_PATH}. Cannot be null but can be empty.
*/
public DefaultProgrammaticDriverConfigLoaderBuilder(
@NonNull Supplier<Config> fallbackSupplier, @NonNull String rootPath) {
this.fallbackSupplier = fallbackSupplier;
this.rootPath = rootPath;
}
private ProgrammaticDriverConfigLoaderBuilder with(
@NonNull DriverOption option, @Nullable Object value) {
return with(option.getPath(), value);
}
private ProgrammaticDriverConfigLoaderBuilder with(@NonNull String path, @Nullable Object value) {
if (!DriverExecutionProfile.DEFAULT_NAME.equals(currentProfileName)) {
path = "profiles." + currentProfileName + "." + path;
}
if (!rootPath.isEmpty()) {
path = rootPath + "." + path;
}
values.put(path, value);
return this;
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder startProfile(@NonNull String profileName) {
currentProfileName = Objects.requireNonNull(profileName);
return this;
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder endProfile() {
currentProfileName = DriverExecutionProfile.DEFAULT_NAME;
return this;
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withBoolean(
@NonNull DriverOption option, boolean value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withBooleanList(
@NonNull DriverOption option, @NonNull List<Boolean> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withInt(@NonNull DriverOption option, int value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withIntList(
@NonNull DriverOption option, @NonNull List<Integer> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withLong(@NonNull DriverOption option, long value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withLongList(
@NonNull DriverOption option, @NonNull List<Long> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withDouble(
@NonNull DriverOption option, double value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withDoubleList(
@NonNull DriverOption option, @NonNull List<Double> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withString(
@NonNull DriverOption option, @NonNull String value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withStringList(
@NonNull DriverOption option, @NonNull List<String> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withStringMap(
@NonNull DriverOption option, @NonNull Map<String, String> value) {
for (String key : value.keySet()) {
this.with(option.getPath() + "." + key, value.get(key));
}
return this;
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withBytes(@NonNull DriverOption option, long value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withBytesList(
@NonNull DriverOption option, @NonNull List<Long> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withDuration(
@NonNull DriverOption option, @NonNull Duration value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder withDurationList(
@NonNull DriverOption option, @NonNull List<Duration> value) {
return with(option, value);
}
@NonNull
@Override
public ProgrammaticDriverConfigLoaderBuilder without(@NonNull DriverOption option) {
return with(option, null);
}
@NonNull
@Override
public DriverConfigLoader build() {
return new DefaultDriverConfigLoader(
() -> {
ConfigFactory.invalidateCaches();
Config programmaticConfig = buildConfig();
Config config =
ConfigFactory.defaultOverrides()
.withFallback(programmaticConfig)
.withFallback(fallbackSupplier.get())
.resolve();
// Only remove rootPath after the merge between system properties
// and fallback configuration, since both are supposed to
// contain the same rootPath prefix.
return rootPath.isEmpty() ? config : config.getConfig(rootPath);
});
}
private Config buildConfig() {
Config config = ConfigFactory.empty();
for (Map.Entry<String, Object> entry : values.entrySet()) {
config = config.withValue(entry.getKey(), ConfigValueFactory.fromAnyRef(entry.getValue()));
}
return config;
}
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.impl;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableSet;
import org.onosproject.cluster.NodeId;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.AnnotationKeys;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Host;
import org.onosproject.net.Link;
import org.onosproject.net.Port;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.config.basics.BasicDeviceConfig;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.host.HostService;
import org.onosproject.net.link.LinkService;
import org.onosproject.net.pi.model.PiPipeconfId;
import org.onosproject.net.pi.service.PiPipeconfService;
import org.onosproject.ui.RequestHandler;
import org.onosproject.ui.UiMessageHandler;
import org.onosproject.ui.table.TableModel;
import org.onosproject.ui.table.TableRequestHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static org.apache.commons.lang.WordUtils.capitalizeFully;
import static org.onosproject.net.DeviceId.deviceId;
/**
* Message handler for device view related messages.
*/
public class DeviceViewMessageHandler extends UiMessageHandler {

    // Event identifiers exchanged with the device-view client.
    private static final String DEV_DATA_REQ = "deviceDataRequest";
    private static final String DEV_DATA_RESP = "deviceDataResponse";
    private static final String DEVICES = "devices";
    private static final String DEVICE = "device";
    private static final String DEV_DETAILS_REQ = "deviceDetailsRequest";
    private static final String DEV_DETAILS_RESP = "deviceDetailsResponse";
    private static final String DETAILS = "details";
    private static final String DEV_NAME_CHANGE_REQ = "deviceNameChangeRequest";
    private static final String DEV_NAME_CHANGE_RESP = "deviceNameChangeResponse";

    // Fallback device URI used when the request payload carries no "id" field.
    private static final String ZERO_URI = "of:0000000000000000";

    // JSON property names used in the response payloads.
    private static final String ID = "id";
    private static final String TYPE = "type";
    private static final String AVAILABLE = "available";
    private static final String AVAILABLE_IID = "_iconid_available";
    private static final String TYPE_IID = "_iconid_type";
    private static final String DEV_ICON_PREFIX = "devIcon_";
    private static final String NUM_PORTS = "num_ports";
    private static final String LINK_DEST = "elinks_dest";
    private static final String MFR = "mfr";
    private static final String HW = "hw";
    private static final String SW = "sw";
    private static final String PROTOCOL = "protocol";
    private static final String PIPECONF = "pipeconf";
    private static final String MASTER_ID = "masterid";
    private static final String CHASSIS_ID = "chassisid";
    private static final String SERIAL = "serial";
    private static final String PORTS = "ports";
    private static final String ENABLED = "enabled";
    private static final String SPEED = "speed";
    private static final String NAME = "name";
    private static final String WARN = "warn";
    private static final String NONE = "none";

    // Column order for the device table rows.
    private static final String[] COL_IDS = {
    AVAILABLE, AVAILABLE_IID, TYPE_IID,
    NAME, ID, MASTER_ID, NUM_PORTS, MFR, HW, SW,
    PROTOCOL, CHASSIS_ID, SERIAL
    };

    // Icon ids reflecting device availability.
    private static final String ICON_ID_ONLINE = "active";
    private static final String ICON_ID_OFFLINE = "inactive";

    private final Logger log = LoggerFactory.getLogger(getClass());

    // Registers the three request handlers this view responds to.
    @Override
    protected Collection<RequestHandler> createRequestHandlers() {
    return ImmutableSet.of(
    new DataRequestHandler(),
    new NameChangeHandler(),
    new DetailRequestHandler()
    );
    }

    // Get friendly name of the device from the annotations
    // (falls back to the device id string when no NAME annotation is set).
    private static String deviceName(Device device) {
    String name = device.annotations().value(AnnotationKeys.NAME);
    return isNullOrEmpty(name) ? device.id().toString() : name;
    }

    // Reads the "protocol" annotation, returning "" when absent.
    private static String deviceProtocol(Device device) {
    String protocol = device.annotations().value(PROTOCOL);
    return protocol != null ? protocol : "";
    }

    // Builds the icon id for the device's type, e.g. "devIcon_SWITCH".
    private static String getTypeIconId(Device d) {
    return DEV_ICON_PREFIX + d.type().toString();
    }

    // handler for device table requests
    private final class DataRequestHandler extends TableRequestHandler {
    private static final String NO_ROWS_MESSAGE = "No devices found";

    private DataRequestHandler() {
    super(DEV_DATA_REQ, DEV_DATA_RESP, DEVICES);
    }

    @Override
    protected String[] getColumnIds() {
    return COL_IDS;
    }

    @Override
    protected String noRowsMessage(ObjectNode payload) {
    return NO_ROWS_MESSAGE;
    }

    // Adds one table row per known device.
    @Override
    protected void populateTable(TableModel tm, ObjectNode payload) {
    DeviceService ds = get(DeviceService.class);
    MastershipService ms = get(MastershipService.class);
    for (Device dev : ds.getDevices()) {
    populateRow(tm.addRow(), dev, ds, ms);
    }
    }

    // Fills a single row with the device's summary attributes.
    private void populateRow(TableModel.Row row, Device dev,
    DeviceService ds, MastershipService ms) {
    DeviceId id = dev.id();
    boolean available = ds.isAvailable(id);
    String iconId = available ? ICON_ID_ONLINE : ICON_ID_OFFLINE;
    row.cell(ID, id)
    .cell(NAME, deviceName(dev))
    .cell(AVAILABLE, available)
    .cell(AVAILABLE_IID, iconId)
    .cell(TYPE_IID, getTypeIconId(dev))
    .cell(MFR, dev.manufacturer())
    .cell(HW, dev.hwVersion())
    .cell(SW, dev.swVersion())
    .cell(PROTOCOL, deviceProtocol(dev))
    .cell(NUM_PORTS, ds.getPorts(id).size())
    .cell(MASTER_ID, ms.getMasterFor(id));
    }
    }

    // handler for selected device detail requests
    private final class DetailRequestHandler extends RequestHandler {
    private DetailRequestHandler() {
    super(DEV_DETAILS_REQ);
    }

    // Builds a detail payload (attributes, sorted port list, codec-encoded device)
    // and sends it back to the client.
    @Override
    public void process(ObjectNode payload) {
    String id = string(payload, ID, ZERO_URI);
    DeviceId deviceId = deviceId(id);
    DeviceService service = get(DeviceService.class);
    MastershipService ms = get(MastershipService.class);
    // NOTE(review): getDevice() may return null for an unknown id; the calls below
    // would then NPE — confirm whether the client can ever send a stale id.
    Device device = service.getDevice(deviceId);
    ObjectNode data = objectNode();
    NodeId masterFor = ms.getMasterFor(deviceId);
    data.put(ID, deviceId.toString());
    data.put(NAME, deviceName(device));
    data.put(TYPE, capitalizeFully(device.type().toString()));
    data.put(TYPE_IID, getTypeIconId(device));
    data.put(MFR, device.manufacturer());
    data.put(HW, device.hwVersion());
    data.put(SW, device.swVersion());
    data.put(SERIAL, device.serialNumber());
    data.put(CHASSIS_ID, device.chassisId().toString());
    data.put(MASTER_ID, masterFor != null ? masterFor.toString() : NONE);
    data.put(PROTOCOL, deviceProtocol(device));
    data.put(PIPECONF, devicePipeconf(device));
    ArrayNode ports = arrayNode();
    // Ports sorted by ascending port number.
    List<Port> portList = new ArrayList<>(service.getPorts(deviceId));
    portList.sort((p1, p2) -> {
    long delta = p1.number().toLong() - p2.number().toLong();
    return delta == 0 ? 0 : (delta < 0 ? -1 : +1);
    });
    for (Port p : portList) {
    ports.add(portData(p, deviceId));
    }
    data.set(PORTS, ports);
    ObjectNode rootNode = objectNode();
    rootNode.set(DETAILS, data);
    // NOTE: ... an alternate way of getting all the details of an item:
    // Use the codec context to get a JSON of the device. See ONOS-5976.
    rootNode.set(DEVICE, getJsonCodecContext().encode(device, Device.class));
    sendMessage(DEV_DETAILS_RESP, rootNode);
    }

    // Describes one port: id/type/speed/enabled/name plus either its egress
    // links or, when there are none, the first connected host.
    private ObjectNode portData(Port p, DeviceId id) {
    ObjectNode port = objectNode();
    LinkService ls = get(LinkService.class);
    String name = p.annotations().value(AnnotationKeys.PORT_NAME);
    port.put(ID, capitalizeFully(p.number().toString()));
    port.put(TYPE, capitalizeFully(p.type().toString()));
    port.put(SPEED, p.portSpeed());
    port.put(ENABLED, p.isEnabled());
    port.put(NAME, name != null ? name : "");
    ConnectPoint connectPoint = new ConnectPoint(id, p.number());
    Set<Link> links = ls.getEgressLinks(connectPoint);
    if (!links.isEmpty()) {
    // Space-separated list of "elementId/port" destinations.
    StringBuilder egressLinks = new StringBuilder();
    for (Link l : links) {
    ConnectPoint dest = l.dst();
    egressLinks.append(dest.elementId()).append("/")
    .append(dest.port()).append(" ");
    }
    port.put(LINK_DEST, egressLinks.toString());
    } else {
    HostService hs = get(HostService.class);
    Set<Host> hosts = hs.getConnectedHosts(connectPoint);
    if (hosts != null && !hosts.isEmpty()) {
    // Only the first connected host is shown.
    port.put(LINK_DEST, hosts.iterator().next().id().toString());
    }
    }
    return port;
    }

    // Resolves the device's pipeconf id, or "none" when not configured.
    private String devicePipeconf(Device device) {
    PiPipeconfService service = get(PiPipeconfService.class);
    Optional<PiPipeconfId> pipeconfId = service.ofDevice(device.id());
    if (pipeconfId.isPresent()) {
    return pipeconfId.get().id();
    } else {
    return NONE;
    }
    }
    }

    // handler for changing device friendly name
    private final class NameChangeHandler extends RequestHandler {
    private NameChangeHandler() {
    super(DEV_NAME_CHANGE_REQ);
    }

    // Applies (or clears) the friendly name via BasicDeviceConfig and echoes
    // the payload back as confirmation.
    @Override
    public void process(ObjectNode payload) {
    DeviceId deviceId = deviceId(string(payload, ID, ZERO_URI));
    String name = emptyToNull(string(payload, NAME, null));
    log.debug("Name change request: {} -- '{}'", deviceId, name);
    NetworkConfigService service = get(NetworkConfigService.class);
    BasicDeviceConfig cfg =
    service.addConfig(deviceId, BasicDeviceConfig.class);
    // Name attribute missing from the payload (or empty string)
    // means that the friendly name should be unset.
    cfg.name(name);
    cfg.apply();
    sendMessage(DEV_NAME_CHANGE_RESP, payload);
    }
    }
}
| |
package org.togglz.googleclouddatastore.repository;
import com.google.cloud.datastore.BooleanValue;
import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.StringValue;
import com.google.cloud.datastore.Transaction;
import com.google.cloud.datastore.Value;
import com.google.cloud.datastore.testing.LocalDatastoreHelper;
import org.joda.time.Duration;
import org.junit.jupiter.api.*;
import org.togglz.core.Feature;
import org.togglz.core.repository.FeatureState;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.*;
import static org.togglz.googleclouddatastore.repository.GoogleCloudDatastoreStateRepository.KIND_DEFAULT;
public class GoogleCloudDatastoreStateRepositoryIT {

    // Datastore's documented limit on entity groups per cross-group transaction.
    private static final int MAX_ENTITY_GROUPS = 25;
    // Local emulator with 100% consistency so reads immediately observe writes.
    private static final LocalDatastoreHelper HELPER = LocalDatastoreHelper.create(1.0);
    private static final Datastore DATASTORE = HELPER.getOptions().getService();
    private static final String STRATEGY_ID = "myStrategy";

    private GoogleCloudDatastoreStateRepository repository;

    @BeforeAll
    public static void beforeClass() throws IOException, InterruptedException {
        HELPER.start();
    }

    @BeforeEach
    public void setUp() throws Exception {
        repository = new GoogleCloudDatastoreStateRepository(DATASTORE);
    }

    @AfterAll
    public static void afterClass() throws IOException, InterruptedException, TimeoutException {
        HELPER.stop(Duration.standardMinutes(1));
    }

    @AfterEach
    public void tearDown() throws Exception {
        // Wipe the emulator between tests so each test starts from an empty datastore.
        HELPER.reset();
    }

    @Test
    public void shouldUseGiveKindWhenPersisting() {
        // GIVEN a repo with a custom kind
        final String kind = "CustomKind";
        repository = new GoogleCloudDatastoreStateRepository(DATASTORE, kind);
        // WHEN a feature is persisted
        final FeatureState state = new FeatureState(TestFeature.F1);
        repository.setFeatureState(state);
        // THEN new entities should be persisted within it
        final Key key = DATASTORE.newKeyFactory().setKind(kind).newKey(TestFeature.F1.name());
        final Entity entity = DATASTORE.get(key);
        assertNotNull(entity);
    }

    @Test
    public void testShouldSaveStateWithoutStrategyOrParameters() {
        //WHEN a feature without strategy is persisted
        final FeatureState state = new FeatureState(TestFeature.F1).disable();
        repository.setFeatureState(state);
        //THEN there should be a corresponding entry in the database
        final Key key = createKey(TestFeature.F1.name());
        final Entity featureEntity = DATASTORE.get(key);
        assertFalse(featureEntity.getBoolean(GoogleCloudDatastoreStateRepository.ENABLED));
        assertFalse(featureEntity.contains(GoogleCloudDatastoreStateRepository.STRATEGY_ID));
        assertFalse(featureEntity.contains(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_NAMES));
        assertFalse(featureEntity.contains(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_VALUES));
    }

    @Test
    public void testShouldSaveStateStrategyAndParameters() {
        // WHEN a feature with strategy and parameter is persisted
        final FeatureState state = new FeatureState(TestFeature.F1)
                .enable()
                .setStrategyId("someId")
                .setParameter("param", "foo");
        repository.setFeatureState(state);
        // THEN there should be a corresponding entry in the database
        final Key key = createKey(TestFeature.F1.name());
        final Entity featureEntity = DATASTORE.get(key);
        assertTrue(featureEntity.getBoolean(GoogleCloudDatastoreStateRepository.ENABLED));
        assertEquals("someId", featureEntity.getString(GoogleCloudDatastoreStateRepository.STRATEGY_ID));
        final StringValue param = NonIndexed.valueOf("param");
        assertEquals(singletonList(param), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_NAMES));
        final StringValue foo = NonIndexed.valueOf("foo");
        assertEquals(singletonList(foo), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_VALUES));
    }

    @Test
    public void shouldReturnNullWhenStateDoesntExist() {
        // GIVEN there is no feature state in the DATASTORE WHEN the repository reads the state
        final FeatureState state = repository.getFeatureState(TestFeature.F1);
        // THEN the state should be absent
        assertNull(state);
    }

    @Test
    public void testShouldReadStateWithoutStrategyAndParameters() {
        // GIVEN a database row containing a simple feature state
        givenDisabledFeature("F1");
        // WHEN the repository reads the state
        final FeatureState state = repository.getFeatureState(TestFeature.F1);
        // THEN the properties should be set like expected
        assertNotNull(state);
        assertEquals(TestFeature.F1, state.getFeature());
        assertFalse(state.isEnabled());
        assertNull(state.getStrategyId());
        assertEquals(0, state.getParameterNames().size());
    }

    @Test
    public void testShouldReadStateWithStrategyAndParameters() {
        // GIVEN a database row containing a feature state with strategy and parameter
        givenEnabledFeatureWithStrategy("F1");
        // WHEN the repository reads the state
        final FeatureState state = repository.getFeatureState(TestFeature.F1);
        // THEN the properties should be set like expected
        assertNotNull(state);
        assertEquals(TestFeature.F1, state.getFeature());
        assertTrue(state.isEnabled());
        assertEquals(STRATEGY_ID, state.getStrategyId());
        assertEquals(1, state.getParameterNames().size());
        assertEquals("foobar", state.getParameter("param23"));
    }

    @Test
    public void testShouldUpdateExistingDatabaseEntry() {
        // GIVEN a database row containing a feature state with strategy and parameter
        givenEnabledFeatureWithStrategy("F1");
        // AND the database entries are like expected
        final Key key = createKey(TestFeature.F1.name());
        Entity featureEntity = DATASTORE.get(key);
        assertTrue(featureEntity.getBoolean(GoogleCloudDatastoreStateRepository.ENABLED));
        assertEquals(STRATEGY_ID, featureEntity.getString(GoogleCloudDatastoreStateRepository.STRATEGY_ID));
        StringValue param = NonIndexed.valueOf("param23");
        assertEquals(singletonList(param), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_NAMES));
        StringValue foo = NonIndexed.valueOf("foobar");
        assertEquals(singletonList(foo), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_VALUES));
        // WHEN the repository writes new state
        final FeatureState state = new FeatureState(TestFeature.F1)
                .disable()
                .setStrategyId("someId")
                .setParameter("param", "foo");
        repository.setFeatureState(state);
        // THEN the entity should reflect the new state
        featureEntity = DATASTORE.get(key);
        assertFalse(featureEntity.getBoolean(GoogleCloudDatastoreStateRepository.ENABLED));
        assertEquals("someId", featureEntity.getString(GoogleCloudDatastoreStateRepository.STRATEGY_ID));
        param = NonIndexed.valueOf("param");
        assertEquals(singletonList(param), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_NAMES));
        foo = NonIndexed.valueOf("foo");
        assertEquals(singletonList(foo), featureEntity.<StringValue>getList(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_VALUES));
    }

    @Test
    public void shouldNotAddNewEntityGroupToCurrentCrossGroupTransaction() {
        // NOTE(review): the fixture writes feature "F" but the read targets TestFeature.F1 —
        // presumably intentional (the read must not join the transaction); verify.
        givenDisabledFeature("F");
        final Transaction txn = DATASTORE.newTransaction();
        // Fill the transaction up to the entity-group limit.
        for (int i = 0; i < MAX_ENTITY_GROUPS - 1; i++) {
            putWithinTransaction("F" + i, false, txn);
        }
        putWithinTransaction("F", false, txn);
        // Reading through the repository must not add another entity group to the txn.
        repository.getFeatureState(TestFeature.F1);
        txn.commit();
    }

    @Test
    public void shouldWorkInsideRunningTransaction() {
        givenDisabledFeature("F1");
        final Transaction txn = DATASTORE.newTransaction();
        putWithinTransaction("F3", false, txn);
        // The repository read must succeed while an unrelated transaction is open.
        repository.getFeatureState(TestFeature.F1);
        txn.commit();
    }

    /** Builds a key of the repository's default kind for the given feature name. */
    private Key createKey(String name) {
        return DATASTORE.newKeyFactory().setKind(KIND_DEFAULT).newKey(name);
    }

    /** Persists a disabled feature with no strategy or parameters. */
    private void givenDisabledFeature(String featureName) {
        put(featureName, false, null, null, null);
    }

    /** Persists an enabled feature with strategy {@link #STRATEGY_ID} and one parameter. */
    private void givenEnabledFeatureWithStrategy(String featureName) {
        // Plain HashMap instead of double-brace initialization: the anonymous subclass created
        // by double-brace init needlessly retains a reference to the enclosing test instance.
        final Map<String, String> params = new HashMap<>();
        params.put("param23", "foobar");
        put(featureName, true, STRATEGY_ID, params);
    }

    private void putWithinTransaction(final String name, final boolean enabled, final Transaction txn) {
        put(name, enabled, null, null, txn);
    }

    private void put(final String name, final boolean enabled, final String strategyId, final Map<String, String> params) {
        put(name, enabled, strategyId, params, null);
    }

    /**
     * Writes a feature entity directly to the datastore, mirroring the repository's storage
     * layout (all properties excluded from indexes). Writes through {@code txn} when non-null.
     */
    private void put(final String name, final boolean enabled, final String strategyId, final Map<String, String> params,
                     final Transaction txn) {
        final Key key = createKey(name);
        final Entity.Builder builder = Entity.newBuilder(key)
                .set(GoogleCloudDatastoreStateRepository.ENABLED, BooleanValue.newBuilder(enabled).setExcludeFromIndexes(true).build());
        if (strategyId != null) {
            builder.set(GoogleCloudDatastoreStateRepository.STRATEGY_ID, StringValue.newBuilder(strategyId).setExcludeFromIndexes(true).build());
        }
        if (params != null && !params.isEmpty()) {
            final List<Value<String>> strategyParamsNames = new ArrayList<>(params.size());
            final List<Value<String>> strategyParamsValues = new ArrayList<>(params.size());
            // Iterate the entry set instead of keySet() + get(key) to avoid a second lookup.
            for (final Map.Entry<String, String> param : params.entrySet()) {
                strategyParamsNames.add(StringValue.newBuilder(param.getKey()).setExcludeFromIndexes(true).build());
                strategyParamsValues.add(StringValue.newBuilder(param.getValue()).setExcludeFromIndexes(true).build());
            }
            builder.set(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_NAMES, strategyParamsNames);
            builder.set(GoogleCloudDatastoreStateRepository.STRATEGY_PARAMS_VALUES, strategyParamsValues);
        }
        if (txn == null) {
            DATASTORE.put(builder.build());
        } else {
            txn.put(builder.build());
        }
    }

    /** Minimal feature enum used as the persistence fixture. */
    private enum TestFeature implements Feature {
        F1
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.rest;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.camel.CamelContext;
import org.apache.camel.model.OptionalIdentifiedDefinition;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.ToDefinition;
import org.apache.camel.model.ToDynamicDefinition;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.util.FileUtil;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
/**
* Defines a rest service using the rest-dsl
*/
@Metadata(label = "rest")
@XmlRootElement(name = "rest")
@XmlAccessorType(XmlAccessType.FIELD)
public class RestDefinition extends OptionalIdentifiedDefinition<RestDefinition> {
// Base path of this rest service, e.g. "/foo".
@XmlAttribute
private String path;
// Optional documentation tag for the operations in this definition.
@XmlAttribute
private String tag;
// Accepted input content type(s), e.g. application/json.
@XmlAttribute
private String consumes;
// Produced output content type(s), e.g. application/json.
@XmlAttribute
private String produces;
// Binding mode for request/response payloads; defaults to auto.
@XmlAttribute @Metadata(defaultValue = "auto")
private RestBindingMode bindingMode;
// Whether to skip output binding when a custom HTTP error code is present.
@XmlAttribute
private Boolean skipBindingOnErrorCode;
// Whether to emit CORS headers in responses.
@XmlAttribute
private Boolean enableCORS;
// Whether verbs are included in generated API documentation.
@XmlAttribute
private Boolean apiDocs;
// The HTTP verbs (get/post/...) defined on this rest service.
@XmlElementRef
private List<VerbDefinition> verbs = new ArrayList<VerbDefinition>();
// Label used when rendering/logging this model node.
@Override
public String getLabel() {
    return "rest";
}
/** Returns the base path of the rest service, or null if not set. */
public String getPath() {
    return path;
}
/**
 * Path of the rest service, such as "/foo"
 */
public void setPath(String path) {
    this.path = path;
}
/** Returns the documentation tag, or null if not set. */
public String getTag() {
    return tag;
}
/**
 * To configure a special tag for the operations within this rest definition.
 */
public void setTag(String tag) {
    this.tag = tag;
}
/** Returns the accepted input content type(s), or null if not set. */
public String getConsumes() {
    return consumes;
}
/**
 * To define the content type what the REST service consumes (accept as input), such as application/xml or application/json.
 * This option will override what may be configured on a parent level
 */
public void setConsumes(String consumes) {
    this.consumes = consumes;
}
/** Returns the produced output content type(s), or null if not set. */
public String getProduces() {
    return produces;
}
/**
 * To define the content type what the REST service produces (uses for output), such as application/xml or application/json
 * This option will override what may be configured on a parent level
 */
public void setProduces(String produces) {
    this.produces = produces;
}
/** Returns the configured binding mode, or null if not explicitly set. */
public RestBindingMode getBindingMode() {
    return bindingMode;
}
/**
 * Sets the binding mode to use.
 * This option will override what may be configured on a parent level
 * <p/>
 * The default value is auto
 */
public void setBindingMode(RestBindingMode bindingMode) {
    this.bindingMode = bindingMode;
}
/** Returns the live (mutable) list of verbs defined on this rest service. */
public List<VerbDefinition> getVerbs() {
    return verbs;
}
/**
 * The HTTP verbs this REST service accepts and uses
 */
public void setVerbs(List<VerbDefinition> verbs) {
    this.verbs = verbs;
}
/** Returns whether output binding is skipped on custom error codes, or null if unset. */
public Boolean getSkipBindingOnErrorCode() {
    return skipBindingOnErrorCode;
}
/**
 * Whether to skip binding on output if there is a custom HTTP error code header.
 * This allows to build custom error messages that do not bind to json / xml etc, as success messages otherwise will do.
 * This option will override what may be configured on a parent level
 */
public void setSkipBindingOnErrorCode(Boolean skipBindingOnErrorCode) {
    this.skipBindingOnErrorCode = skipBindingOnErrorCode;
}
/** Returns whether CORS headers are enabled, or null if unset. */
public Boolean getEnableCORS() {
    return enableCORS;
}
/**
 * Whether to enable CORS headers in the HTTP response.
 * This option will override what may be configured on a parent level
 * <p/>
 * The default value is false.
 */
public void setEnableCORS(Boolean enableCORS) {
    this.enableCORS = enableCORS;
}
/** Returns whether verbs are included in API documentation, or null if unset. */
public Boolean getApiDocs() {
    return apiDocs;
}
/**
 * Whether to include or exclude the VerbDefinition in API documentation.
 * This option will override what may be configured on a parent level
 * <p/>
 * The default value is true.
 */
public void setApiDocs(Boolean apiDocs) {
    this.apiDocs = apiDocs;
}
// Fluent API
//-------------------------------------------------------------------------
/**
 * To set the base path of this REST service
 *
 * @return this builder, for fluent chaining
 */
public RestDefinition path(String path) {
    setPath(path);
    return this;
}
/**
 * To set the tag to use of this REST service
 *
 * @return this builder, for fluent chaining
 */
public RestDefinition tag(String tag) {
    setTag(tag);
    return this;
}
// Per-verb factory overloads; each delegates to addVerb(verb, uri) with a
// null uri meaning "use the service's base path".

/** Adds a GET verb on the base path. */
public RestDefinition get() {
    return addVerb("get", null);
}
/** Adds a GET verb on the given uri. */
public RestDefinition get(String uri) {
    return addVerb("get", uri);
}
/** Adds a POST verb on the base path. */
public RestDefinition post() {
    return addVerb("post", null);
}
/** Adds a POST verb on the given uri. */
public RestDefinition post(String uri) {
    return addVerb("post", uri);
}
/** Adds a PUT verb on the base path. */
public RestDefinition put() {
    return addVerb("put", null);
}
/** Adds a PUT verb on the given uri. */
public RestDefinition put(String uri) {
    return addVerb("put", uri);
}
/** Adds a DELETE verb on the base path. */
public RestDefinition delete() {
    return addVerb("delete", null);
}
/** Adds a DELETE verb on the given uri. */
public RestDefinition delete(String uri) {
    return addVerb("delete", uri);
}
/** Adds a HEAD verb on the base path. */
public RestDefinition head() {
    return addVerb("head", null);
}
/** Adds a HEAD verb on the given uri. */
public RestDefinition head(String uri) {
    return addVerb("head", uri);
}
/** @deprecated OPTIONS is handled automatically; avoid defining it explicitly. */
@Deprecated
public RestDefinition options() {
    return addVerb("options", null);
}
/** @deprecated OPTIONS is handled automatically; avoid defining it explicitly. */
@Deprecated
public RestDefinition options(String uri) {
    return addVerb("options", uri);
}
/** Adds an arbitrary verb on the base path. */
public RestDefinition verb(String verb) {
    return addVerb(verb, null);
}
/** Adds an arbitrary verb on the given uri. */
public RestDefinition verb(String verb, String uri) {
    return addVerb(verb, uri);
}
/**
 * Assigns an id: to the rest definition itself while no verb has been added yet,
 * otherwise to the most recently added verb (matching Java DSL semantics).
 */
@Override
public RestDefinition id(String id) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        super.id(id);
        return this;
    }
    // add on last verb as that is how the Java DSL works
    list.get(list.size() - 1).id(id);
    return this;
}
/**
 * Sets a description: on the rest definition itself while no verb has been added yet,
 * otherwise on the most recently added verb (matching Java DSL semantics).
 */
@Override
public RestDefinition description(String text) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        super.description(text);
        return this;
    }
    // add on last verb as that is how the Java DSL works
    list.get(list.size() - 1).description(text);
    return this;
}
/**
 * Sets a description with id and language: on the rest definition itself while no verb
 * has been added yet, otherwise on the most recently added verb (Java DSL semantics).
 */
@Override
public RestDefinition description(String id, String text, String lang) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        super.description(id, text, lang);
        return this;
    }
    // add on last verb as that is how the Java DSL works
    list.get(list.size() - 1).description(id, text, lang);
    return this;
}
/**
 * Sets the media type(s) this REST service consumes: on the rest definition itself
 * when no verb has been added yet, otherwise on the most recently added verb.
 */
public RestDefinition consumes(String mediaType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.consumes = mediaType;
    } else {
        list.get(list.size() - 1).setConsumes(mediaType);
    }
    return this;
}
/**
 * Starts a new operation parameter on the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestOperationParamDefinition param() {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    return param(list.get(list.size() - 1));
}
/**
 * Adds the given operation parameter to the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition param(RestOperationParamDefinition param) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).getParams().add(param);
    return this;
}
/**
 * Adds all the given operation parameters to the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition params(List<RestOperationParamDefinition> params) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).getParams().addAll(params);
    return this;
}
/**
 * Starts a new operation parameter bound to the given verb.
 */
public RestOperationParamDefinition param(VerbDefinition verb) {
    return new RestOperationParamDefinition(verb);
}
/**
 * Adds the given response message to the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition responseMessage(RestOperationResponseMsgDefinition msg) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).getResponseMsgs().add(msg);
    return this;
}
/**
 * Starts a new response message on the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestOperationResponseMsgDefinition responseMessage() {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    return responseMessage(list.get(list.size() - 1));
}
/**
 * Starts a new response message bound to the given verb.
 */
public RestOperationResponseMsgDefinition responseMessage(VerbDefinition verb) {
    return new RestOperationResponseMsgDefinition(verb);
}
/**
 * Adds all the given response messages to the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition responseMessages(List<RestOperationResponseMsgDefinition> msgs) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).getResponseMsgs().addAll(msgs);
    return this;
}
/**
 * Sets the media type(s) this REST service produces: on the rest definition itself
 * when no verb has been added yet, otherwise on the most recently added verb.
 */
public RestDefinition produces(String mediaType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.produces = mediaType;
    } else {
        list.get(list.size() - 1).setProduces(mediaType);
    }
    return this;
}
/**
 * Sets the input (body) type of the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition type(Class<?> classType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setType(classType.getCanonicalName());
    return this;
}
/**
 * Sets the input (body) type of the most recently added verb as a list of the
 * given class; the type name is suffixed with {@code []} to indicate an array.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition typeList(Class<?> classType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setType(classType.getCanonicalName() + "[]");
    return this;
}
/**
 * Sets the output type of the most recently added verb.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition outType(Class<?> classType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setOutType(classType.getCanonicalName());
    return this;
}
/**
 * Sets the output type of the most recently added verb as a list of the given
 * class; the type name is suffixed with {@code []} to indicate an array.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition outTypeList(Class<?> classType) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setOutType(classType.getCanonicalName() + "[]");
    return this;
}
/**
 * Sets the binding mode: on the rest definition itself when no verb has been
 * added yet, otherwise on the most recently added verb.
 */
public RestDefinition bindingMode(RestBindingMode mode) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.bindingMode = mode;
    } else {
        list.get(list.size() - 1).setBindingMode(mode);
    }
    return this;
}
/**
 * Sets whether to skip binding on error codes: on the rest definition itself
 * when no verb has been added yet, otherwise on the most recently added verb.
 */
public RestDefinition skipBindingOnErrorCode(boolean skipBindingOnErrorCode) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.skipBindingOnErrorCode = skipBindingOnErrorCode;
    } else {
        list.get(list.size() - 1).setSkipBindingOnErrorCode(skipBindingOnErrorCode);
    }
    return this;
}
/**
 * Sets whether CORS is enabled: on the rest definition itself when no verb has
 * been added yet, otherwise on the most recently added verb.
 */
public RestDefinition enableCORS(boolean enableCORS) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.enableCORS = enableCORS;
    } else {
        list.get(list.size() - 1).setEnableCORS(enableCORS);
    }
    return this;
}
/**
 * Include or exclude the current Rest Definition in API documentation; applies
 * to the rest definition itself when no verb has been added yet, otherwise to
 * the most recently added verb.
 * <p/>
 * The default value is true.
 */
public RestDefinition apiDocs(Boolean apiDocs) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        this.apiDocs = apiDocs;
    } else {
        list.get(list.size() - 1).setApiDocs(apiDocs);
    }
    return this;
}
/**
 * Routes directly to the given static endpoint.
 * <p/>
 * If you need additional routing capabilities, then use {@link #route()} instead.
 *
 * @param uri the uri of the endpoint
 * @return this builder
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition to(String uri) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setTo(new ToDefinition(uri));
    return this;
}
/**
 * Routes directly to the given dynamic endpoint.
 * <p/>
 * If you need additional routing capabilities, then use {@link #route()} instead.
 *
 * @param uri the uri of the endpoint
 * @return this builder
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RestDefinition toD(String uri) {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    list.get(list.size() - 1).setToD(new ToDynamicDefinition(uri));
    return this;
}
/**
 * Starts an embedded route on the most recently added verb and returns it, so
 * further routing can be defined with the Java DSL.
 *
 * @throws IllegalArgumentException if no verb has been added yet
 */
public RouteDefinition route() {
    List<VerbDefinition> list = getVerbs();
    if (list.isEmpty()) {
        throw new IllegalArgumentException("Must add verb first, such as get/post/delete");
    }
    RouteDefinition route = new RouteDefinition();
    // link rest and route together so we can navigate between them in the Java DSL
    route.setRestDefinition(this);
    list.get(list.size() - 1).setRoute(route);
    return route;
}
// Implementation
//-------------------------------------------------------------------------
/**
 * Creates the verb definition matching the given http method name, registers it
 * on this rest definition and assigns it the given uri.
 */
private RestDefinition addVerb(String verb, String uri) {
    VerbDefinition definition;
    if ("get".equals(verb)) {
        definition = new GetVerbDefinition();
    } else if ("post".equals(verb)) {
        definition = new PostVerbDefinition();
    } else if ("delete".equals(verb)) {
        definition = new DeleteVerbDefinition();
    } else if ("head".equals(verb)) {
        definition = new HeadVerbDefinition();
    } else if ("put".equals(verb)) {
        definition = new PutVerbDefinition();
    } else if ("options".equals(verb)) {
        definition = new OptionsVerbDefinition();
    } else {
        // unknown verb: fall back to the generic definition carrying the raw method name
        definition = new VerbDefinition();
        definition.setMethod(verb);
    }
    getVerbs().add(definition);
    definition.setRest(this);
    definition.setUri(uri);
    return this;
}
/**
 * Transforms this REST definition into a list of {@link org.apache.camel.model.RouteDefinition} which
 * Camel routing engine can add and run. This allows us to define REST services using this
 * REST DSL and turn those into regular Camel routes.
 */
public List<RouteDefinition> asRouteDefinition(CamelContext camelContext) {
    // sanity check this rest definition does not contain duplicates
    validateUniquePaths();
    List<RouteDefinition> routes = new ArrayList<RouteDefinition>();
    if (camelContext.getRestConfigurations().isEmpty()) {
        // forces a default rest configuration to be created
        camelContext.getRestConfiguration();
    }
    for (RestConfiguration config : camelContext.getRestConfigurations()) {
        addRouteDefinition(camelContext, routes, config.getComponent());
    }
    return routes;
}
/**
 * Validates that no two verbs of this rest definition declare the same
 * method/uri combination.
 *
 * @throws IllegalArgumentException if a duplicate verb+uri is found
 */
protected void validateUniquePaths() {
    Set<String> paths = new HashSet<String>();
    // use the getVerbs() accessor for consistency with the rest of this class
    for (VerbDefinition verb : getVerbs()) {
        // key is the http method, optionally suffixed with the verb uri
        String path = verb.asVerb();
        if (verb.getUri() != null) {
            path += ":" + verb.getUri();
        }
        if (!paths.add(path)) {
            throw new IllegalArgumentException("Duplicate verb detected in rest-dsl: " + path);
        }
    }
}
/**
 * Transforms the rest api configuration into a {@link org.apache.camel.model.RouteDefinition} which
 * Camel routing engine uses to service the rest api docs.
 */
public static RouteDefinition asRouteApiDefinition(CamelContext camelContext, RestConfiguration configuration) {
    RouteDefinition answer = new RouteDefinition();
    // collect endpoint options
    Map<String, Object> options = new HashMap<String, Object>();
    String routeId = configuration.getApiContextRouteId();
    if (routeId == null) {
        routeId = answer.idOrCreate(camelContext.getNodeIdFactory());
    }
    options.put("routeId", routeId);
    String component = configuration.getComponent();
    if (component != null && !component.isEmpty()) {
        options.put("componentName", component);
    }
    if (configuration.getApiContextIdPattern() != null) {
        options.put("contextIdPattern", configuration.getApiContextIdPattern());
    }
    // the from endpoint uri uses the rest-api component
    String from = "rest-api:" + configuration.getApiContextPath();
    if (!options.isEmpty()) {
        try {
            from = from + "?" + URISupport.createQueryString(options);
        } catch (URISyntaxException e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }
    }
    // we use the same uri as the producer (so we have a little route for the rest api)
    answer.fromRest(from);
    answer.id(routeId);
    answer.to(from);
    return answer;
}
/**
 * Turns each verb of this rest definition into a Camel route and appends it to the
 * given answer list: builds the rest binding, the "rest:" consumer endpoint uri with
 * its query options, registers path parameters found in the uri template, and links
 * the route back to this rest definition.
 *
 * @param camelContext the camel context (used for id creation)
 * @param answer       the list the created routes are appended to
 * @param component    the component name from the rest configuration, may be null/empty
 */
private void addRouteDefinition(CamelContext camelContext, List<RouteDefinition> answer, String component) {
for (VerbDefinition verb : getVerbs()) {
// either the verb has a singular to or a embedded route
RouteDefinition route = verb.getRoute();
if (route == null) {
// it was a singular to, so add a new route and add the singular
// to as output to this route
route = new RouteDefinition();
ProcessorDefinition def = verb.getTo() != null ? verb.getTo() : verb.getToD();
route.getOutputs().add(def);
}
// add the binding
RestBindingDefinition binding = new RestBindingDefinition();
binding.setComponent(component);
binding.setType(verb.getType());
binding.setOutType(verb.getOutType());
// verb takes precedence over configuration on rest
if (verb.getConsumes() != null) {
binding.setConsumes(verb.getConsumes());
} else {
binding.setConsumes(getConsumes());
}
if (verb.getProduces() != null) {
binding.setProduces(verb.getProduces());
} else {
binding.setProduces(getProduces());
}
if (verb.getBindingMode() != null) {
binding.setBindingMode(verb.getBindingMode());
} else {
binding.setBindingMode(getBindingMode());
}
if (verb.getSkipBindingOnErrorCode() != null) {
binding.setSkipBindingOnErrorCode(verb.getSkipBindingOnErrorCode());
} else {
binding.setSkipBindingOnErrorCode(getSkipBindingOnErrorCode());
}
if (verb.getEnableCORS() != null) {
binding.setEnableCORS(verb.getEnableCORS());
} else {
binding.setEnableCORS(getEnableCORS());
}
// register all the default values for the query parameters
for (RestOperationParamDefinition param : verb.getParams()) {
if (RestParamType.query == param.getType() && param.getDefaultValue() != null) {
binding.addDefaultValue(param.getName(), param.getDefaultValue());
}
}
// the binding must run before any other output of the route
route.getOutputs().add(0, binding);
// create the from endpoint uri which is using the rest component
String from = "rest:" + verb.asVerb() + ":" + buildUri(verb);
// append options
Map<String, Object> options = new HashMap<String, Object>();
// verb takes precedence over configuration on rest
if (verb.getConsumes() != null) {
options.put("consumes", verb.getConsumes());
} else if (getConsumes() != null) {
options.put("consumes", getConsumes());
}
if (verb.getProduces() != null) {
options.put("produces", verb.getProduces());
} else if (getProduces() != null) {
options.put("produces", getProduces());
}
// append optional type binding information
String inType = binding.getType();
if (inType != null) {
options.put("inType", inType);
}
String outType = binding.getOutType();
if (outType != null) {
options.put("outType", outType);
}
// if no route id has been set, then use the verb id as route id
if (!route.hasCustomIdAssigned()) {
// use id of verb as route id
String id = verb.getId();
if (id != null) {
route.setId(id);
}
}
String routeId = route.idOrCreate(camelContext.getNodeIdFactory());
verb.setRouteId(routeId);
options.put("routeId", routeId);
if (component != null && !component.isEmpty()) {
options.put("componentName", component);
}
// include optional description, which we favor from 1) to/route description 2) verb description 3) rest description
// this allows end users to define general descriptions and override then per to/route or verb
String description = verb.getTo() != null ? verb.getTo().getDescriptionText() : route.getDescriptionText();
if (description == null) {
description = verb.getDescriptionText();
}
if (description == null) {
description = getDescriptionText();
}
if (description != null) {
options.put("description", description);
}
if (!options.isEmpty()) {
String query;
try {
query = URISupport.createQueryString(options);
} catch (URISyntaxException e) {
throw ObjectHelper.wrapRuntimeCamelException(e);
}
from = from + "?" + query;
}
// combine the rest path and the verb uri into the full path used for
// discovering {placeholder} path parameters below
String path = getPath();
String s1 = FileUtil.stripTrailingSeparator(path);
String s2 = FileUtil.stripLeadingSeparator(verb.getUri());
String allPath;
if (s1 != null && s2 != null) {
allPath = s1 + "/" + s2;
} else if (path != null) {
allPath = path;
} else {
allPath = verb.getUri();
}
// each {} is a parameter
String[] arr = allPath.split("\\/");
for (String a : arr) {
if (a.startsWith("{") && a.endsWith("}")) {
String key = a.substring(1, a.length() - 1);
// merge if exists
boolean found = false;
for (RestOperationParamDefinition param : verb.getParams()) {
// name is mandatory
ObjectHelper.notEmpty(param.getName(), "parameter name");
if (param.getName().equalsIgnoreCase(key)) {
param.type(RestParamType.path);
found = true;
break;
}
}
if (!found) {
// not declared by the user: register an implicit path parameter
param(verb).name(key).type(RestParamType.path).endParam();
}
}
}
if (verb.getType() != null) {
String bodyType = verb.getType();
if (bodyType.endsWith("[]")) {
bodyType = "List[" + bodyType.substring(0, bodyType.length() - 2) + "]";
}
RestOperationParamDefinition param = findParam(verb, RestParamType.body.name());
if (param == null) {
// must be body type and set the model class as data type
param(verb).name(RestParamType.body.name()).type(RestParamType.body).dataType(bodyType).endParam();
} else {
// must be body type and set the model class as data type
param.type(RestParamType.body).dataType(bodyType);
}
}
// the route should be from this rest endpoint
route.fromRest(from);
route.id(routeId);
route.setRestDefinition(this);
answer.add(route);
}
}
/**
 * Builds the uri part of the rest endpoint from the base path and the verb uri;
 * when both are set they are joined with a colon, when neither is set an empty
 * string is returned.
 */
private String buildUri(VerbDefinition verb) {
    String uri = verb.getUri();
    if (path != null) {
        return uri != null ? path + ":" + uri : path;
    }
    return uri != null ? uri : "";
}
/**
 * Looks up the parameter with the given name on the verb, or null if the verb
 * does not declare it.
 */
private RestOperationParamDefinition findParam(VerbDefinition verb, String name) {
    for (RestOperationParamDefinition candidate : verb.getParams()) {
        if (name.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
}
| |
/*
* Copyright (C) 2015-2018 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.net;
import io.datakernel.bytebuf.ByteBuf;
import io.datakernel.bytebuf.ByteBufPool;
import io.datakernel.common.ApplicationSettings;
import io.datakernel.common.Check;
import io.datakernel.common.exception.AsyncTimeoutException;
import io.datakernel.common.inspector.AbstractInspector;
import io.datakernel.common.inspector.BaseInspector;
import io.datakernel.eventloop.Eventloop;
import io.datakernel.eventloop.NioChannelEventHandler;
import io.datakernel.eventloop.ScheduledRunnable;
import io.datakernel.eventloop.net.SocketSettings;
import io.datakernel.jmx.api.attribute.JmxAttribute;
import io.datakernel.jmx.stats.EventStats;
import io.datakernel.jmx.stats.ValueStats;
import io.datakernel.promise.Promise;
import io.datakernel.promise.SettablePromise;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;
import static io.datakernel.common.MemSize.kilobytes;
import static io.datakernel.common.Preconditions.checkState;
import static io.datakernel.common.Utils.nullify;
import static io.datakernel.eventloop.Eventloop.getCurrentEventloop;
import static io.datakernel.eventloop.RunnableWithContext.wrapContext;
@SuppressWarnings("WeakerAccess")
/**
 * Non-blocking TCP socket implementation on top of java.nio {@link SocketChannel},
 * driven by a single {@link Eventloop}. Read/write results are exposed as promises;
 * at most one pending read and one pending write exist at a time. Optional read and
 * write timeouts close the socket with {@link #TIMEOUT_EXCEPTION} when they fire.
 * <p>
 * NOTE(review): all methods are assumed to run on the eventloop thread (only checked
 * when {@code CHECK} is enabled) — confirm against callers.
 */
public final class AsyncTcpSocketNio implements AsyncTcpSocket, NioChannelEventHandler {
// runtime-enabled assertion flag for eventloop-thread checks
private static final Boolean CHECK = Check.isEnabled(AsyncTcpSocketNio.class);
public static final int DEFAULT_READ_BUFFER_SIZE = ApplicationSettings.getMemSize(AsyncTcpSocketNio.class, "readBufferSize", kilobytes(16)).toInt();
public static final AsyncTimeoutException TIMEOUT_EXCEPTION = new AsyncTimeoutException(AsyncTcpSocketNio.class, "timed out");
// value meaning "no read/write timeout configured"
public static final int NO_TIMEOUT = 0;
// global count of sockets currently registered with a selector
private static final AtomicInteger CONNECTION_COUNT = new AtomicInteger(0);
private final Eventloop eventloop;
// null once the socket has been closed (see isClosed())
@Nullable
private SocketChannel channel;
// buffered data read from the channel but not yet handed to a read() caller
@Nullable
private ByteBuf readBuf;
private boolean readEndOfStream;
// buffered data queued by write() but not yet fully written to the channel
@Nullable
private ByteBuf writeBuf;
private boolean writeEndOfStream;
// pending read promise, completed from onReadReady()
@Nullable
private SettablePromise<ByteBuf> read;
// pending write promise, completed from onWriteReady()
@Nullable
private SettablePromise<Void> write;
private SelectionKey key;
// low bits: current selector interest ops; high bit (0x80) is set while inside a
// ready-callback so updateInterests() is deferred until the callback finishes
private byte ops;
private int readTimeout = NO_TIMEOUT;
private int writeTimeout = NO_TIMEOUT;
private int readBufferSize = DEFAULT_READ_BUFFER_SIZE;
@Nullable
private ScheduledRunnable scheduledReadTimeout;
@Nullable
private ScheduledRunnable scheduledWriteTimeout;
@Nullable
private Inspector inspector;
/**
 * Observer hook for socket events (reads, writes, timeouts, errors).
 */
public interface Inspector extends BaseInspector<Inspector> {
void onReadTimeout();
void onRead(ByteBuf buf);
void onReadEndOfStream();
void onReadError(IOException e);
void onWriteTimeout();
void onWrite(ByteBuf buf, int bytes);
void onWriteError(IOException e);
}
/**
 * Inspector that delegates every event to an optional next inspector,
 * intended as a base class for chaining inspectors.
 */
public abstract static class ForwardingInspector implements Inspector {
protected final @Nullable Inspector next;
public ForwardingInspector(@Nullable Inspector next) {this.next = next;}
@Override
public void onReadTimeout() {
if (next != null) next.onReadTimeout();
}
@Override
public void onRead(ByteBuf buf) {
if (next != null) next.onRead(buf);
}
@Override
public void onReadEndOfStream() {
if (next != null) next.onReadEndOfStream();
}
@Override
public void onReadError(IOException e) {
if (next != null) next.onReadError(e);
}
@Override
public void onWriteTimeout() {
if (next != null) next.onWriteTimeout();
}
@Override
public void onWrite(ByteBuf buf, int bytes) {
if (next != null) next.onWrite(buf, bytes);
}
@Override
public void onWriteError(IOException e) {
if (next != null) next.onWriteError(e);
}
@SuppressWarnings("unchecked")
@Override
public <T extends Inspector> @Nullable T lookup(Class<T> type) {
return type.isAssignableFrom(this.getClass()) ? (T) this : next != null ? next.lookup(type) : null;
}
}
/**
 * Inspector that aggregates socket events into JMX statistics.
 */
public static class JmxInspector extends AbstractInspector<Inspector> implements Inspector {
public static final Duration SMOOTHING_WINDOW = Duration.ofMinutes(1);
private final ValueStats reads = ValueStats.create(SMOOTHING_WINDOW).withUnit("bytes").withRate();
private final EventStats readEndOfStreams = EventStats.create(SMOOTHING_WINDOW);
private final EventStats readErrors = EventStats.create(SMOOTHING_WINDOW);
private final EventStats readTimeouts = EventStats.create(SMOOTHING_WINDOW);
private final ValueStats writes = ValueStats.create(SMOOTHING_WINDOW).withUnit("bytes").withRate();
private final EventStats writeErrors = EventStats.create(SMOOTHING_WINDOW);
private final EventStats writeTimeouts = EventStats.create(SMOOTHING_WINDOW);
// counts writes where the channel accepted fewer bytes than were available
private final EventStats writeOverloaded = EventStats.create(SMOOTHING_WINDOW);
@Override
public void onReadTimeout() {
readTimeouts.recordEvent();
}
@Override
public void onRead(ByteBuf buf) {
reads.recordValue(buf.readRemaining());
}
@Override
public void onReadEndOfStream() {
readEndOfStreams.recordEvent();
}
@Override
public void onReadError(IOException e) {
readErrors.recordEvent();
}
@Override
public void onWriteTimeout() {
writeTimeouts.recordEvent();
}
@Override
public void onWrite(ByteBuf buf, int bytes) {
writes.recordValue(bytes);
if (buf.readRemaining() != bytes)
writeOverloaded.recordEvent();
}
@Override
public void onWriteError(IOException e) {
writeErrors.recordEvent();
}
@JmxAttribute
public EventStats getReadTimeouts() {
return readTimeouts;
}
@JmxAttribute
public ValueStats getReads() {
return reads;
}
@JmxAttribute
public EventStats getReadEndOfStreams() {
return readEndOfStreams;
}
@JmxAttribute
public EventStats getReadErrors() {
return readErrors;
}
@JmxAttribute
public EventStats getWriteTimeouts() {
return writeTimeouts;
}
@JmxAttribute
public ValueStats getWrites() {
return writes;
}
@JmxAttribute
public EventStats getWriteErrors() {
return writeErrors;
}
@JmxAttribute
public EventStats getWriteOverloaded() {
return writeOverloaded;
}
}
/**
 * Wraps an already-connected channel into an async socket, applying the given
 * socket settings (timeouts, read buffer size) when provided.
 * NOTE(review): IOException from applySettings is deliberately swallowed — the
 * socket is still returned with default channel settings; confirm this best-effort
 * behavior is intended.
 */
public static AsyncTcpSocketNio wrapChannel(Eventloop eventloop, SocketChannel socketChannel, @Nullable SocketSettings socketSettings) {
AsyncTcpSocketNio asyncTcpSocket = new AsyncTcpSocketNio(eventloop, socketChannel);
if (socketSettings == null) return asyncTcpSocket;
try {
socketSettings.applySettings(socketChannel);
} catch (IOException ignored) {
}
if (socketSettings.hasImplReadTimeout()) {
asyncTcpSocket.readTimeout = (int) socketSettings.getImplReadTimeoutMillis();
}
if (socketSettings.hasImplWriteTimeout()) {
asyncTcpSocket.writeTimeout = (int) socketSettings.getImplWriteTimeoutMillis();
}
if (socketSettings.hasReadBufferSize()) {
asyncTcpSocket.readBufferSize = socketSettings.getImplReadBufferSizeBytes();
}
return asyncTcpSocket;
}
/**
 * Connects to the given address with no connect timeout and default settings.
 */
public static Promise<AsyncTcpSocketNio> connect(InetSocketAddress address) {
return connect(address, null, null);
}
/**
 * Connects to the given address; a null duration means no connect timeout.
 */
public static Promise<AsyncTcpSocketNio> connect(InetSocketAddress address, @Nullable Duration duration, @Nullable SocketSettings socketSettings) {
return connect(address, duration == null ? 0 : duration.toMillis(), socketSettings);
}
/**
 * Connects on the current eventloop and wraps the resulting channel.
 */
public static Promise<AsyncTcpSocketNio> connect(InetSocketAddress address, long timeout, @Nullable SocketSettings socketSettings) {
Eventloop eventloop = getCurrentEventloop();
return Promise.<SocketChannel>ofCallback(cb -> eventloop.connect(address, timeout, cb))
.map(channel -> wrapChannel(eventloop, channel, socketSettings));
}
/**
 * Sets the inspector receiving this socket's events; returns this for chaining.
 */
public AsyncTcpSocketNio withInspector(Inspector inspector) {
this.inspector = inspector;
return this;
}
private AsyncTcpSocketNio(Eventloop eventloop, @NotNull SocketChannel socketChannel) {
this.eventloop = eventloop;
this.channel = socketChannel;
}
// endregion
/**
 * Returns the number of sockets currently registered with a selector.
 */
public static int getConnectionCount() {
return CONNECTION_COUNT.get();
}
// timeouts management
// schedules a background task that closes the socket when the read timeout elapses
private void scheduleReadTimeout() {
assert scheduledReadTimeout == null && readTimeout != NO_TIMEOUT;
scheduledReadTimeout = eventloop.delayBackground(readTimeout, wrapContext(this, () -> {
if (inspector != null) inspector.onReadTimeout();
scheduledReadTimeout = null;
closeEx(TIMEOUT_EXCEPTION);
}));
}
// schedules a background task that closes the socket when the write timeout elapses
private void scheduleWriteTimeout() {
assert scheduledWriteTimeout == null && writeTimeout != NO_TIMEOUT;
scheduledWriteTimeout = eventloop.delayBackground(writeTimeout, wrapContext(this, () -> {
if (inspector != null) inspector.onWriteTimeout();
scheduledWriteTimeout = null;
closeEx(TIMEOUT_EXCEPTION);
}));
}
// recomputes the selector interest set: OP_READ while no buffered data and the
// stream is open, OP_WRITE while there is buffered data to flush; registers the
// channel with the selector on first use
private void updateInterests() {
assert !isClosed() && ops >= 0;
byte newOps = (byte) (((readBuf == null && !readEndOfStream) ? SelectionKey.OP_READ : 0) | (writeBuf == null || writeEndOfStream ? 0 : SelectionKey.OP_WRITE));
if (key == null) {
ops = newOps;
try {
key = channel.register(eventloop.ensureSelector(), ops, this);
CONNECTION_COUNT.incrementAndGet();
} catch (ClosedChannelException e) {
closeEx(e);
}
} else {
if (ops != newOps) {
ops = newOps;
key.interestOps(ops);
}
}
}
/**
 * Returns a promise of the next chunk of data; completes with null-completed
 * buffer semantics on end of stream. Any previously pending read promise is
 * discarded (overwritten) before buffered data is checked.
 */
@NotNull
@Override
public Promise<ByteBuf> read() {
if (CHECK) checkState(eventloop.inEventloopThread());
if (isClosed()) return Promise.ofException(CLOSE_EXCEPTION);
read = null;
if (readBuf != null || readEndOfStream) {
// data already buffered (or EOS): complete immediately without touching the selector
ByteBuf readBuf = this.readBuf;
this.readBuf = null;
return Promise.of(readBuf);
}
SettablePromise<ByteBuf> read = new SettablePromise<>();
this.read = read;
if (scheduledReadTimeout == null && readTimeout != NO_TIMEOUT) {
scheduleReadTimeout();
}
// high bit set means we are inside a ready-callback; it will update interests itself
if (ops >= 0) {
updateInterests();
}
return read;
}
@Override
public void onReadReady() {
// mark "inside callback" so nested read()/write() calls defer updateInterests()
ops = (byte) (ops | 0x80);
try {
doRead();
} catch (IOException e) {
closeEx(e);
return;
}
if (read != null && (readBuf != null || readEndOfStream)) {
SettablePromise<@Nullable ByteBuf> read = this.read;
ByteBuf readBuf = this.readBuf;
this.read = null;
this.readBuf = null;
read.set(readBuf);
}
if (isClosed()) return;
// clear the "inside callback" flag and apply any deferred interest changes
ops = (byte) (ops & 0x7f);
updateInterests();
}
// reads once from the channel into a fresh buffer; -1 marks end of stream,
// data is appended to readBuf when a previous chunk is still buffered
private void doRead() throws IOException {
assert channel != null;
ByteBuf buf = ByteBufPool.allocate(readBufferSize);
ByteBuffer buffer = buf.toWriteByteBuffer();
int numRead;
try {
numRead = channel.read(buffer);
buf.ofWriteByteBuffer(buffer);
} catch (IOException e) {
buf.recycle();
if (inspector != null) inspector.onReadError(e);
throw e;
}
if (numRead == 0) {
if (inspector != null) inspector.onRead(buf);
buf.recycle();
return;
}
// progress was made, so the pending read timeout (if any) is cancelled
scheduledReadTimeout = nullify(scheduledReadTimeout, ScheduledRunnable::cancel);
if (numRead == -1) {
buf.recycle();
if (inspector != null) inspector.onReadEndOfStream();
readEndOfStream = true;
// both directions finished: fully close the socket
if (writeEndOfStream && writeBuf == null) {
doClose();
}
return;
}
if (inspector != null) inspector.onRead(buf);
if (readBuf == null) {
readBuf = buf;
} else {
// append to the already buffered chunk
readBuf = ByteBufPool.ensureWriteRemaining(readBuf, buf.readRemaining());
readBuf.put(buf.array(), buf.head(), buf.readRemaining());
buf.recycle();
}
}
// write cycle
/**
 * Queues the buffer for writing (null signals end of stream) and attempts an
 * immediate write; returns a promise completed when the data has been flushed.
 */
@NotNull
@Override
public Promise<Void> write(@Nullable ByteBuf buf) {
if (CHECK) {
checkState(eventloop.inEventloopThread());
checkState(!writeEndOfStream, "End of stream has already been sent");
}
if (isClosed()) {
if (buf != null) buf.recycle();
return Promise.ofException(CLOSE_EXCEPTION);
}
writeEndOfStream |= buf == null;
if (writeBuf == null) {
if (buf != null && !buf.canRead()) {
// empty buffer: nothing to flush
buf.recycle();
return Promise.complete();
}
writeBuf = buf;
} else {
// append behind data that is already queued
if (buf != null) {
writeBuf = ByteBufPool.ensureWriteRemaining(this.writeBuf, buf.readRemaining());
writeBuf.put(buf.array(), buf.head(), buf.readRemaining());
buf.recycle();
}
}
// a write is already pending: the queued data rides on the existing promise
if (write != null) return write;
try {
doWrite();
} catch (IOException e) {
closeEx(e);
return Promise.ofException(e);
}
if (this.writeBuf == null) {
// everything was flushed synchronously
return Promise.complete();
}
SettablePromise<Void> write = new SettablePromise<>();
this.write = write;
if (scheduledWriteTimeout == null && writeTimeout != NO_TIMEOUT) {
scheduleWriteTimeout();
}
// high bit set means we are inside a ready-callback; it will update interests itself
if (ops >= 0) {
updateInterests();
}
return write;
}
@Override
public void onWriteReady() {
assert write != null;
// mark "inside callback" so nested read()/write() calls defer updateInterests()
ops = (byte) (ops | 0x80);
try {
doWrite();
} catch (IOException e) {
closeEx(e);
return;
}
if (writeBuf == null) {
SettablePromise<@Nullable Void> write = this.write;
this.write = null;
write.set(null);
}
if (isClosed()) return;
// clear the "inside callback" flag and apply any deferred interest changes
ops = (byte) (ops & 0x7f);
updateInterests();
}
// flushes as much of writeBuf as the channel accepts; on full flush cancels the
// write timeout and, when end of stream was requested, shuts down the output side
// (or closes entirely when the read side already ended)
private void doWrite() throws IOException {
assert channel != null;
if (writeBuf != null) {
ByteBuf buf = this.writeBuf;
ByteBuffer buffer = buf.toReadByteBuffer();
try {
channel.write(buffer);
} catch (IOException e) {
if (inspector != null) inspector.onWriteError(e);
throw e;
}
if (inspector != null) inspector.onWrite(buf, buffer.position() - buf.head());
buf.ofReadByteBuffer(buffer);
if (buf.canRead()) {
// channel is saturated: keep the remainder queued
return;
} else {
buf.recycle();
writeBuf = null;
}
}
scheduledWriteTimeout = nullify(scheduledWriteTimeout, ScheduledRunnable::cancel);
if (writeEndOfStream) {
if (readEndOfStream) {
doClose();
} else {
channel.shutdownOutput();
}
}
}
/**
 * Closes the socket, recycles any buffered data, cancels pending timeouts and
 * fails both pending promises with the given exception. Idempotent.
 */
@Override
public void closeEx(@NotNull Throwable e) {
if (CHECK) checkState(eventloop.inEventloopThread());
if (isClosed()) return;
doClose();
readBuf = nullify(readBuf, ByteBuf::recycle);
writeBuf = nullify(writeBuf, ByteBuf::recycle);
scheduledReadTimeout = nullify(scheduledReadTimeout, ScheduledRunnable::cancel);
scheduledWriteTimeout = nullify(scheduledWriteTimeout, ScheduledRunnable::cancel);
read = nullify(read, SettablePromise::setException, e);
write = nullify(write, SettablePromise::setException, e);
}
// releases the channel/selection key; channel == null marks the socket closed
private void doClose() {
eventloop.closeChannel(channel, key);
channel = null;
CONNECTION_COUNT.decrementAndGet();
}
@Override
public boolean isClosed() {
return channel == null;
}
@Nullable
public SocketChannel getSocketChannel() {
return channel;
}
@Override
public String toString() {
return "AsyncTcpSocketImpl{" +
"channel=" + (channel != null ? channel : "") +
", readBuf=" + readBuf +
", writeBuf=" + writeBuf +
", readEndOfStream=" + readEndOfStream +
", writeEndOfStream=" + writeEndOfStream +
", read=" + read +
", write=" + write +
", ops=" + ops +
"}";
}
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.distributed;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.FakeAndroidDirectoryResolver;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.config.Config;
import com.facebook.buck.config.ConfigBuilder;
import com.facebook.buck.distributed.thrift.BuildJobState;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.event.listener.BroadcastEventListener;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.json.BuildFileParseException;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaLibraryDescription;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.parser.DefaultParserTargetNodeFactory;
import com.facebook.buck.parser.Parser;
import com.facebook.buck.parser.ParserConfig;
import com.facebook.buck.parser.ParserTargetNodeFactory;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.Cell;
import com.facebook.buck.rules.ConstructorArgMarshaller;
import com.facebook.buck.rules.DefaultCellPathResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.KnownBuildRuleTypesFactory;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.TargetNodeFactory;
import com.facebook.buck.rules.TestCellBuilder;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.coercer.TypeCoercerFactory;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.cache.DefaultFileHashCache;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.MoreExecutors;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Executors;
/**
 * Tests for {@code DistBuildState}: a {@code BuildJobState} dumped from one cell hierarchy
 * must be reloadable into a completely separate filesystem with the {@code BuckConfig},
 * cell mapping and target graph reconstructed faithfully.
 */
public class DistributedBuildStateTest {

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @Rule
  public TemporaryPaths temporaryFolder = new TemporaryPaths();

  // Declared final: these fixtures are never reassigned. Instance initializers run in
  // declaration order, so processExecutor is fully constructed before it is used below.
  private final ProcessExecutor processExecutor = new DefaultProcessExecutor(new TestConsole());

  private final KnownBuildRuleTypesFactory knownBuildRuleTypesFactory =
      new KnownBuildRuleTypesFactory(
          processExecutor,
          new FakeAndroidDirectoryResolver());

  /** Round-trips a BuckConfig (including an injected env entry) through dump/load. */
  @Test
  public void canReconstructConfig() throws IOException, InterruptedException {
    ProjectFilesystem filesystem = createJavaOnlyFilesystem("/saving");
    Config config = new Config(ConfigBuilder.rawFromLines());
    BuckConfig buckConfig = new BuckConfig(
        config,
        filesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.<String, String>builder()
            .putAll(System.getenv())
            .put("envKey", "envValue")
            .build(),
        new DefaultCellPathResolver(filesystem.getRootPath(), config));
    Cell rootCellWhenSaving = new TestCellBuilder()
        .setFilesystem(filesystem)
        .setBuckConfig(buckConfig)
        .build();
    BuildJobState dump = DistBuildState.dump(
        new DistBuildCellIndexer(rootCellWhenSaving),
        emptyActionGraph(),
        createDefaultCodec(rootCellWhenSaving, Optional.empty()),
        createTargetGraph(filesystem));
    // Load into a fresh filesystem to prove nothing leaks from the saving side.
    Cell rootCellWhenLoading = new TestCellBuilder()
        .setFilesystem(createJavaOnlyFilesystem("/loading"))
        .build();
    DistBuildState distributedBuildState =
        DistBuildState.load(dump, rootCellWhenLoading, knownBuildRuleTypesFactory);
    ImmutableMap<Integer, Cell> cells = distributedBuildState.getCells();
    assertThat(cells, Matchers.aMapWithSize(1));
    assertThat(
        cells.get(0).getBuckConfig(),
        Matchers.equalTo(buckConfig));
  }

  /** Parses a real workspace, dumps its target graph, and checks it round-trips node-for-node. */
  @Test
  public void canReconstructGraph() throws Exception {
    ProjectWorkspace projectWorkspace = TestDataHelper.createProjectWorkspaceForScenario(
        this,
        "simple_java_target",
        temporaryFolder);
    projectWorkspace.setUp();
    Cell cell = projectWorkspace.asCell();
    ProjectFilesystem projectFilesystem = cell.getFilesystem();
    projectFilesystem.mkdirs(projectFilesystem.getBuckPaths().getBuckOut());
    BuckConfig buckConfig = cell.getBuckConfig();
    TypeCoercerFactory typeCoercerFactory =
        new DefaultTypeCoercerFactory(ObjectMappers.newDefaultInstance());
    ConstructorArgMarshaller constructorArgMarshaller =
        new ConstructorArgMarshaller(typeCoercerFactory);
    Parser parser = new Parser(
        new BroadcastEventListener(),
        buckConfig.getView(ParserConfig.class),
        typeCoercerFactory,
        constructorArgMarshaller);
    TargetGraph targetGraph = parser.buildTargetGraph(
        BuckEventBusFactory.newInstance(),
        cell,
        /* enableProfiling */ false,
        MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()),
        ImmutableSet.of(
            BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib")));
    DistBuildTargetGraphCodec targetGraphCodec =
        createDefaultCodec(cell, Optional.of(parser));
    BuildJobState dump = DistBuildState.dump(
        new DistBuildCellIndexer(cell),
        emptyActionGraph(),
        targetGraphCodec,
        targetGraph);
    Cell rootCellWhenLoading = new TestCellBuilder()
        .setFilesystem(createJavaOnlyFilesystem("/loading"))
        .build();
    DistBuildState distributedBuildState =
        DistBuildState.load(dump, rootCellWhenLoading, knownBuildRuleTypesFactory);
    TargetGraph reconstructedGraph = distributedBuildState.createTargetGraph(targetGraphCodec);
    assertThat(reconstructedGraph.getNodes(), Matchers.hasSize(1));
    TargetNode<JavaLibraryDescription.Arg, ?> reconstructedJavaLibrary =
        FluentIterable.from(reconstructedGraph.getNodes()).get(0)
            .castArg(JavaLibraryDescription.Arg.class).get();
    ProjectFilesystem reconstructedCellFilesystem =
        distributedBuildState.getCells().get(0).getFilesystem();
    // Source paths must be re-rooted onto the loading-side cell filesystem.
    assertThat(
        reconstructedJavaLibrary.getConstructorArg().srcs,
        Matchers.contains(
            new PathSourcePath(
                reconstructedCellFilesystem,
                reconstructedCellFilesystem.getRootPath().getFileSystem().getPath("A.java"))));
  }

  /** A dump recorded for MIPSEL/UNKNOWN must be rejected when loaded on the current platform. */
  @Test
  public void throwsOnPlatformMismatch() throws IOException, InterruptedException {
    ProjectFilesystem filesystem = createJavaOnlyFilesystem("/opt/buck");
    Config config = new Config(ConfigBuilder.rawFromLines());
    BuckConfig buckConfig = new BuckConfig(
        config,
        filesystem,
        Architecture.MIPSEL,
        Platform.UNKNOWN,
        ImmutableMap.<String, String>builder()
            .putAll(System.getenv())
            .put("envKey", "envValue")
            .build(),
        new DefaultCellPathResolver(filesystem.getRootPath(), config));
    Cell cell = new TestCellBuilder()
        .setFilesystem(filesystem)
        .setBuckConfig(buckConfig)
        .build();
    BuildJobState dump = DistBuildState.dump(
        new DistBuildCellIndexer(cell),
        emptyActionGraph(),
        createDefaultCodec(cell, Optional.empty()),
        createTargetGraph(filesystem));
    expectedException.expect(IllegalStateException.class);
    DistBuildState.load(dump, cell, knownBuildRuleTypesFactory);
  }

  /** A graph spanning two cells must reload as two distinct cells. */
  @Test
  public void worksCrossCell() throws IOException, InterruptedException {
    ProjectFilesystem parentFs = createJavaOnlyFilesystem("/saving");
    Path cell1Root = parentFs.resolve("cell1");
    Path cell2Root = parentFs.resolve("cell2");
    parentFs.mkdirs(cell1Root);
    parentFs.mkdirs(cell2Root);
    ProjectFilesystem cell1Filesystem = new ProjectFilesystem(cell1Root);
    ProjectFilesystem cell2Filesystem = new ProjectFilesystem(cell2Root);
    // cell1 is the root cell; cell2 is registered via the [repositories] section.
    Config config = new Config(ConfigBuilder.rawFromLines(
        "[repositories]",
        "cell2 = " + cell2Root.toString()));
    BuckConfig buckConfig = new BuckConfig(
        config,
        cell1Filesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.<String, String>builder()
            .putAll(System.getenv())
            .put("envKey", "envValue")
            .build(),
        new DefaultCellPathResolver(cell1Root, config));
    Cell rootCellWhenSaving = new TestCellBuilder()
        .setFilesystem(cell1Filesystem)
        .setBuckConfig(buckConfig)
        .build();
    BuildJobState dump = DistBuildState.dump(
        new DistBuildCellIndexer(rootCellWhenSaving),
        emptyActionGraph(),
        createDefaultCodec(rootCellWhenSaving, Optional.empty()),
        createCrossCellTargetGraph(cell1Filesystem, cell2Filesystem));
    Cell rootCellWhenLoading = new TestCellBuilder()
        .setFilesystem(createJavaOnlyFilesystem("/loading"))
        .build();
    DistBuildState distributedBuildState =
        DistBuildState.load(dump, rootCellWhenLoading, knownBuildRuleTypesFactory);
    ImmutableMap<Integer, Cell> cells = distributedBuildState.getCells();
    assertThat(cells, Matchers.aMapWithSize(2));
  }

  /** Builds a DistBuildFileHashes over an empty action graph for dumps that ignore hashes. */
  private DistBuildFileHashes emptyActionGraph() throws IOException {
    ActionGraph actionGraph = new ActionGraph(ImmutableList.of());
    BuildRuleResolver ruleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
    SourcePathResolver sourcePathResolver = new SourcePathResolver(ruleFinder);
    ProjectFilesystem projectFilesystem = createJavaOnlyFilesystem("/opt/buck");
    return new DistBuildFileHashes(
        actionGraph,
        sourcePathResolver,
        ruleFinder,
        DefaultFileHashCache.createDefaultFileHashCache(projectFilesystem),
        Functions.constant(0),
        MoreExecutors.newDirectExecutorService(),
        /* keySeed */ 0,
        FakeBuckConfig.builder().build());
  }

  /**
   * Creates the codec used by both dump and load. When a Parser is supplied, raw target
   * nodes are re-derived from it; otherwise an empty raw-node map is used.
   */
  private static DistBuildTargetGraphCodec createDefaultCodec(
      final Cell cell,
      final Optional<Parser> parser) {
    ObjectMapper objectMapper = ObjectMappers.newDefaultInstance(); // NOPMD confused by lambda
    BuckEventBus eventBus = BuckEventBusFactory.newInstance();
    Function<? super TargetNode<?, ?>, ? extends Map<String, Object>> nodeToRawNode;
    if (parser.isPresent()) {
      nodeToRawNode = (Function<TargetNode<?, ?>, Map<String, Object>>) input -> {
        try {
          return parser.get().getRawTargetNode(
              eventBus,
              cell.getCell(input.getBuildTarget()),
              /* enableProfiling */ false,
              MoreExecutors.listeningDecorator(MoreExecutors.newDirectExecutorService()),
              input);
        } catch (BuildFileParseException e) {
          // Parse failures inside the lambda are unrecoverable for the test; rethrow with cause.
          throw new RuntimeException(e);
        }
      };
    } else {
      nodeToRawNode = Functions.constant(ImmutableMap.<String, Object>of());
    }
    DistBuildTypeCoercerFactory typeCoercerFactory =
        new DistBuildTypeCoercerFactory(objectMapper);
    ParserTargetNodeFactory<TargetNode<?, ?>> parserTargetNodeFactory =
        DefaultParserTargetNodeFactory.createForDistributedBuild(
            new ConstructorArgMarshaller(typeCoercerFactory),
            new TargetNodeFactory(typeCoercerFactory));
    return new DistBuildTargetGraphCodec(
        objectMapper,
        parserTargetNodeFactory,
        nodeToRawNode);
  }

  /** Single-node graph containing one java_library at //:foo. */
  private static TargetGraph createTargetGraph(ProjectFilesystem filesystem) {
    return TargetGraphFactory.newInstance(
        JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:foo"), filesystem)
            .build());
  }

  /** Graph where cell one's //:foo consumes a source produced by cell two's //:foo. */
  private static TargetGraph createCrossCellTargetGraph(
      ProjectFilesystem cellOneFilesystem,
      ProjectFilesystem cellTwoFilesystem) {
    Preconditions.checkArgument(!cellOneFilesystem.equals(cellTwoFilesystem));
    BuildTarget target = BuildTargetFactory.newInstance(cellTwoFilesystem, "//:foo");
    return TargetGraphFactory.newInstance(
        JavaLibraryBuilder.createBuilder(
            BuildTargetFactory.newInstance(cellOneFilesystem, "//:foo"),
            cellOneFilesystem)
            .addSrc(new BuildTargetSourcePath(target))
            .build(),
        JavaLibraryBuilder.createBuilder(
            target,
            cellTwoFilesystem)
            .build()
    );
  }

  /** Fake filesystem with buck-out pre-created, as the dump/load paths expect it to exist. */
  private static ProjectFilesystem createJavaOnlyFilesystem(String rootPath) throws IOException {
    ProjectFilesystem filesystem = FakeProjectFilesystem.createJavaOnlyFilesystem(rootPath);
    filesystem.mkdirs(filesystem.getBuckPaths().getBuckOut());
    return filesystem;
  }
}
| |
/*
* Title: CloudSim Toolkit Description: CloudSim (Cloud Simulation) Toolkit for Modeling and
* Simulation of Clouds Licence: GPL - http://www.gnu.org/copyleft/gpl.html
*
* Copyright (c) 2009-2012, The University of Melbourne, Australia
*/
package org.cloudbus.cloudsim;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
* Represents a Virtual Machine (VM) that runs inside a Host, sharing a hostList with other VMs. It processes
* cloudlets. This processing happens according to a policy, defined by the CloudletScheduler. Each
* VM has a owner, which can submit cloudlets to the VM to execute them.
*
* @author Rodrigo N. Calheiros
* @author Anton Beloglazov
* @since CloudSim Toolkit 1.0
*/
public class Vm {

    /** The VM unique id. */
    private int id;

    /** The user id. */
    private int userId;

    /** A Unique Identifier (UID) for the VM, that is compounded by the user id and VM id. */
    private String uid;

    /** The size the VM image size (the amount of storage it will use, at least initially). */
    private long size;

    /** The MIPS capacity of each VM's PE. */
    private double mips;

    /** The number of PEs required by the VM. */
    private int numberOfPes;

    /** The required ram. */
    private int ram;

    /** The required bw. */
    private long bw;

    /** The Virtual Machine Monitor (VMM) that manages the VM. */
    private String vmm;

    /** The Cloudlet scheduler the VM uses to schedule cloudlets execution. */
    private CloudletScheduler cloudletScheduler;

    /** The PM that hosts the VM. */
    private Host host;

    /** Indicates if the VM is in migration process. */
    private boolean inMigration;

    /** The current allocated storage size. */
    private long currentAllocatedSize;

    /** The current allocated ram. */
    private int currentAllocatedRam;

    /** The current allocated bw. */
    private long currentAllocatedBw;

    /** The current allocated mips for each VM's PE. */
    private List<Double> currentAllocatedMips;

    /** Indicates if the VM is being instantiated. */
    private boolean beingInstantiated;

    /** The mips allocation history.
     * @todo Instead of using a list, this attribute would be
     * a map, where the key can be the history time
     * and the value the history itself.
     * By this way, if one wants to get the history for a given
     * time, he/she doesn't have to iterate over the entire list
     * to find the desired entry.
     */
    private final List<VmStateHistoryEntry> stateHistory = new LinkedList<VmStateHistoryEntry>();

    /**
     * Creates a new Vm object.
     *
     * @param id unique ID of the VM
     * @param userId ID of the VM's owner
     * @param mips the mips
     * @param numberOfPes amount of CPUs
     * @param ram amount of ram
     * @param bw amount of bandwidth
     * @param size The size the VM image size (the amount of storage it will use, at least initially).
     * @param vmm virtual machine monitor
     * @param cloudletScheduler cloudletScheduler policy for cloudlets scheduling
     *
     * @pre id >= 0
     * @pre userId >= 0
     * @pre size > 0
     * @pre ram > 0
     * @pre bw > 0
     * @pre cpus > 0
     * @pre priority >= 0
     * @pre cloudletScheduler != null
     * @post $none
     */
    public Vm(
            int id,
            int userId,
            double mips,
            int numberOfPes,
            int ram,
            long bw,
            long size,
            String vmm,
            CloudletScheduler cloudletScheduler) {
        setId(id);
        setUserId(userId);
        setUid(getUid(userId, id));
        setMips(mips);
        setNumberOfPes(numberOfPes);
        setRam(ram);
        setBw(bw);
        setSize(size);
        setVmm(vmm);
        setCloudletScheduler(cloudletScheduler);

        // A freshly created VM is not migrating and still instantiating; nothing allocated yet.
        setInMigration(false);
        setBeingInstantiated(true);

        setCurrentAllocatedBw(0);
        setCurrentAllocatedMips(null);
        setCurrentAllocatedRam(0);
        setCurrentAllocatedSize(0);
    }

    /**
     * Updates the processing of cloudlets running on this VM.
     *
     * @param currentTime current simulation time
     * @param mipsShare list with MIPS share of each Pe available to the scheduler
     * @return time predicted completion time of the earliest finishing cloudlet, or 0 if there is no
     *         next events
     * @pre currentTime >= 0
     * @post $none
     */
    public double updateVmProcessing(double currentTime, List<Double> mipsShare) {
        if (mipsShare != null) {
            return getCloudletScheduler().updateVmProcessing(currentTime, mipsShare);
        }
        return 0.0;
    }

    /**
     * Gets the current requested mips.
     *
     * @return the current requested mips
     */
    public List<Double> getCurrentRequestedMips() {
        // While the VM is being instantiated no cloudlets are scheduled yet, so request the
        // full MIPS capacity for every PE instead of querying the scheduler. (The previous
        // version always called the scheduler and then discarded the result in this case.)
        if (isBeingInstantiated()) {
            List<Double> currentRequestedMips = new ArrayList<Double>(getNumberOfPes());
            for (int i = 0; i < getNumberOfPes(); i++) {
                currentRequestedMips.add(getMips());
            }
            return currentRequestedMips;
        }
        return getCloudletScheduler().getCurrentRequestedMips();
    }

    /**
     * Gets the current requested total mips.
     *
     * @return the current requested total mips
     */
    public double getCurrentRequestedTotalMips() {
        double totalRequestedMips = 0;
        for (double mips : getCurrentRequestedMips()) {
            totalRequestedMips += mips;
        }
        return totalRequestedMips;
    }

    /**
     * Gets the current requested max mips among all virtual PEs.
     *
     * @return the current requested max mips
     */
    public double getCurrentRequestedMaxMips() {
        double maxMips = 0;
        for (double mips : getCurrentRequestedMips()) {
            if (mips > maxMips) {
                maxMips = mips;
            }
        }
        return maxMips;
    }

    /**
     * Gets the current requested bw.
     *
     * @return the current requested bw
     */
    public long getCurrentRequestedBw() {
        if (isBeingInstantiated()) {
            return getBw();
        }
        // Scale the configured bandwidth by the scheduler's current utilization fraction.
        return (long) (getCloudletScheduler().getCurrentRequestedUtilizationOfBw() * getBw());
    }

    /**
     * Gets the current requested ram.
     *
     * @return the current requested ram
     */
    public int getCurrentRequestedRam() {
        if (isBeingInstantiated()) {
            return getRam();
        }
        // Scale the configured ram by the scheduler's current utilization fraction.
        return (int) (getCloudletScheduler().getCurrentRequestedUtilizationOfRam() * getRam());
    }

    /**
     * Gets total CPU utilization percentage of all cloudlets running on this VM at the given time
     *
     * @param time the time
     * @return total utilization percentage
     */
    public double getTotalUtilizationOfCpu(double time) {
        return getCloudletScheduler().getTotalUtilizationOfCpu(time);
    }

    /**
     * Get total CPU utilization of all cloudlets running on this VM at the given time (in MIPS).
     *
     * @param time the time
     * @return total cpu utilization in MIPS
     * @see #getTotalUtilizationOfCpu(double)
     */
    public double getTotalUtilizationOfCpuMips(double time) {
        return getTotalUtilizationOfCpu(time) * getMips();
    }

    /**
     * Sets the uid.
     *
     * @param uid the new uid
     */
    public void setUid(String uid) {
        this.uid = uid;
    }

    /**
     * Gets unique string identifier of the VM.
     *
     * @return string uid
     */
    public String getUid() {
        return uid;
    }

    /**
     * Generate unique string identifier of the VM.
     *
     * @param userId the user id
     * @param vmId the vm id
     * @return string uid
     */
    public static String getUid(int userId, int vmId) {
        return userId + "-" + vmId;
    }

    /**
     * Gets the VM id.
     *
     * @return the id
     */
    public int getId() {
        return id;
    }

    /**
     * Sets the VM id.
     *
     * @param id the new id
     */
    protected void setId(int id) {
        this.id = id;
    }

    /**
     * Sets the user id.
     *
     * @param userId the new user id
     */
    protected void setUserId(int userId) {
        this.userId = userId;
    }

    /**
     * Gets the ID of the owner of the VM.
     *
     * @return VM's owner ID
     * @pre $none
     * @post $none
     */
    public int getUserId() {
        return userId;
    }

    /**
     * Gets the mips.
     *
     * @return the mips
     */
    public double getMips() {
        return mips;
    }

    /**
     * Sets the mips.
     *
     * @param mips the new mips
     */
    protected void setMips(double mips) {
        this.mips = mips;
    }

    /**
     * Gets the number of pes.
     *
     * @return the number of pes
     */
    public int getNumberOfPes() {
        return numberOfPes;
    }

    /**
     * Sets the number of pes.
     *
     * @param numberOfPes the new number of pes
     */
    protected void setNumberOfPes(int numberOfPes) {
        this.numberOfPes = numberOfPes;
    }

    /**
     * Gets the amount of ram.
     *
     * @return amount of ram
     * @pre $none
     * @post $none
     */
    public int getRam() {
        return ram;
    }

    /**
     * Sets the amount of ram.
     *
     * @param ram new amount of ram
     * @pre ram > 0
     * @post $none
     */
    public void setRam(int ram) {
        this.ram = ram;
    }

    /**
     * Gets the amount of bandwidth.
     *
     * @return amount of bandwidth
     * @pre $none
     * @post $none
     */
    public long getBw() {
        return bw;
    }

    /**
     * Sets the amount of bandwidth.
     *
     * @param bw new amount of bandwidth
     * @pre bw > 0
     * @post $none
     */
    public void setBw(long bw) {
        this.bw = bw;
    }

    /**
     * Gets the amount of storage.
     *
     * @return amount of storage
     * @pre $none
     * @post $none
     */
    public long getSize() {
        return size;
    }

    /**
     * Sets the amount of storage.
     *
     * @param size new amount of storage
     * @pre size > 0
     * @post $none
     */
    public void setSize(long size) {
        this.size = size;
    }

    /**
     * Gets the VMM.
     *
     * @return VMM
     * @pre $none
     * @post $none
     */
    public String getVmm() {
        return vmm;
    }

    /**
     * Sets the VMM.
     *
     * @param vmm the new VMM
     */
    protected void setVmm(String vmm) {
        this.vmm = vmm;
    }

    /**
     * Sets the host that runs this VM.
     *
     * @param host Host running the VM
     * @pre host != $null
     * @post $none
     */
    public void setHost(Host host) {
        this.host = host;
    }

    /**
     * Gets the host.
     *
     * @return the host
     */
    public Host getHost() {
        return host;
    }

    /**
     * Gets the vm scheduler.
     *
     * @return the vm scheduler
     */
    public CloudletScheduler getCloudletScheduler() {
        return cloudletScheduler;
    }

    /**
     * Sets the vm scheduler.
     *
     * @param cloudletScheduler the new vm scheduler
     */
    protected void setCloudletScheduler(CloudletScheduler cloudletScheduler) {
        this.cloudletScheduler = cloudletScheduler;
    }

    /**
     * Checks if is in migration.
     *
     * @return true, if is in migration
     */
    public boolean isInMigration() {
        return inMigration;
    }

    /**
     * Sets the in migration.
     *
     * @param inMigration the new in migration
     */
    public void setInMigration(boolean inMigration) {
        this.inMigration = inMigration;
    }

    /**
     * Gets the current allocated size.
     *
     * @return the current allocated size
     */
    public long getCurrentAllocatedSize() {
        return currentAllocatedSize;
    }

    /**
     * Sets the current allocated size.
     *
     * @param currentAllocatedSize the new current allocated size
     */
    protected void setCurrentAllocatedSize(long currentAllocatedSize) {
        this.currentAllocatedSize = currentAllocatedSize;
    }

    /**
     * Gets the current allocated ram.
     *
     * @return the current allocated ram
     */
    public int getCurrentAllocatedRam() {
        return currentAllocatedRam;
    }

    /**
     * Sets the current allocated ram.
     *
     * @param currentAllocatedRam the new current allocated ram
     */
    public void setCurrentAllocatedRam(int currentAllocatedRam) {
        this.currentAllocatedRam = currentAllocatedRam;
    }

    /**
     * Gets the current allocated bw.
     *
     * @return the current allocated bw
     */
    public long getCurrentAllocatedBw() {
        return currentAllocatedBw;
    }

    /**
     * Sets the current allocated bw.
     *
     * @param currentAllocatedBw the new current allocated bw
     */
    public void setCurrentAllocatedBw(long currentAllocatedBw) {
        this.currentAllocatedBw = currentAllocatedBw;
    }

    /**
     * Gets the current allocated mips.
     *
     * @return the current allocated mips
     * @TODO replace returning the field by a call to getCloudletScheduler().getCurrentMipsShare()
     */
    public List<Double> getCurrentAllocatedMips() {
        return currentAllocatedMips;
    }

    /**
     * Sets the current allocated mips.
     *
     * @param currentAllocatedMips the new current allocated mips
     */
    public void setCurrentAllocatedMips(List<Double> currentAllocatedMips) {
        this.currentAllocatedMips = currentAllocatedMips;
    }

    /**
     * Checks if is being instantiated.
     *
     * @return true, if is being instantiated
     */
    public boolean isBeingInstantiated() {
        return beingInstantiated;
    }

    /**
     * Sets the being instantiated.
     *
     * @param beingInstantiated the new being instantiated
     */
    public void setBeingInstantiated(boolean beingInstantiated) {
        this.beingInstantiated = beingInstantiated;
    }

    /**
     * Gets the state history.
     *
     * @return the state history
     */
    public List<VmStateHistoryEntry> getStateHistory() {
        return stateHistory;
    }

    /**
     * Adds a VM state history entry.
     *
     * @param time the time
     * @param allocatedMips the allocated mips
     * @param requestedMips the requested mips
     * @param isInMigration the is in migration
     */
    public void addStateHistoryEntry(
            double time,
            double allocatedMips,
            double requestedMips,
            boolean isInMigration) {
        VmStateHistoryEntry newState = new VmStateHistoryEntry(
                time,
                allocatedMips,
                requestedMips,
                isInMigration);
        // Collapse consecutive entries recorded at the same simulation time: the latest wins.
        if (!getStateHistory().isEmpty()) {
            VmStateHistoryEntry previousState = getStateHistory().get(getStateHistory().size() - 1);
            if (previousState.getTime() == time) {
                getStateHistory().set(getStateHistory().size() - 1, newState);
                return;
            }
        }
        getStateHistory().add(newState);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf
package org.apache.drill.exec.proto.beans;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;
// Protostuff bean for Drill's QueryId message: two 64-bit halves (part1, part2)
// serialized as sfixed64, plus an optional string form (field 3). The class is its
// own Schema, so one instance handles both data and (de)serialization metadata.
// NOTE(review): generated code (see header above) -- regenerate rather than hand-edit.
public final class QueryId implements Externalizable, Message<QueryId>, Schema<QueryId>
{

    public static Schema<QueryId> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    public static QueryId getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    // Shared instance that doubles as the Schema singleton for the type.
    static final QueryId DEFAULT_INSTANCE = new QueryId();

    private long part1;
    private long part2;
    private String text;

    public QueryId()
    {
    }

    // getters and setters

    // part1

    public long getPart1()
    {
        return part1;
    }

    // Setters return 'this' to support fluent chaining.
    public QueryId setPart1(long part1)
    {
        this.part1 = part1;
        return this;
    }

    // part2

    public long getPart2()
    {
        return part2;
    }

    public QueryId setPart2(long part2)
    {
        this.part2 = part2;
        return this;
    }

    // text

    public String getText()
    {
        return text;
    }

    public QueryId setText(String text)
    {
        this.text = text;
        return this;
    }

    // java serialization (delegates to the protostuff graph codec)

    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<QueryId> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public QueryId newMessage()
    {
        return new QueryId();
    }

    public Class<QueryId> typeClass()
    {
        return QueryId.class;
    }

    public String messageName()
    {
        return QueryId.class.getSimpleName();
    }

    public String messageFullName()
    {
        return QueryId.class.getName();
    }

    // All fields are optional, so every instance is considered initialized.
    public boolean isInitialized(QueryId message)
    {
        return true;
    }

    public void mergeFrom(Input input, QueryId message) throws IOException
    {
        // Field number 0 marks end-of-message on the wire; unknown numbers are skipped.
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.part1 = input.readSFixed64();
                    break;
                case 2:
                    message.part2 = input.readSFixed64();
                    break;
                case 3:
                    message.text = input.readString();
                    break;
                default:
                    input.handleUnknownField(number, this);
            }
        }
    }

    // Default-valued fields (0 / null) are omitted from the output.
    public void writeTo(Output output, QueryId message) throws IOException
    {
        if(message.part1 != 0)
            output.writeSFixed64(1, message.part1, false);
        if(message.part2 != 0)
            output.writeSFixed64(2, message.part2, false);
        if(message.text != null)
            output.writeString(3, message.text, false);
    }

    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "part1";
            case 2: return "part2";
            case 3: return "text";
            default: return null;
        }
    }

    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Reverse lookup (field name -> field number) backing getFieldNumber.
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("part1", 1);
        __fieldMap.put("part2", 2);
        __fieldMap.put("text", 3);
    }
}
| |
package org.lnu.is.dao.builder;
import org.junit.Test;
import org.lnu.is.pagination.OrderBy;
import org.lnu.is.pagination.OrderByType;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class BaseQueryBuilderTest {
@Test
public void testBuild() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
String expected = "SELECT FROM SMTH WHERE condition ";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithOrderBy() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
OrderBy orderBy1 = new OrderBy("name1", OrderByType.ASC);
OrderBy orderBy2 = new OrderBy("name2", OrderByType.DESC);
List<OrderBy> orders = Arrays.asList(orderBy1, orderBy2);
String expected = "SELECT FROM SMTH WHERE condition ORDER BY e.name1 ASC, e.name2 DESC";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.orderBy(orders)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithOrderByWithLambdaFunction() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
String condition3 = "condition2.name = ''{0}'' ";
List<String> names = Arrays.asList("name1", "name2", "name3");
OrderBy orderBy1 = new OrderBy("name1", OrderByType.ASC);
OrderBy orderBy2 = new OrderBy("name2", OrderByType.DESC);
List<OrderBy> orders = Arrays.asList(orderBy1, orderBy2);
String expected = "SELECT FROM SMTH WHERE condition AND (condition2.name = 'name1' OR condition2.name = 'name2' OR condition2.name = 'name3' ) ORDER BY e.name1 ASC, e.name2 DESC";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.addAndConditionForLoop(o -> MessageFormat.format(condition3, o), names)
.orderBy(orders)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithSingleOrderBy() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
OrderBy orderBy1 = new OrderBy("name1", OrderByType.ASC);
List<OrderBy> orders = Arrays.asList(orderBy1);
String expected = "SELECT FROM SMTH WHERE condition ORDER BY e.name1 ASC";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.orderBy(orders)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithEmptyOrderBy() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
List<OrderBy> orders = Collections.emptyList();
String expected = "SELECT FROM SMTH WHERE condition ";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.orderBy(orders)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildAnd() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
String expected = "SELECT FROM SMTH WHERE ( condition ) ";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.openBracket()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.closeBracket()
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithComplicated() throws Exception {
// Given
String query = "SELECT FROM SMTH %s";
String condition = "condition ";
Object parameter = new Object();
String condition1 = "condition1 ";
Object nullParameter = null;
String condition2 = "condition2 ";
Object parameter2 = "fdsfds";
String condition3 = "condition3 ";
Object parameter3 = "qwrvcvds";
String expected = "SELECT FROM SMTH WHERE ( condition ) AND condition2 AND condition3 ";
// When
String actual = BaseQueryBuilder.getInstance(query)
.where()
.openBracket()
.addOrCondition(condition, parameter)
.addOrCondition(condition1, nullParameter)
.closeBracket()
.addAndCondition(condition2, parameter2)
.addAndCondition(condition3, parameter3)
.build();
// Then
assertEquals(expected, actual);
}
@Test
public void testBuildWithTwoOrConditions() throws Exception {
    // Given: a bracketed OR group, two AND conditions, and a trailing OR
    // condition appended after the AND chain.
    final String template = "SELECT FROM SMTH %s";
    final Object boundValue = new Object();
    final Object missingValue = null;

    // When: the full chain is built.
    final String result = BaseQueryBuilder.getInstance(template)
            .where()
            .openBracket()
            .addOrCondition("condition ", boundValue)
            .addOrCondition("condition1 ", missingValue)
            .closeBracket()
            .addAndCondition("condition2 ", "fdsfds")
            .addAndCondition("condition3 ", "qwrvcvds")
            .addOrCondition("condition4 ", "lasflasnlkm")
            .build();

    // Then: the trailing condition is joined with OR.
    assertEquals("SELECT FROM SMTH WHERE ( condition ) AND condition2 AND condition3 OR condition4 ", result);
}
@Test
public void testBuildWithTwoOrConditionsAndAndCondition() throws Exception {
    // Given: a bracketed OR group, two ANDs, an OR, and a final AND —
    // exercising alternation of connectors in a single chain.
    final String template = "SELECT FROM SMTH %s";
    final Object boundValue = new Object();
    final Object missingValue = null;

    // When: the full chain is built.
    final String result = BaseQueryBuilder.getInstance(template)
            .where()
            .openBracket()
            .addOrCondition("condition ", boundValue)
            .addOrCondition("condition1 ", missingValue)
            .closeBracket()
            .addAndCondition("condition2 ", "fdsfds")
            .addAndCondition("condition3 ", "qwrvcvds")
            .addOrCondition("condition4 ", "lasflasnlkm")
            .addAndCondition("condition5 ", "m,ncksadkj")
            .build();

    // Then: connectors appear in the order the conditions were added.
    assertEquals("SELECT FROM SMTH WHERE ( condition ) AND condition2 AND condition3 OR condition4 AND condition5 ", result);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.xml;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import com.intellij.html.impl.RelaxedHtmlFromSchemaElementDescriptor;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.html.HtmlTag;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlElement;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.XmlElementDescriptorAwareAboutChildren;
import com.intellij.xml.XmlExtension;
import com.intellij.xml.XmlNSDescriptor;
import com.intellij.xml.impl.schema.AnyXmlElementDescriptor;
import com.intellij.xml.impl.schema.XmlElementDescriptorImpl;
import com.intellij.xml.util.HtmlUtil;
import com.intellij.xml.util.XmlUtil;
/**
 * Collects XML element descriptors that are valid tag-name variants
 * (e.g. completion candidates) for a given tag, drawn from a set of
 * candidate namespaces.
 */
public class TagNameVariantCollector
{
/**
 * Gathers descriptors of all elements that could appear in place of
 * {@code element}, searching the given namespaces.
 *
 * @param element the tag for which variants are collected
 * @param namespaces candidate namespace URIs to search
 * @param nsInfo if non-null, receives one namespace string per returned
 *               descriptor (kept parallel to the result list)
 * @return filtered list of candidate element descriptors
 */
public static List<XmlElementDescriptor> getTagDescriptors(final XmlTag element, final Collection<String> namespaces, @Nullable List<String> nsInfo)
{
XmlElementDescriptor elementDescriptor = null;
// The element's own namespace only counts when it is explicitly prefixed;
// otherwise it is inferred from the ancestor walk below.
String elementNamespace = element.getNamespacePrefix().isEmpty() ? null : element.getNamespace();
final Map<String, XmlElementDescriptor> descriptorsMap = new HashMap<>();
PsiElement context = element.getParent();
PsiElement curElement = element.getParent();
// Walk up the ancestor tags, recording the first descriptor seen for each
// namespace. The first ancestor descriptor found becomes elementDescriptor.
while(curElement instanceof XmlTag)
{
final XmlTag declarationTag = (XmlTag) curElement;
final String namespace = declarationTag.getNamespace();
if(!descriptorsMap.containsKey(namespace))
{
final XmlElementDescriptor descriptor = declarationTag.getDescriptor();
if(descriptor != null)
{
descriptorsMap.put(namespace, descriptor);
if(elementDescriptor == null)
{
elementDescriptor = descriptor;
if(elementNamespace == null)
{
elementNamespace = namespace;
}
}
}
}
curElement = curElement.getContext();
}
final Set<XmlNSDescriptor> visited = new HashSet<>();
final XmlExtension extension = XmlExtension.getExtension(element.getContainingFile());
final ArrayList<XmlElementDescriptor> variants = new ArrayList<>();
for(final String namespace : namespaces)
{
final int initialSize = variants.size();
processVariantsInNamespace(namespace, element, variants, elementDescriptor, elementNamespace, descriptorsMap, visited, context instanceof XmlTag ? (XmlTag) context : element, extension);
// Record, for every descriptor just added, the namespace it came from.
if(nsInfo != null)
{
for(int i = initialSize; i < variants.size(); i++)
{
XmlElementDescriptor descriptor = variants.get(i);
nsInfo.add(descriptor instanceof XmlElementDescriptorImpl && !(descriptor instanceof RelaxedHtmlFromSchemaElementDescriptor) ? ((XmlElementDescriptorImpl) descriptor)
.getNamespaceByContext(element) : namespace);
}
}
}
final boolean hasPrefix = StringUtil.isNotEmpty(element.getNamespacePrefix());
// Drop "any element" placeholders; for prefixed tags, also drop schema
// descriptors whose namespace is not among the requested ones.
return ContainerUtil.filter(variants, descriptor ->
{
if(descriptor instanceof AnyXmlElementDescriptor)
{
return false;
}
else if(hasPrefix && descriptor instanceof XmlElementDescriptorImpl && !namespaces.contains(((XmlElementDescriptorImpl) descriptor).getNamespace()))
{
return false;
}
return true;
});
}
/**
 * Adds to {@code variants} the element descriptors available in one
 * namespace. Uses a previously collected ancestor descriptor when the
 * namespace is already known from the ancestor walk; otherwise resolves an
 * {@link XmlNSDescriptor} via the {@link XmlExtension}.
 */
private static void processVariantsInNamespace(final String namespace,
final XmlTag element,
final List<XmlElementDescriptor> variants,
final XmlElementDescriptor elementDescriptor,
final String elementNamespace,
final Map<String, XmlElementDescriptor> descriptorsMap,
final Set<XmlNSDescriptor> visited,
XmlTag parent,
final XmlExtension extension)
{
if(descriptorsMap.containsKey(namespace))
{
final XmlElementDescriptor descriptor = descriptorsMap.get(namespace);
if(isAcceptableNs(element, elementDescriptor, elementNamespace, namespace))
{
for(XmlElementDescriptor containedDescriptor : descriptor.getElementsDescriptors(parent))
{
if(containedDescriptor != null)
{
variants.add(containedDescriptor);
}
}
}
if(element instanceof HtmlTag)
{
HtmlUtil.addHtmlSpecificCompletions(descriptor, element, variants);
}
visited.add(descriptor.getNSDescriptor());
}
else
{
// Don't use default namespace in case there are other namespaces in scope
// If there are tags from default namespace they will be handled via
// their element descriptors (prev if section)
if(namespace == null)
{
return;
}
if(namespace.isEmpty() && !visited.isEmpty())
{
return;
}
// Try strict resolution first; fall back to non-strict only when no
// descriptors were collected from ancestors at all.
XmlNSDescriptor nsDescriptor = getDescriptor(element, namespace, true, extension);
if(nsDescriptor == null)
{
if(!descriptorsMap.isEmpty())
{
return;
}
nsDescriptor = getDescriptor(element, namespace, false, extension);
}
if(nsDescriptor != null && !visited.contains(nsDescriptor) && isAcceptableNs(element, elementDescriptor, elementNamespace, namespace))
{
visited.add(nsDescriptor);
final XmlElementDescriptor[] rootElementsDescriptors = nsDescriptor.getRootElementsDescriptors(PsiTreeUtil.getParentOfType(element, XmlDocument.class));
final XmlTag parentTag = extension.getParentTagForNamespace(element, nsDescriptor);
final XmlElementDescriptor parentDescriptor;
if(parentTag == element.getParentTag())
{
parentDescriptor = elementDescriptor;
}
else
{
assert parentTag != null;
parentDescriptor = parentTag.getDescriptor();
}
for(XmlElementDescriptor candidateDescriptor : rootElementsDescriptors)
{
if(candidateDescriptor != null && couldContainDescriptor(parentTag, parentDescriptor, candidateDescriptor, namespace, false))
{
variants.add(candidateDescriptor);
}
}
}
}
}
/** Resolves the namespace descriptor for {@code namespace} via the extension. */
private static XmlNSDescriptor getDescriptor(final XmlTag element, final String namespace, final boolean strict, final XmlExtension extension)
{
return extension.getNSDescriptor(element, namespace, strict);
}
/**
 * Checks whether {@code parentDescriptor} admits {@code childDescriptor} as a
 * child, by creating a throwaway child tag and asking the parent for its
 * descriptor. Template-framework namespaces are always allowed.
 */
static boolean couldContainDescriptor(final XmlTag parentTag, final XmlElementDescriptor parentDescriptor, final XmlElementDescriptor childDescriptor, String childNamespace, boolean strict)
{
if(XmlUtil.nsFromTemplateFramework(childNamespace))
{
return true;
}
if(parentTag == null)
{
return true;
}
if(parentDescriptor == null)
{
return false;
}
final XmlTag childTag = parentTag.createChildTag(childDescriptor.getName(), childNamespace, null, false);
childTag.putUserData(XmlElement.INCLUDING_ELEMENT, parentTag);
XmlElementDescriptor descriptor = parentDescriptor.getElementDescriptor(childTag, parentTag);
// In strict mode a generic "any element" match does not count.
return descriptor != null && (!strict || !(descriptor instanceof AnyXmlElementDescriptor));
}
/**
 * Whether elements from {@code namespace} are acceptable children given the
 * element's own descriptor/namespace. Descriptors unaware of their children
 * accept everything.
 */
private static boolean isAcceptableNs(final XmlTag element, final XmlElementDescriptor elementDescriptor, final String elementNamespace, final String namespace)
{
return !(elementDescriptor instanceof XmlElementDescriptorAwareAboutChildren) || elementNamespace == null || elementNamespace.equals(namespace) || ((XmlElementDescriptorAwareAboutChildren)
elementDescriptor).allowElementsFromNamespace(namespace, element.getParentTag());
}
/** Strict check: can {@code parent} contain {@code child} per their descriptors? */
public static boolean couldContain(XmlTag parent, XmlTag child)
{
return couldContainDescriptor(parent, parent.getDescriptor(), child.getDescriptor(), child.getNamespace(), true);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.destination.jdbc;
import com.streamsets.pipeline.api.Batch;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.Target;
import com.streamsets.pipeline.api.base.BaseTarget;
import com.streamsets.pipeline.api.base.OnRecordErrorException;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.lib.jdbc.ChangeLogFormat;
import com.streamsets.pipeline.lib.jdbc.JdbcGenericRecordWriter;
import com.streamsets.pipeline.lib.jdbc.JdbcRecordWriter;
import com.streamsets.pipeline.lib.jdbc.JdbcUtil;
import com.streamsets.pipeline.lib.jdbc.MicrosoftJdbcRecordWriter;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
 * JDBC Destination for StreamSets Data Collector.
 *
 * Validates connectivity and table metadata at init time, then delegates
 * batch writes to a {@link JdbcRecordWriter} chosen by the configured
 * {@link ChangeLogFormat}.
 */
public class JdbcTarget extends BaseTarget {
  private static final Logger LOG = LoggerFactory.getLogger(JdbcTarget.class);

  // Names of stage configuration properties, used when attaching ConfigIssues.
  private static final String CUSTOM_MAPPINGS = "columnNames";
  private static final String TABLE_NAME = "tableName";
  private static final String CONNECTION_STRING = "connectionString";
  private static final String DRIVER_CLASSNAME = "driverClassName";

  private final String connectionString;
  private final String username;
  private final String password;
  private final boolean rollbackOnError;
  private final String tableName;
  private final List<JdbcFieldMappingConfig> customMappings;
  private final Properties driverProperties = new Properties();
  private final String driverClassName;
  private final String connectionTestQuery;
  private final ChangeLogFormat changeLogFormat;

  private HikariDataSource dataSource = null;
  private Connection connection = null;
  private JdbcRecordWriter recordWriter;

  /**
   * @param connectionString JDBC URL of the target database
   * @param username database user
   * @param password database password
   * @param tableName table records are written to
   * @param customMappings field-to-column mappings; each column is validated
   *                       against the table's metadata at init time
   * @param rollbackOnError whether to roll back a batch on record errors
   * @param driverProperties extra driver properties; may be null
   * @param changeLogFormat selects the record-writer implementation
   * @param driverClassName optional explicit JDBC driver class
   * @param connectionTestQuery optional pool connection-test query
   */
  public JdbcTarget(
      final String connectionString,
      final String username,
      final String password,
      final String tableName,
      final List<JdbcFieldMappingConfig> customMappings,
      final boolean rollbackOnError,
      final Map<String, String> driverProperties,
      final ChangeLogFormat changeLogFormat,
      final String driverClassName,
      final String connectionTestQuery
  ) {
    this.connectionString = connectionString;
    this.username = username;
    this.password = password;
    this.tableName = tableName;
    this.customMappings = customMappings;
    this.rollbackOnError = rollbackOnError;
    if (driverProperties != null) {
      this.driverProperties.putAll(driverProperties);
    }
    this.changeLogFormat = changeLogFormat;
    this.driverClassName = driverClassName;
    this.connectionTestQuery = connectionTestQuery;
  }

  /** Validates the data source and prepares the record writer. */
  @Override
  protected List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    Target.Context context = getContext();
    if (createDataSource(issues)) {
      try {
        createRecordWriter();
      } catch (StageException e) {
        issues.add(context.createConfigIssue(
            Groups.JDBC.name(),
            changeLogFormat.getLabel(),
            Errors.JDBCDEST_13,
            e.toString()
            )
        );
      }
    }
    return issues;
  }

  /** Releases the validation connection (if still open) and the pool. */
  @Override
  public void destroy() {
    closeQuietly(connection);
    if (null != dataSource) {
      dataSource.close();
    }
    super.destroy();
  }

  /** Closes a resource, swallowing any exception (best-effort cleanup). */
  private void closeQuietly(AutoCloseable c) {
    try {
      if (null != c) {
        c.close();
      }
    } catch (Exception ignored) {
      // Intentionally ignored: cleanup must not mask the primary failure.
    }
  }

  /**
   * Instantiates the record writer matching the configured change-log format.
   *
   * @throws StageException if the writer cannot be constructed
   */
  private void createRecordWriter() throws StageException {
    switch (changeLogFormat) {
      case NONE:
        recordWriter = new JdbcGenericRecordWriter(connectionString, dataSource, tableName, rollbackOnError, customMappings);
        break;
      case MSSQL:
        recordWriter = new MicrosoftJdbcRecordWriter(connectionString, dataSource, tableName);
        break;
      default:
        throw new IllegalStateException("Unrecognized format specified: " + changeLogFormat);
    }
  }

  /**
   * Creates the pooled data source and verifies connectivity, the existence of
   * the target table, and that every custom mapping refers to an existing
   * column. Problems are reported through {@code issues}.
   *
   * Fix: the metadata {@link ResultSet}s are now closed via try-with-resources
   * and the validation connection is closed on all paths (previously both
   * ResultSets leaked and the connection stayed open on the error path until
   * {@link #destroy()}).
   *
   * @return true if the data source was created, false if it already existed
   *         or the connection attempt failed
   */
  private boolean createDataSource(List<ConfigIssue> issues) {
    if (null != dataSource) {
      return false;
    }
    HikariConfig config = new HikariConfig();
    config.setJdbcUrl(connectionString);
    config.setUsername(username);
    config.setPassword(password);
    // Commits are managed explicitly by the record writers.
    config.setAutoCommit(false);
    if (driverClassName != null && !driverClassName.isEmpty()) {
      config.setDriverClassName(driverClassName);
    }
    // These do not need to be user-configurable
    config.setMaximumPoolSize(2);
    if (connectionTestQuery != null && !connectionTestQuery.isEmpty()) {
      config.setConnectionTestQuery(connectionTestQuery);
    }
    // User configurable JDBC driver properties
    config.setDataSourceProperties(driverProperties);
    try {
      dataSource = new HikariDataSource(config);
      // Test connectivity
      connection = dataSource.getConnection();
      try (ResultSet res = JdbcUtil.getTableMetadata(connection, tableName)) {
        if (!res.next()) {
          issues.add(getContext().createConfigIssue(Groups.JDBC.name(), TABLE_NAME, Errors.JDBCDEST_16, tableName));
        } else {
          try (ResultSet columns = JdbcUtil.getColumnMetadata(connection, tableName)) {
            Set<String> columnNames = new HashSet<>();
            while (columns.next()) {
              // Column 4 of DatabaseMetaData.getColumns() is COLUMN_NAME.
              columnNames.add(columns.getString(4));
            }
            for (JdbcFieldMappingConfig customMapping : customMappings) {
              if (!columnNames.contains(customMapping.columnName)) {
                issues.add(getContext().createConfigIssue(
                    Groups.JDBC.name(), CUSTOM_MAPPINGS, Errors.JDBCDEST_04, customMapping.field, customMapping.columnName
                    )
                );
              }
            }
          }
        }
      } finally {
        // The validation connection is no longer needed; writers obtain their
        // own connections from the pool.
        closeQuietly(connection);
        connection = null;
      }
    } catch (RuntimeException | SQLException e) {
      LOG.debug("Could not connect to data source", e);
      issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STRING, Errors.JDBCDEST_00, e.toString()));
      return false;
    }
    return true;
  }

  /**
   * Writes one batch via the configured record writer and routes per-record
   * errors according to the stage's on-error policy.
   */
  @Override
  @SuppressWarnings("unchecked")
  public void write(Batch batch) throws StageException {
    List<OnRecordErrorException> errors = recordWriter.writeBatch(batch);
    for (OnRecordErrorException error : errors) {
      handleErrorRecord(error);
    }
  }

  /**
   * Applies the stage's on-record-error policy to a single failed record.
   *
   * @throws StageException when the policy is STOP_PIPELINE
   */
  private void handleErrorRecord(OnRecordErrorException error) throws StageException {
    switch (getContext().getOnErrorRecord()) {
      case DISCARD:
        break;
      case TO_ERROR:
        getContext().toError(error.getRecord(), error);
        break;
      case STOP_PIPELINE:
        throw error;
      default:
        throw new IllegalStateException(
            Utils.format("It should never happen. OnError '{}'", getContext().getOnErrorRecord(), error)
        );
    }
  }
}
| |
/*
* Copyright (C) 2003, 2004 Jason Bevins (original libnoise code)
* Copyright 2010 Thomas J. Hodge (java port of libnoise)
*
* This file is part of libnoiseforjava.
*
* libnoiseforjava is a Java port of the C++ library libnoise, which may be found at
* http://libnoise.sourceforge.net/. libnoise was developed by Jason Bevins, who may be
* contacted at jlbezigvins@gmzigail.com (for great email, take off every 'zig').
* Porting to Java was done by Thomas Hodge, who may be contacted at
* libnoisezagforjava@gzagmail.com (remove every 'zag').
*
* libnoiseforjava is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later version.
*
* libnoiseforjava is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* libnoiseforjava. If not, see <http://www.gnu.org/licenses/>.
*
*/
package libnoiseforjava.module;
import libnoiseforjava.Interp;
import libnoiseforjava.Misc;
import libnoiseforjava.exception.ExceptionInvalidParam;
/**
 * Noise module that maps the output value from a source module onto a
 * terrace-forming curve.
 *
 * The start of this curve has a slope of zero; its slope then smoothly
 * increases. The curve contains <i>control points</i> which reset the slope
 * to zero at that point, producing a "terracing" effect (often used for
 * terrain such as desert canyons).
 *
 * An application must add at least two control points (via
 * {@link #addControlPoint(double)} or {@link #makeControlPoints(int)}) before
 * calling {@link #getValue(double, double, double)}. No two control points may
 * share the same value; there is no upper limit on their number.
 *
 * Source-module output outside the range spanned by the lowest and highest
 * control points is clamped to the nearest control point.
 *
 * This noise module requires one source module.
 */
public class Terrace extends ModuleBase {

    /// Number of control points stored in this noise module.
    int controlPointCount;

    /// Determines if the terrace-forming curve between all control points
    /// is inverted.
    boolean invertTerraces;

    /// Array that stores the control points, kept sorted ascending by value.
    double[] controlPoints;

    public Terrace(ModuleBase sourceModule) throws ExceptionInvalidParam {
        super(1);
        setSourceModule(0, sourceModule);
        controlPointCount = 0;
        invertTerraces = false;
        controlPoints = new double[0];
    }

    /**
     * Adds a control point to the terrace-forming curve. Insertion order does
     * not matter; the internal array stays sorted by value.
     *
     * @param value the value of the control point to add
     * @throws ExceptionInvalidParam if a control point with this value exists
     */
    public void addControlPoint(double value) throws ExceptionInvalidParam {
        // Find the sorted insertion index and splice the new point in there,
        // keeping the control-point array ordered by value.
        int insertionPos = findInsertionPos(value);
        insertAtPos(insertionPos, value);
    }

    /**
     * Deletes all the control points on the terrace-forming curve.
     *
     * Fix: resets to an empty array (matching the constructor) instead of
     * null, so getControlPointArray() never returns null after a clear.
     */
    public void clearAllControlPoints() {
        controlPoints = new double[0];
        controlPointCount = 0;
    }

    /**
     * Determines the array index at which to insert a control point so the
     * internal array remains sorted by value (required by the curve mapping).
     *
     * @param value the value of the control point
     * @return the insertion index
     * @throws ExceptionInvalidParam if a control point with this value exists
     */
    public int findInsertionPos(double value) throws ExceptionInvalidParam {
        int insertionPos;
        for (insertionPos = 0; insertionPos < controlPointCount; insertionPos++) {
            if (value < controlPoints[insertionPos])
                // Found the slot that keeps the array sorted.
                break;
            else if (value == controlPoints[insertionPos])
                // Control point values must be unique.
                throw new ExceptionInvalidParam("Invalid Parameter in Terrace Noise Module");
        }
        return insertionPos;
    }

    /**
     * Maps the source module's output onto the terrace curve.
     * Requires a source module and at least two control points.
     */
    public double getValue(double x, double y, double z) {
        assert (sourceModules[0] != null);
        assert (controlPointCount >= 2);

        double sourceModuleValue = sourceModules[0].getValue(x, y, z);

        // First control point whose value exceeds the source output.
        int indexPos;
        for (indexPos = 0; indexPos < controlPointCount; indexPos++) {
            if (sourceModuleValue < controlPoints[indexPos])
                break;
        }

        // The two nearest control points bracket the source value.
        int index0 = Misc.ClampValue(indexPos - 1, 0, controlPointCount - 1);
        int index1 = Misc.ClampValue(indexPos, 0, controlPointCount - 1);

        // Source value outside the control-point range: clamp to the nearest
        // control point.
        if (index0 == index1)
            return controlPoints[index1];

        // Compute the alpha value used for linear interpolation.
        double value0 = controlPoints[index0];
        double value1 = controlPoints[index1];
        double alpha = (sourceModuleValue - value0) / (value1 - value0);
        if (invertTerraces) {
            alpha = 1.0 - alpha;
            double tempValue = value0;
            value0 = value1;
            value1 = tempValue;
        }

        // Squaring the alpha produces the terrace effect.
        alpha *= alpha;

        return Interp.linearInterp(value0, value1, alpha);
    }

    /**
     * Inserts a control point at the given position, reallocating the array
     * and shifting later control points up by one. The position must be the
     * one that preserves sorted order (see findInsertionPos).
     *
     * @param insertionPos zero-based insertion index
     * @param value the value of the control point
     */
    public void insertAtPos(int insertionPos, double value) {
        double[] newControlPoints = new double[controlPointCount + 1];
        System.arraycopy(controlPoints, 0, newControlPoints, 0, insertionPos);
        System.arraycopy(controlPoints, insertionPos, newControlPoints, insertionPos + 1,
                controlPointCount - insertionPos);
        controlPoints = newControlPoints;
        ++controlPointCount;
        controlPoints[insertionPos] = value;
    }

    /**
     * Replaces all control points with {@code controlPointCount} equally
     * spaced points ranging from -1 to +1.
     *
     * @param controlPointCount number of control points to generate (>= 2)
     * @throws ExceptionInvalidParam if fewer than two points are requested
     */
    void makeControlPoints(int controlPointCount) throws ExceptionInvalidParam {
        if (controlPointCount < 2)
            throw new ExceptionInvalidParam("Invalid Parameter in Terrace Noise Module");

        clearAllControlPoints();

        double terraceStep = 2.0 / (controlPointCount - 1.0);
        double curValue = -1.0;
        for (int i = 0; i < controlPointCount; i++) {
            addControlPoint(curValue);
            curValue += terraceStep;
        }
    }

    /**
     * Returns the internal array of control points, sorted by value. Call
     * getControlPointCount() for its length; do not cache the reference, as
     * it changes whenever a control point is added.
     */
    public double[] getControlPointArray() {
        return controlPoints;
    }

    /** Returns the number of control points on the terrace-forming curve. */
    public int getControlPointCount() {
        return controlPointCount;
    }

    /**
     * Enables or disables inversion of the terrace-forming curve between the
     * control points.
     *
     * Fix: the flag is now assigned unconditionally. Previously it was only
     * set when {@code invert} was true, so inversion could never be turned
     * off again.
     *
     * @param invert whether to invert the curve between the control points
     */
    public void invertTerraces(boolean invert) {
        invertTerraces = invert;
    }

    /**
     * @return true if the curve between the control points is inverted,
     *         false otherwise
     */
    public boolean isTerracesInverted() {
        return invertTerraces;
    }
}
| |
package org.robolectric.shadows;
import android.app.DownloadManager;
import android.database.Cursor;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.tester.android.database.TestCursor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.robolectric.Robolectric.shadowOf_;
/**
 * Shadows Android's DownloadManager for tests: enqueued requests are kept in
 * an in-memory map and queries are answered from it via a fake Cursor.
 */
@Implements(DownloadManager.class)
public class ShadowDownloadManager {

  private Map<Long, DownloadManager.Request> requestMap = new HashMap<Long, DownloadManager.Request>();
  private long queueCounter = -1; // First request starts at 0 just like in the real DownloadManager.

  /** Stores the request and returns its newly assigned download id. */
  @Implementation
  public long enqueue(DownloadManager.Request request) {
    queueCounter++;
    requestMap.put(queueCounter, request);
    return queueCounter;
  }

  /** Removes the given ids; returns how many were actually present. */
  @Implementation
  public int remove(long... ids) {
    int removeCount = 0;
    for (long id : ids) {
      if (requestMap.remove(id) != null) {
        removeCount++;
      }
    }
    return removeCount;
  }

  /**
   * Answers a query with a cursor over the enqueued requests matching the
   * query's id filter; unknown ids are silently skipped.
   */
  @Implementation
  public Cursor query(DownloadManager.Query query) {
    ShadowQuery shadow = shadowOf_(query);
    long[] ids = shadow.getIds();
    ResultCursor result = new ResultCursor();
    for (long id : ids) {
      DownloadManager.Request request = requestMap.get(id);
      if (request != null) {
        result.requests.add(request);
      }
    }
    return result;
  }

  /** Test helper: the request enqueued under this id, or null. */
  public DownloadManager.Request getRequest(long id) {
    return requestMap.get(id);
  }

  /** Test helper: number of currently enqueued requests. */
  public int getRequestCount() {
    return requestMap.size();
  }

  /** Shadow of DownloadManager.Request; records only description and status. */
  @Implements(DownloadManager.Request.class)
  public static class ShadowRequest {
    @RealObject
    DownloadManager.Request realObject;

    private CharSequence description;
    private int status;

    @Implementation
    public DownloadManager.Request setAllowedNetworkTypes(int flags) {
      return realObject;
    }

    @Implementation
    public DownloadManager.Request setMimeType(String mimeType) {
      return realObject;
    }

    @Implementation
    public DownloadManager.Request setTitle(CharSequence title) {
      return realObject;
    }

    @Implementation
    public DownloadManager.Request setDescription(CharSequence description) {
      this.description = description;
      return realObject;
    }

    @Implementation
    public DownloadManager.Request setAllowedOverRoaming(boolean allowed) {
      return realObject;
    }

    @Implementation
    public DownloadManager.Request setDestinationInExternalPublicDir(String dirType, String subPath) {
      return realObject;
    }

    public CharSequence getDescription() {
      return this.description;
    }

    public void setStatus(int status) {
      this.status = status;
    }

    public int getStatus() {
      return this.status;
    }
  }

  /** Shadow of DownloadManager.Query; records only the id filter. */
  @Implements(DownloadManager.Query.class)
  public static class ShadowQuery {
    @RealObject
    private DownloadManager.Query realObject;

    private long[] ids = null;

    @Implementation
    public DownloadManager.Query setFilterById(long... ids) {
      this.ids = ids;
      return realObject;
    }

    public long[] getIds() {
      return this.ids;
    }
  }

  /** Minimal Cursor over the matched requests; supports a few columns only. */
  private class ResultCursor extends TestCursor {
    private static final int COLUMN_INDEX_LOCAL_FILENAME = 0;
    private static final int COLUMN_INDEX_DESCRIPTION = 1;
    private static final int COLUMN_INDEX_REASON = 2;
    private static final int COLUMN_INDEX_STATUS = 3;

    public List<DownloadManager.Request> requests = new ArrayList<DownloadManager.Request>();
    private int positionIndex;
    private boolean closed;

    @Override
    public int getCount() {
      checkClosed();
      return requests.size();
    }

    @Override
    public boolean moveToFirst() {
      checkClosed();
      positionIndex = 0;
      return !requests.isEmpty();
    }

    /**
     * Fix: returns -1 for unknown column names, per the
     * android.database.Cursor contract. Previously it returned 0, which is
     * the valid index of COLUMN_LOCAL_FILENAME and could silently answer the
     * wrong column.
     */
    @Override
    public int getColumnIndex(String columnName) {
      checkClosed();
      if (DownloadManager.COLUMN_LOCAL_FILENAME.equals(columnName)) {
        return COLUMN_INDEX_LOCAL_FILENAME;
      } else if (DownloadManager.COLUMN_DESCRIPTION.equals(columnName)) {
        return COLUMN_INDEX_DESCRIPTION;
      } else if (DownloadManager.COLUMN_REASON.equals(columnName)) {
        return COLUMN_INDEX_REASON;
      } else if (DownloadManager.COLUMN_STATUS.equals(columnName)) {
        return COLUMN_INDEX_STATUS;
      }
      return -1;
    }

    @Override
    public void close() {
      this.closed = true;
    }

    @Override
    public boolean isClosed() {
      return closed;
    }

    @Override
    public String getString(int columnIndex) {
      checkClosed();
      ShadowRequest request = shadowOf_(requests.get(positionIndex));
      switch (columnIndex) {
        case COLUMN_INDEX_LOCAL_FILENAME:
          return "local file name not implemented";
        case COLUMN_INDEX_DESCRIPTION:
          // NOTE(review): throws NPE if no description was set — presumably
          // acceptable in tests; confirm before hardening.
          return request.getDescription().toString();
        case COLUMN_INDEX_REASON:
          return "reason not implemented";
      }
      return "Unknown ColumnIndex " + columnIndex;
    }

    @Override
    public int getInt(int columnIndex) {
      checkClosed();
      ShadowRequest request = shadowOf_(requests.get(positionIndex));
      if (columnIndex == COLUMN_INDEX_STATUS) {
        return request.getStatus();
      }
      return 0;
    }

    /** Mirrors real Cursor behavior: any access after close() is an error. */
    private void checkClosed() {
      if (closed) {
        throw new IllegalStateException("Cursor is already closed.");
      }
    }
  }
}
| |
package com.gdn.x.beirut.clientsdk;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.gdn.common.client.GdnRestClientConfiguration;
import com.gdn.common.web.wrapper.response.GdnBaseRestResponse;
import com.gdn.common.web.wrapper.response.GdnRestListResponse;
import com.gdn.common.web.wrapper.response.GdnRestSingleResponse;
import com.gdn.x.beirut.dto.request.ApplyNewPositionModelDTORequest;
import com.gdn.x.beirut.dto.request.CandidateDTORequest;
import com.gdn.x.beirut.dto.request.CandidatePositionBindRequest;
import com.gdn.x.beirut.dto.request.ListStringRequest;
import com.gdn.x.beirut.dto.request.PositionDTORequest;
import com.gdn.x.beirut.dto.request.StatusDTORequest;
import com.gdn.x.beirut.dto.request.UpdateCandidateStatusModelDTORequest;
import com.gdn.x.beirut.dto.response.CandidateDTOResponse;
import com.gdn.x.beirut.dto.response.CandidateDTOResponseWithoutDetail;
import com.gdn.x.beirut.dto.response.CandidatePositionSolrDTOResponse;
import com.gdn.x.beirut.dto.response.PositionDTOResponse;
/**
 * Integration tests for {@link BeirutApiClient} against a locally running Beirut service.
 * Most tests are {@code @Ignore}d because they depend on the fixture-creation code that is
 * currently commented out in {@link #initialize()}.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {TestConfigClientSDK.class})
public class BeirutApiClientIT {
    private static final String CONTEXT_PATH = "/beirut/api";
    private static final String STORE_ID = "1";
    private static final String CHANNEL_ID = "1";
    private static final String CLIENT_ID = "1";
    private static final Integer PORT = 8180;
    private static final String HOST = "localhost";
    private static final String PASSWORD = "DUMMY";
    private static final String USERNAME = "I_TEST_USER";
    private static final String REQUEST_ID = "1";

    // Captured at setup time so generated titles/emails are unique per run.
    private long timestamp;
    private BeirutApiClient beirutApiClient;
    private List<String> positionIds;
    private List<String> positionIds1;
    private GdnRestListResponse<PositionDTOResponse> resultPosition;
    private GdnRestListResponse<CandidateDTOResponseWithoutDetail> resultCandidate;

    @Before
    public void initialize() throws Exception { // typo "inititalize" fixed; JUnit invokes via @Before
        beirutApiClient = new BeirutApiClient(new GdnRestClientConfiguration(USERNAME, PASSWORD, HOST,
            PORT, CLIENT_ID, CHANNEL_ID, STORE_ID), CONTEXT_PATH);
        timestamp = System.currentTimeMillis();
        /*
         * Disabled fixture setup, kept because the @Ignore'd tests below read
         * resultPosition/resultCandidate/positionIds which are only populated here.
         * NOTE(review): re-enable once the environment can create test data.
         *
         * // Generate Position
         * List<PositionDTORequest> positionDTORequests = new ArrayList<PositionDTORequest>();
         * for (int i = 0; i < 5; i++) {
         *   PositionDTORequest positionDTORequest = new PositionDTORequest();
         *   positionDTORequest.setTitle("Software Developer Division " + i + " " + timestamp);
         *   positionDTORequests.add(positionDTORequest);
         * }
         * // insert Position
         * ObjectMapper objectMapper = new ObjectMapper();
         * byte[] contentDescription = {69, 103, 97};
         * for (PositionDTORequest positionDTORequest : positionDTORequests) {
         *   beirutApiClient.insertNewPosition(REQUEST_ID, USERNAME,
         *       objectMapper.writeValueAsString(positionDTORequest), "filename.txt",
         *       contentDescription);
         * }
         *
         * // get Position ids
         * positionIds = new ArrayList<String>();
         * positionIds1 = new ArrayList<String>();
         * resultPosition = beirutApiClient.getAllPositionByStoreId(REQUEST_ID, USERNAME);
         * for (PositionDTOResponse positionDTOResponse : resultPosition.getContent()) {
         *   if (positionDTOResponse.getTitle().equals("Software Developer Division 1 " + timestamp)
         *       || positionDTOResponse.getTitle().equals("Software Developer Division 4 " + timestamp)) {
         *     positionIds.add(positionDTOResponse.getId());
         *   }
         *   if (positionDTOResponse.getTitle().equals("Software Developer Division 2 " + timestamp)
         *       || positionDTOResponse.getTitle().equals("Software Developer Division 3 " + timestamp)) {
         *     positionIds1.add(positionDTOResponse.getId());
         *   }
         * }
         *
         * // assign positionIds to the new candidate
         * String candidateDTORequestString = "{\"emailAddress\": \"asda@egamail.com" + timestamp
         *     + "\",\"firstName\": \"asducup\",\"lastName\": \"sanusias\",\"phoneNumber\": \"1\",\"positionIds\": [";
         * for (String string : positionIds) { candidateDTORequestString += "\"" + string + "\","; }
         * candidateDTORequestString =
         *     candidateDTORequestString.substring(0, candidateDTORequestString.length() - 1);
         * candidateDTORequestString += "]}";
         *
         * FileInputStream inputFile = new FileInputStream(
         *     new File("src/test/resources/JSON/applyNewPositionRequest.json"));
         *
         * // inserting new candidate1
         * beirutApiClient.insertNewCandidate(REQUEST_ID, USERNAME, candidateDTORequestString,
         *     "applyNewPositionRequest.json", IOUtils.toByteArray(inputFile));
         *
         * // assign positionIds1 to the new candidate1
         * String candidateDTORequestString1 = "{\"emailAddress\": \"asda@egamail.com1" + timestamp
         *     + "\",\"firstName\": \"asducup\",\"lastName\": \"sanusias\",\"phoneNumber\": \"11\",\"positionIds\": [";
         * for (String string : positionIds1) { candidateDTORequestString1 += "\"" + string + "\","; }
         * candidateDTORequestString1 =
         *     candidateDTORequestString1.substring(0, candidateDTORequestString1.length() - 1);
         * candidateDTORequestString1 += "]}";
         *
         * FileInputStream inputFile1 = new FileInputStream(
         *     new File("src/test/resources/JSON/updateCandidateStatusRequestJson.json"));
         *
         * // inserting new candidate1
         * beirutApiClient.insertNewCandidate(REQUEST_ID, USERNAME, candidateDTORequestString1,
         *     "updateCandidateStatusRequestJson.json", IOUtils.toByteArray(inputFile1));
         *
         * // get candidate
         * resultCandidate =
         *     beirutApiClient.getAllCandidateByStoreIdWithPageable(REQUEST_ID, USERNAME, 0, 3);
         */
    }

    // UPDATED IZAL DONE
    /** Applies an extra position to an existing candidate and verifies it via Solr query. */
    @Test
    @Ignore
    public void testApplyNewPosition() throws Exception {
        String idCandidate = resultCandidate.getContent().get(0).getId();
        List<String> listString = new ArrayList<String>();
        for (PositionDTOResponse positionDTOResponse : resultPosition.getContent()) {
            if (positionDTOResponse.getTitle().equals("Software Developer Division 5 " + timestamp)) {
                listString.add(positionDTOResponse.getId());
            }
        }
        ApplyNewPositionModelDTORequest applyNewPositionModelDTORequest =
            new ApplyNewPositionModelDTORequest();
        applyNewPositionModelDTORequest.setIdCandidate(idCandidate);
        applyNewPositionModelDTORequest.setListPositionIds(listString);
        GdnBaseRestResponse result =
            beirutApiClient.applyNewPosition(REQUEST_ID, USERNAME, applyNewPositionModelDTORequest);
        Assert.assertTrue(result.isSuccess());
        GdnRestListResponse<CandidatePositionSolrDTOResponse> resultCandidatePosition =
            beirutApiClient.getCandidatePositionBySolrQuery(REQUEST_ID, USERNAME,
                "emailAddress:" + this.resultCandidate.getContent().get(0).getEmailAddress()
                    + " AND title:Software Developer Division 5 " + timestamp,
                0, 10);
        Assert.assertTrue(resultCandidatePosition.getContent().size() != 0);
    }

    /** Deletes a (hard-coded) position id and asserts the call reports success. */
    @Test
    public void testDeletePosition() throws Exception {
        ListStringRequest listStringRequest = new ListStringRequest();
        ArrayList<String> list = new ArrayList<String>();
        // NOTE(review): hard-coded id; the service presumably treats deleting an
        // absent id as success — confirm, otherwise this test is environment-dependent.
        list.add("18e914d4-d115-4aee-843a-15258263373d");
        listStringRequest.setValues(list);
        GdnBaseRestResponse response =
            beirutApiClient.deletePosition(REQUEST_ID, USERNAME, listStringRequest);
        Assert.assertTrue(response.isSuccess());
    }

    // UPDATED IZAL DONE
    /** Updates candidate fields and verifies the change by looking the candidate up again. */
    @Test
    @Ignore
    public void testUpdateCandidateInformation() throws Exception {
        CandidateDTORequest updatedCandidate = new CandidateDTORequest();
        updatedCandidate.setId(resultCandidate.getContent().get(0).getId());
        String UPDATED = "Updated";
        updatedCandidate
            .setEmailAddress(resultCandidate.getContent().get(0).getEmailAddress() + UPDATED);
        updatedCandidate.setFirstName(resultCandidate.getContent().get(0).getFirstName() + UPDATED);
        updatedCandidate.setLastName(resultCandidate.getContent().get(0).getLastName() + UPDATED);
        GdnBaseRestResponse response =
            this.beirutApiClient.updateCandidateInformation(REQUEST_ID, USERNAME, updatedCandidate);
        Assert.assertTrue(response.isSuccess());
        GdnRestSingleResponse<CandidateDTOResponse> result =
            this.beirutApiClient.findCandidateByEmailAddressAndStoreId(REQUEST_ID, USERNAME,
                resultCandidate.getContent().get(0).getEmailAddress() + UPDATED);
        Assert
            .assertTrue(result.getValue().getId().equals(resultCandidate.getContent().get(0).getId()));
    }

    // UPDATED IZAL DONE
    /** Moves matching candidates to MEDICAL status on one position and verifies via Solr. */
    @Test
    @Ignore
    public void testUpdateCandidatesStatus() throws Exception {
        String idPosition = "";
        for (PositionDTOResponse positionDTOResponse : resultPosition.getContent()) {
            if (positionDTOResponse.getTitle().equals("Software Developer Division 1 " + timestamp)) {
                idPosition = positionDTOResponse.getId();
                break;
            }
        }
        StatusDTORequest status = StatusDTORequest.MEDICAL;
        ListStringRequest listCandidateIdStrings = new ListStringRequest();
        List<String> listString = new ArrayList<String>();
        for (CandidateDTOResponseWithoutDetail candidateDTOResponseWithoutDetail : resultCandidate
            .getContent()) {
            if (candidateDTOResponseWithoutDetail.getEmailAddress()
                .equals("asda@egamail.com1" + timestamp)) {
                listString.add(candidateDTOResponseWithoutDetail.getId());
            }
        }
        listCandidateIdStrings.setValues(listString);
        UpdateCandidateStatusModelDTORequest updateCandidateStatusModelDTORequest =
            new UpdateCandidateStatusModelDTORequest();
        List<CandidatePositionBindRequest> listBind = new ArrayList<CandidatePositionBindRequest>();
        for (String candidateId : listString) {
            CandidatePositionBindRequest candidatePositionBindRequest =
                new CandidatePositionBindRequest();
            candidatePositionBindRequest.setIdCandidate(candidateId);
            candidatePositionBindRequest.setIdPosition(idPosition);
            // BUG FIX: the bind requests were built but never added to listBind,
            // so the service always received an empty bind list.
            listBind.add(candidatePositionBindRequest);
        }
        updateCandidateStatusModelDTORequest.setListBind(listBind);
        updateCandidateStatusModelDTORequest.setStatusDTORequest(status.name());
        GdnBaseRestResponse response = beirutApiClient.updateCandidatesStatus(REQUEST_ID, USERNAME,
            updateCandidateStatusModelDTORequest);
        Assert.assertTrue(response.isSuccess());
        GdnRestListResponse<CandidatePositionSolrDTOResponse> result =
            beirutApiClient.getCandidatePositionBySolrQuery(REQUEST_ID, USERNAME,
                "status:" + StatusDTORequest.MEDICAL.toString() + " AND STORE_ID:" + STORE_ID, 0, 10);
        Assert.assertTrue(result.getContent().size() == 1);
    }

    /** Renames a position and verifies exactly one position carries the new title. */
    @Test
    @Ignore
    public void testUpdatePosition() throws Exception {
        PositionDTORequest positionDTORequest = new PositionDTORequest();
        positionDTORequest.setId(positionIds.get(0));
        positionDTORequest.setTitle("New Title" + timestamp);
        beirutApiClient.updatePositionInformation(REQUEST_ID, USERNAME, positionDTORequest);
        GdnRestListResponse<PositionDTOResponse> result =
            beirutApiClient.getPositionByTitle(REQUEST_ID, USERNAME, "New Title" + timestamp);
        Assert.assertTrue(result.getContent().size() == 1);
    }
}
| |
package Framework.GridsAndAgents;
import Framework.Interfaces.DoubleToDouble;
import Framework.Tools.PDEequations;
import Framework.Utils;
import java.io.Serializable;
import java.text.DecimalFormat;
import java.util.Arrays;
/**
 * A 2D grid of doubles backed by a flat array in x-major order (index = x * yDim + y).
 * Provides element access, bulk arithmetic, reductions, string export, and
 * finite-difference gradients via {@code PDEequations}.
 * Created by Rafael on 10/24/2017.
 */
public class Grid2Ddouble extends GridBase2D implements Serializable {
    double[] field;

    /** Creates a non-wrapping xDim-by-yDim grid initialized to 0. */
    public Grid2Ddouble(int xDim, int yDim) {
        super(xDim, yDim, false, false);
        field = new double[this.xDim * this.yDim];
    }

    /** Creates an xDim-by-yDim grid with the given wrap-around behavior on each axis. */
    public Grid2Ddouble(int xDim, int yDim, boolean wrapX, boolean wrapY) {
        super(xDim, yDim, wrapX, wrapY);
        field = new double[this.xDim * this.yDim];
    }

    /** gets the current field value at the specified index */
    public double Get(int i) { return field[i]; }

    /** Returns the backing array itself (not a copy); mutations are visible to the grid. */
    public double[] GetField() {
        return this.field;
    }

    /** gets the current field value at the specified coordinates */
    public double Get(int x, int y) { return field[x * yDim + y]; }

    /** sets the current field value at the specified index */
    public void Set(int i, double val) {
        field[i] = val;
    }

    /** sets the current field value at the specified coordinates */
    public void Set(int x, int y, double val) { field[x * yDim + y] = val; }

    /** adds to the current field value at the specified coordinates */
    public void Add(int x, int y, double val) { field[x * yDim + y] += val; }

    /** multiplies the current field value at the specified coordinates */
    public void Mul(int x, int y, double val) { field[x * yDim + y] *= val; }

    /** multiplies the current field value at the specified index */
    public void Mul(int i, double val) { field[i] *= val; }

    /** adds to the current field value at the specified index */
    public void Add(int i, double val) {
        field[i] += val;
    }

    /** Bounds all values in the current field between min and max */
    public void BoundAll(double min, double max) {
        for (int i = 0; i < length; i++) {
            field[i] = Utils.Bound(field[i], min, max);
        }
    }

    /** sets all squares in the current field to the specified value */
    public void SetAll(double val) {
        Arrays.fill(field, val);
    }

    /** adds the specified value to all entries of the current field */
    public void AddAll(double val) {
        for (int i = 0; i < length; i++) {
            field[i] += val;
        }
    }

    /** multiplies all entries of the current field by the specified value */
    public void MulAll(double val) {
        for (int i = 0; i < length; i++) {
            field[i] *= val;
        }
    }

    /** Copies the given values into the field; vals must have at least length entries. */
    public void SetAll(double[] vals) {
        System.arraycopy(vals, 0, field, 0, length);
    }

    /** gets the average value of all squares in the current field */
    public double GetAvg() {
        double tot = 0;
        for (int i = 0; i < length; i++) {
            tot += field[i];
        }
        return tot / length;
    }

    /**
     * Returns the maximum value in the field.
     * BUG FIX: previously seeded with Double.MIN_VALUE (the smallest POSITIVE double),
     * which returned a wrong answer whenever every entry was negative.
     */
    public double GetMax() {
        double max = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < length; i++) {
            max = Math.max(Get(i), max);
        }
        return max;
    }

    /** Returns the minimum value in the field (seeded with +infinity for symmetry with GetMax). */
    public double GetMin() {
        double min = Double.POSITIVE_INFINITY;
        for (int i = 0; i < length; i++) {
            min = Math.min(Get(i), min);
        }
        return min;
    }

    /**
     * Returns the maximum relative difference |field[i] - compareTo[i]| / (compareTo[i] + denomOffset)
     * over all entries; denomOffset guards against division by zero.
     */
    public double MaxDifScaled(double[] compareTo, double denomOffset) {
        double maxDif = 0;
        for (int i = 0; i < field.length; i++) {
            maxDif = Math.max(maxDif, Math.abs(field[i] - compareTo[i]) / (compareTo[i] + denomOffset));
        }
        return maxDif;
    }

    /** Sets every cell on the outer boundary of the grid to the given value. */
    public void SetOuterLayer(double val) {
        for (int x = 0; x < xDim; x++) {
            Set(x, 0, val);
            Set(x, yDim - 1, val);
        }
        for (int y = 1; y < yDim; y++) {
            Set(0, y, val);
            Set(xDim - 1, y, val);
        }
    }

    /**
     * Renders the grid as a delimited matrix string, applying ValueTransform to each value
     * and formatting with the requested number of decimal digits.
     * NOTE(review): this overload emits y as rows / x as columns, while the other two
     * ToMatrixString overloads emit x as rows / y as columns — confirm which orientation
     * callers expect before unifying.
     */
    public String ToMatrixString(String delim, DoubleToDouble ValueTransform, int decimalDigits) {
        DecimalFormat df = new DecimalFormat(DigitPattern(decimalDigits));
        StringBuilder sb = new StringBuilder();
        for (int y = 0; y < yDim; y++) {
            for (int x = 0; x < xDim; x++) {
                sb.append(df.format(ValueTransform.DoubleToDouble(Get(x, y))));
                if (x != xDim - 1) {
                    sb.append(delim);
                }
            }
            sb.append("\n");
        }
        return sb.toString();
    }

    /** Renders the grid as a delimited matrix string with the given number of decimal digits. */
    public String ToMatrixString(String delim, int decimalDigits) {
        DecimalFormat df = new DecimalFormat(DigitPattern(decimalDigits));
        StringBuilder sb = new StringBuilder();
        for (int x = 0; x < xDim; x++) {
            for (int y = 0; y < yDim; y++) {
                sb.append(df.format(Get(x, y)));
                if (y != yDim - 1) {
                    sb.append(delim);
                }
            }
            sb.append("\n");
        }
        return sb.toString();
    }

    /** Renders the grid as a delimited matrix string with full double precision. */
    public String ToMatrixString(String delim) {
        StringBuilder sb = new StringBuilder();
        for (int x = 0; x < xDim; x++) {
            for (int y = 0; y < yDim; y++) {
                sb.append(Get(x, y));
                if (y != yDim - 1) {
                    sb.append(delim);
                }
            }
            sb.append("\n");
        }
        return sb.toString();
    }

    /** Builds the DecimalFormat pattern "#.0…0" with decimalDigits fractional digits. */
    private static String DigitPattern(int decimalDigits) {
        StringBuilder dfStr = new StringBuilder("#.");
        for (int i = 0; i < decimalDigits; i++) {
            dfStr.append("0");
        }
        return dfStr.toString();
    }

    /** Central-difference gradient along x using the grid's own wrapX setting. */
    public double GradientX(int x, int y) {
        double left = PDEequations.DisplacedX2D(x - 1, y, field, xDim, yDim, x, false, 0, wrapX);
        double right = PDEequations.DisplacedX2D(x + 1, y, field, xDim, yDim, x, false, 0, wrapX);
        return right - left;
    }

    /** Central-difference gradient along y using the grid's own wrapY setting. */
    public double GradientY(int x, int y) {
        double down = PDEequations.DisplacedY2D(x, y - 1, field, xDim, yDim, y, false, 0, wrapY);
        double up = PDEequations.DisplacedY2D(x, y + 1, field, xDim, yDim, y, false, 0, wrapY);
        return up - down;
    }

    /** Central-difference gradient along x with an explicit wrapX override. */
    public double GradientX(int x, int y, boolean wrapX) {
        double left = PDEequations.DisplacedX2D(x - 1, y, field, xDim, yDim, x, false, 0, wrapX);
        double right = PDEequations.DisplacedX2D(x + 1, y, field, xDim, yDim, x, false, 0, wrapX);
        return right - left;
    }

    /** Central-difference gradient along y with an explicit wrapY override. */
    public double GradientY(int x, int y, boolean wrapY) {
        double down = PDEequations.DisplacedY2D(x, y - 1, field, xDim, yDim, y, false, 0, wrapY);
        double up = PDEequations.DisplacedY2D(x, y + 1, field, xDim, yDim, y, false, 0, wrapY);
        return up - down;
    }

    /** Central-difference gradient along x, substituting boundaryCond outside the grid. */
    public double GradientX(int x, int y, double boundaryCond) {
        double left = PDEequations.DisplacedX2D(x - 1, y, field, xDim, yDim, x, true, boundaryCond, wrapX);
        double right = PDEequations.DisplacedX2D(x + 1, y, field, xDim, yDim, x, true, boundaryCond, wrapX);
        return right - left;
    }

    /** Central-difference gradient along y, substituting boundaryCond outside the grid. */
    public double GradientY(int x, int y, double boundaryCond) {
        double down = PDEequations.DisplacedY2D(x, y - 1, field, xDim, yDim, y, true, boundaryCond, wrapY);
        double up = PDEequations.DisplacedY2D(x, y + 1, field, xDim, yDim, y, true, boundaryCond, wrapY);
        return up - down;
    }
}
| |
/*
* Copyright 2012-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.ast;
import griffon.plugins.carbonado.DefaultCarbonadoProvider;
import griffon.plugins.carbonado.CarbonadoAware;
import griffon.plugins.carbonado.CarbonadoContributionHandler;
import griffon.plugins.carbonado.CarbonadoProvider;
import lombok.core.handlers.CarbonadoAwareConstants;
import org.codehaus.groovy.ast.*;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.control.messages.SimpleMessage;
import org.codehaus.groovy.transform.GroovyASTTransformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.codehaus.griffon.ast.GriffonASTUtils.*;
/**
 * Handles generation of code for the {@code @CarbonadoAware} annotation.
 * <p/>
 *
 * @author Andres Almiray
 */
@GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION)
public class CarbonadoAwareASTTransformation extends AbstractASTTransformation implements CarbonadoAwareConstants {
    private static final Logger LOG = LoggerFactory.getLogger(CarbonadoAwareASTTransformation.class);
    private static final ClassNode CARBONADO_CONTRIBUTION_HANDLER_CNODE = makeClassSafe(CarbonadoContributionHandler.class);
    private static final ClassNode CARBONADO_AWARE_CNODE = makeClassSafe(CarbonadoAware.class);
    private static final ClassNode CARBONADO_PROVIDER_CNODE = makeClassSafe(CarbonadoProvider.class);
    private static final ClassNode DEFAULT_CARBONADO_PROVIDER_CNODE = makeClassSafe(DefaultCarbonadoProvider.class);

    // Names of handler methods whose calls are delegated to the provider; kept sorted
    // so Arrays.binarySearch can be used in apply().
    private static final String[] DELEGATING_METHODS = new String[] {
        METHOD_WITH_CARBONADO
    };

    static {
        Arrays.sort(DELEGATING_METHODS);
    }

    /**
     * Convenience method to see if an annotated node is {@code @CarbonadoAware}.
     *
     * @param node the node to check
     * @return true if the node is an event publisher
     */
    public static boolean hasCarbonadoAwareAnnotation(AnnotatedNode node) {
        for (AnnotationNode annotation : node.getAnnotations()) {
            if (CARBONADO_AWARE_CNODE.equals(annotation.getClassNode())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Handles the bulk of the processing, mostly delegating to other methods.
     *
     * @param nodes  the ast nodes
     * @param source the source unit for the nodes
     */
    public void visit(ASTNode[] nodes, SourceUnit source) {
        checkNodesForAnnotationAndType(nodes[0], nodes[1]);
        addCarbonadoContributionIfNeeded(source, (ClassNode) nodes[1]);
    }

    /** Injects the contribution handler into classNode unless it already implements it. */
    public static void addCarbonadoContributionIfNeeded(SourceUnit source, ClassNode classNode) {
        if (needsCarbonadoContribution(classNode, source)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Injecting " + CarbonadoContributionHandler.class.getName() + " into " + classNode.getName());
            }
            apply(classNode);
        }
    }

    /**
     * Returns true when none of the handler methods are declared in the class hierarchy,
     * false when all four are present, and reports an error (returning false) when only
     * some are declared.
     */
    protected static boolean needsCarbonadoContribution(ClassNode declaringClass, SourceUnit sourceUnit) {
        boolean found1 = false, found2 = false, found3 = false, found4 = false;
        ClassNode consideredClass = declaringClass;
        while (consideredClass != null) {
            for (MethodNode method : consideredClass.getMethods()) {
                // just check length, MOP will match it up
                // BUG FIX: the flags were previously overwritten (=) on every method,
                // so no single method could satisfy all four signatures and the
                // "all methods present" short-circuit was unreachable, while the
                // partial-implementation error depended only on the last method
                // examined. Accumulate with |= instead.
                found1 |= method.getName().equals(METHOD_WITH_CARBONADO) && method.getParameters().length == 1;
                found2 |= method.getName().equals(METHOD_WITH_CARBONADO) && method.getParameters().length == 2;
                found3 |= method.getName().equals(METHOD_SET_CARBONADO_PROVIDER) && method.getParameters().length == 1;
                found4 |= method.getName().equals(METHOD_GET_CARBONADO_PROVIDER) && method.getParameters().length == 0;
                if (found1 && found2 && found3 && found4) {
                    return false;
                }
            }
            consideredClass = consideredClass.getSuperClass();
        }
        if (found1 || found2 || found3 || found4) {
            sourceUnit.getErrorCollector().addErrorAndContinue(
                new SimpleMessage("@CarbonadoAware cannot be processed on "
                    + declaringClass.getName()
                    + " because some but not all of methods from " + CarbonadoContributionHandler.class.getName() + " were declared in the current class or super classes.",
                    sourceUnit)
            );
            return false;
        }
        return true;
    }

    /** Injects the handler interface, the provider field/accessors, and delegating methods. */
    public static void apply(ClassNode declaringClass) {
        injectInterface(declaringClass, CARBONADO_CONTRIBUTION_HANDLER_CNODE);
        // add field:
        // protected CarbonadoProvider this$carbonadoProvider = DefaultCarbonadoProvider.instance
        FieldNode providerField = declaringClass.addField(
            CARBONADO_PROVIDER_FIELD_NAME,
            ACC_PRIVATE | ACC_SYNTHETIC,
            CARBONADO_PROVIDER_CNODE,
            defaultCarbonadoProviderInstance());
        // add method:
        // CarbonadoProvider getCarbonadoProvider() {
        //     return this$carbonadoProvider
        // }
        injectMethod(declaringClass, new MethodNode(
            METHOD_GET_CARBONADO_PROVIDER,
            ACC_PUBLIC,
            CARBONADO_PROVIDER_CNODE,
            Parameter.EMPTY_ARRAY,
            NO_EXCEPTIONS,
            returns(field(providerField))
        ));
        // add method:
        // void setCarbonadoProvider(CarbonadoProvider provider) {
        //     this$carbonadoProvider = provider ?: DefaultCarbonadoProvider.instance
        // }
        injectMethod(declaringClass, new MethodNode(
            METHOD_SET_CARBONADO_PROVIDER,
            ACC_PUBLIC,
            ClassHelper.VOID_TYPE,
            params(
                param(CARBONADO_PROVIDER_CNODE, PROVIDER)),
            NO_EXCEPTIONS,
            block(
                ifs_no_return(
                    cmp(var(PROVIDER), ConstantExpression.NULL),
                    assigns(field(providerField), defaultCarbonadoProviderInstance()),
                    assigns(field(providerField), var(PROVIDER))
                )
            )
        ));
        // For each whitelisted handler method, generate a public method with the same
        // (generics-preserving) signature that delegates to the provider field.
        for (MethodNode method : CARBONADO_CONTRIBUTION_HANDLER_CNODE.getMethods()) {
            if (Arrays.binarySearch(DELEGATING_METHODS, method.getName()) < 0) continue;
            List<Expression> variables = new ArrayList<Expression>();
            Parameter[] parameters = new Parameter[method.getParameters().length];
            for (int i = 0; i < method.getParameters().length; i++) {
                Parameter p = method.getParameters()[i];
                parameters[i] = new Parameter(makeClassSafe(p.getType()), p.getName());
                parameters[i].getType().setGenericsTypes(p.getType().getGenericsTypes());
                variables.add(var(p.getName()));
            }
            ClassNode returnType = makeClassSafe(method.getReturnType());
            returnType.setGenericsTypes(method.getReturnType().getGenericsTypes());
            returnType.setGenericsPlaceHolder(method.getReturnType().isGenericsPlaceHolder());
            MethodNode newMethod = new MethodNode(
                method.getName(),
                ACC_PUBLIC,
                returnType,
                parameters,
                NO_EXCEPTIONS,
                returns(call(
                    field(providerField),
                    method.getName(),
                    args(variables)))
            );
            newMethod.setGenericsTypes(method.getGenericsTypes());
            injectMethod(declaringClass, newMethod);
        }
    }

    /** Expression yielding DefaultCarbonadoProvider.getInstance(). */
    private static Expression defaultCarbonadoProviderInstance() {
        return call(DEFAULT_CARBONADO_PROVIDER_CNODE, "getInstance", NO_ARGS);
    }
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search.type;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.action.SearchServiceListener;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.action.search.type.TransportSearchHelper.internalSearchRequest;
/**
*
*/
public abstract class TransportSearchTypeAction extends TransportAction<SearchRequest, SearchResponse> {
protected final ClusterService clusterService;
protected final SearchServiceTransportAction searchService;
protected final SearchPhaseController searchPhaseController;
/**
 * Wires the base search transport action with the cluster service used for shard routing,
 * the per-node search transport, and the controller that merges per-shard phase results.
 */
public TransportSearchTypeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                 SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
    super(settings, threadPool);
    this.searchPhaseController = searchPhaseController;
    this.searchService = searchService;
    this.clusterService = clusterService;
}
protected abstract class BaseAsyncAction<FirstResult extends SearchPhaseResult> {
protected final ActionListener<SearchResponse> listener;
protected final GroupShardsIterator shardsIts;
protected final SearchRequest request;
protected final ClusterState clusterState;
protected final DiscoveryNodes nodes;
protected final int expectedSuccessfulOps;
private final int expectedTotalOps;
protected final AtomicInteger successulOps = new AtomicInteger();
private final AtomicInteger totalOps = new AtomicInteger();
protected final AtomicArray<FirstResult> firstResults;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final Object shardFailuresMutex = new Object();
protected volatile ScoreDoc[] sortedShardList;
protected final long startTime = System.currentTimeMillis();
/**
 * Captures a consistent cluster-state snapshot, checks read blocks, resolves routing,
 * and sizes the expected-operations counters for this search request.
 */
protected BaseAsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
    this.request = request;
    this.listener = listener;
    // Snapshot the cluster state once so all routing decisions for this request are consistent.
    this.clusterState = clusterService.state();
    nodes = clusterState.nodes();
    // Fail fast if reads are blocked globally or on any targeted concrete index.
    clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
    String[] concreteIndices = clusterState.metaData().concreteIndices(request.indices(), request.ignoreIndices(), true);
    for (String index : concreteIndices) {
        clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index);
    }
    Map<String, Set<String>> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices());
    shardsIts = clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference());
    expectedSuccessfulOps = shardsIts.size();
    // we need to add 1 for non active partition, since we count it in the total!
    expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
    firstResults = new AtomicArray<FirstResult>(shardsIts.size());
}
/**
 * Dispatches the first search phase to every shard group: remote shards are sent
 * immediately; local shards are executed according to the request's operation
 * threading mode (single thread, thread per shard, or inline).
 */
public void start() {
    if (expectedSuccessfulOps == 0) {
        // no search shards to search on, bail with empty response (it happens with search across _all with no indices around and consistent with broadcast operations)
        listener.onResponse(new SearchResponse(InternalSearchResponse.EMPTY, null, 0, 0, System.currentTimeMillis() - startTime, ShardSearchFailure.EMPTY_ARRAY));
        return;
    }
    request.beforeStart();
    // count the local operations, and perform the non local ones
    int localOperations = 0;
    int shardIndex = -1;
    for (final ShardIterator shardIt : shardsIts) {
        shardIndex++;
        final ShardRouting shard = shardIt.firstOrNull();
        if (shard != null) {
            if (shard.currentNodeId().equals(nodes.localNodeId())) {
                localOperations++;
            } else {
                // do the remote operation here, the localAsync flag is not relevant
                performFirstPhase(shardIndex, shardIt);
            }
        } else {
            // really, no shards active in this group
            onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
        }
    }
    // we have local operations, perform them now
    if (localOperations > 0) {
        if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
            request.beforeLocalFork();
            threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
                @Override
                public void run() {
                    // second pass over all shard groups, executing every local shard inline
                    // on this one search-pool thread
                    int shardIndex = -1;
                    for (final ShardIterator shardIt : shardsIts) {
                        shardIndex++;
                        final ShardRouting shard = shardIt.firstOrNull();
                        if (shard != null) {
                            if (shard.currentNodeId().equals(nodes.localNodeId())) {
                                performFirstPhase(shardIndex, shardIt);
                            }
                        }
                    }
                }
            });
        } else {
            boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
            if (localAsync) {
                request.beforeLocalFork();
            }
            shardIndex = -1;
            for (final ShardIterator shardIt : shardsIts) {
                shardIndex++;
                // effectively-final copy for capture by the anonymous Runnable below
                final int fShardIndex = shardIndex;
                final ShardRouting shard = shardIt.firstOrNull();
                if (shard != null) {
                    if (shard.currentNodeId().equals(nodes.localNodeId())) {
                        if (localAsync) {
                            try {
                                threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
                                    @Override
                                    public void run() {
                                        performFirstPhase(fShardIndex, shardIt);
                                    }
                                });
                            } catch (Throwable t) {
                                // executor rejected the task (e.g. shutdown/queue full):
                                // record it as a first-phase failure so counters stay consistent
                                onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, t);
                            }
                        } else {
                            performFirstPhase(fShardIndex, shardIt);
                        }
                    }
                }
            }
        }
    }
}
/**
 * Starts the first phase for a shard group by targeting the first available
 * shard copy of the iterator (may be null; the 3-arg overload handles that).
 */
void performFirstPhase(final int shardIndex, final ShardIterator shardIt) {
    final ShardRouting firstShard = shardIt.nextOrNull();
    performFirstPhase(shardIndex, shardIt, firstShard);
}
/**
 * Executes the first phase against a concrete shard copy. Failures to resolve
 * the shard or its node are routed through the failure handler so the shard
 * group's op accounting stays correct.
 */
void performFirstPhase(final int shardIndex, final ShardIterator shardIt, final ShardRouting shard) {
    if (shard == null) {
        // no more active shards... (we should not really get here, but just for safety)
        onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
        return;
    }
    final DiscoveryNode node = nodes.get(shard.currentNodeId());
    if (node == null) {
        // shard routing points at a node that is no longer part of the cluster
        onFirstPhaseResult(shardIndex, shard, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
        return;
    }
    String[] filteringAliases = clusterState.metaData().filteringAliases(shard.index(), request.indices());
    sendExecuteFirstPhase(node, internalSearchRequest(shard, shardsIts.size(), request, filteringAliases, startTime), new SearchServiceListener<FirstResult>() {
        @Override
        public void onResult(FirstResult result) {
            onFirstPhaseResult(shardIndex, shard, result, shardIt);
        }
        @Override
        public void onFailure(Throwable t) {
            onFirstPhaseResult(shardIndex, shard, node.id(), shardIt, t);
        }
    });
}
/**
 * Handles a successful first-phase response from one shard: tags the result with
 * its shard target, stores it, updates the op counters, and — when this response
 * completes the expected total — kicks off the second phase.
 */
void onFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result, ShardIterator shardIt) {
    result.shardTarget(new SearchShardTarget(shard.currentNodeId(), shard.index(), shard.id()));
    processFirstPhaseResult(shardIndex, shard, result);
    // increment all the "future" shards to update the total ops since we some may work and some may not...
    // and when that happens, we break on total ops, so we must maintain them
    int xTotalOps = totalOps.addAndGet(shardIt.remaining() + 1);
    successulOps.incrementAndGet();
    if (xTotalOps == expectedTotalOps) {
        // last expected response: move to the second phase; a failure during the
        // reduce is reported with any accumulated shard failures attached
        try {
            innerMoveToSecondPhase();
        } catch (Throwable e) {
            if (logger.isDebugEnabled()) {
                logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "] while moving to second phase", e);
            }
            listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
        }
    }
}
/**
 * Handles a first-phase failure for one shard copy: records the failure, then
 * either retries on the next copy in the group, or — when the expected op count
 * is reached — finishes the phase (raising an error if nothing succeeded).
 *
 * Fix: the "<summary>: Failed to execute [<request>]" log construction was
 * duplicated three times; it is extracted into {@link #firstPhaseFailureMessage}.
 * Behavior (including log levels and guard conditions) is unchanged.
 */
void onFirstPhaseResult(final int shardIndex, @Nullable ShardRouting shard, @Nullable String nodeId, final ShardIterator shardIt, Throwable t) {
    // we always add the shard failure for a specific shard instance
    // we do make sure to clean it on a successful response from a shard
    SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId().getIndex(), shardIt.shardId().getId());
    addShardFailure(shardIndex, shardTarget, t);
    if (totalOps.incrementAndGet() == expectedTotalOps) {
        // this failure completes the expected op count: wrap up the phase now
        if (logger.isDebugEnabled() && t != null && !TransportActions.isShardNotAvailableException(t)) {
            logger.debug(firstPhaseFailureMessage(shard, shardIt), t);
        }
        if (successulOps.get() == 0) {
            if (logger.isDebugEnabled()) {
                logger.debug("All shards failed for phase: [{}]", firstPhaseName(), t);
            }
            // no successful ops, raise an exception
            listener.onFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", buildShardFailures()));
        } else {
            try {
                innerMoveToSecondPhase();
            } catch (Throwable e) {
                listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
            }
        }
    } else {
        ShardRouting nextShard = shardIt.nextOrNull();
        if (nextShard != null) {
            // another copy of this shard is available: trace log and retry on it
            if (logger.isTraceEnabled() && t != null) {
                logger.trace(firstPhaseFailureMessage(shard, shardIt), t);
            }
            performFirstPhase(shardIndex, shardIt, nextShard);
        } else {
            // no more shards active, the failure recorded above stands
            if (logger.isDebugEnabled() && t != null && !TransportActions.isShardNotAvailableException(t)) {
                logger.debug(firstPhaseFailureMessage(shard, shardIt), t);
            }
        }
    }
}
/**
 * Builds the shared failure log message, preferring the concrete shard's
 * summary and falling back to the shard group's id when the shard is unknown.
 */
private String firstPhaseFailureMessage(@Nullable ShardRouting shard, ShardIterator shardIt) {
    if (shard != null) {
        return shard.shortSummary() + ": Failed to execute [" + request + "]";
    }
    return shardIt.shardId() + ": Failed to execute [" + request + "]";
}
/**
 * Computes how long the search has been executing, in milliseconds,
 * measured as wall-clock time elapsed since {@code startTime}.
 */
protected final long buildTookInMillis() {
    final long now = System.currentTimeMillis();
    return now - startTime;
}
/**
 * Snapshots the per-shard failures accumulated so far into a plain array;
 * returns the shared empty array when no failure has ever been recorded
 * (the failure container is created lazily).
 */
protected final ShardSearchFailure[] buildShardFailures() {
    // read the lazily-initialized field once and work off that snapshot
    AtomicArray<ShardSearchFailure> failureArray = this.shardFailures;
    if (failureArray == null) {
        return ShardSearchFailure.EMPTY_ARRAY;
    }
    List<AtomicArray.Entry<ShardSearchFailure>> entries = failureArray.asList();
    ShardSearchFailure[] result = new ShardSearchFailure[entries.size()];
    int pos = 0;
    for (AtomicArray.Entry<ShardSearchFailure> entry : entries) {
        result[pos++] = entry.value;
    }
    return result;
}
/**
 * Records a failure for the given shard index, lazily allocating the failure
 * array on first use. Unavailable-shard exceptions are dropped (they are
 * counted elsewhere), and an existing recorded failure is only replaced by a
 * more meaningful one.
 */
protected final void addShardFailure(final int shardIndex, @Nullable SearchShardTarget shardTarget, Throwable t) {
    // we don't aggregate shard failures on non active shards (but do keep the header counts right)
    if (TransportActions.isShardNotAvailableException(t)) {
        return;
    }
    // lazily create shard failures, so we can early build the empty shard failure list in most cases (no failures)
    // NOTE(review): double-checked locking on shardFailures — this is only safe if the
    // field is declared volatile (declaration not visible in this chunk); confirm.
    if (shardFailures == null) {
        synchronized (shardFailuresMutex) {
            if (shardFailures == null) {
                shardFailures = new AtomicArray<ShardSearchFailure>(shardsIts.size());
            }
        }
    }
    ShardSearchFailure failure = shardFailures.get(shardIndex);
    if (failure == null) {
        // first failure seen for this shard index
        shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
    } else {
        // the failure is already present, try and not override it with an exception that is less meaningless
        // for example, getting illegal shard state
        if (TransportActions.isReadOverrideException(t)) {
            shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
        }
    }
}
/**
 * Frees server-side search contexts for shards whose query results will not be
 * fetched from (no doc ids selected). Contexts are kept alive when scrolling,
 * since the scroll needs them for subsequent requests.
 */
protected void releaseIrrelevantSearchContexts(AtomicArray<? extends QuerySearchResultProvider> queryResults,
                                               AtomicArray<IntArrayList> docIdsToLoad) {
    // nothing to do when no docs were selected, or when a scroll keeps contexts alive
    if (docIdsToLoad == null || request.scroll() != null) {
        return;
    }
    for (AtomicArray.Entry<? extends QuerySearchResultProvider> queryEntry : queryResults.asList()) {
        if (docIdsToLoad.get(queryEntry.index) != null) {
            continue; // this shard's context is still needed by the fetch phase
        }
        DiscoveryNode node = nodes.get(queryEntry.value.queryResult().shardTarget().nodeId());
        if (node != null) { // should not happen (==null) but safeguard anyhow
            searchService.sendFreeContext(node, queryEntry.value.queryResult().id(), request);
        }
    }
}
/**
 * Sends the first-phase request for a single shard to the given node; the
 * listener receives that shard's result or failure asynchronously.
 */
protected abstract void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<FirstResult> listener);
/**
 * Stores a successful first-phase result and clears any failure previously
 * recorded for the same shard group.
 */
protected final void processFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result) {
    firstResults.set(shardIndex, result);
    // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level
    // so its ok concurrency wise to miss potentially the shard failures being created because of another failure
    // in the #addShardFailure, because by definition, it will happen on *another* shardIndex
    AtomicArray<ShardSearchFailure> recordedFailures = this.shardFailures;
    if (recordedFailures != null) {
        recordedFailures.set(shardIndex, null);
    }
}
/**
 * Transitions from the first phase to the second; when trace logging is on,
 * first logs a comma-separated list of the shard targets that produced results
 * (null entries are failed shards and are skipped).
 */
final void innerMoveToSecondPhase() throws Exception {
    if (logger.isTraceEnabled()) {
        StringBuilder targets = new StringBuilder();
        String separator = "";
        for (int i = 0; i < firstResults.length(); i++) {
            FirstResult firstResult = firstResults.get(i);
            if (firstResult != null) {
                targets.append(separator).append(firstResult.shardTarget());
                separator = ",";
            }
        }
        logger.trace("Moving to second phase, based on results from: {} (cluster state version: {})", targets, clusterState.version());
    }
    moveToSecondPhase();
}
/** Runs the phase that follows the first (per-shard) phase. */
protected abstract void moveToSecondPhase() throws Exception;
/** Name of the first phase, used when reporting phase failures. */
protected abstract String firstPhaseName();
}
}
| |
/*================================================================================
Copyright (c) 2008 VMware, Inc. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25.mo;
import java.rmi.RemoteException;
import com.vmware.vim25.InvalidProperty;
import com.vmware.vim25.InvalidState;
import com.vmware.vim25.LocalizableMessage;
import com.vmware.vim25.LocalizedMethodFault;
import com.vmware.vim25.ManagedObjectReference;
import com.vmware.vim25.MethodFault;
import com.vmware.vim25.OutOfBounds;
import com.vmware.vim25.RuntimeFault;
import com.vmware.vim25.TaskInfo;
import com.vmware.vim25.TaskInfoState;
/**
* The managed object class corresponding to the one defined in VI SDK API reference.
* @author Steve JIN (http://www.doublecloud.org)
*/
/**
 * Client-side wrapper for the vSphere {@code Task} managed object: exposes the
 * task's {@code info} property and helpers to cancel, update, and wait on the
 * task until it reaches a terminal state.
 */
public class Task extends ExtensibleManagedObject
{
    /** Server-side property path of the task's {@link TaskInfo}. */
    public static final String PROPNAME_INFO = "info";
    /** Value returned by {@link #waitForMe()} when the task completed successfully. */
    public static final String SUCCESS = "success";
    public Task(ServerConnection serverConnection, ManagedObjectReference mor)
    {
        super(serverConnection, mor);
    }
    /**
     * Reads the task's current {@link TaskInfo} from the server.
     * NOTE(review): per the comment in waitForTask below, this may return null
     * under load even when valid task info exists.
     */
    public TaskInfo getTaskInfo() throws InvalidProperty, RuntimeFault, RemoteException
    {
        return (TaskInfo) getCurrentProperty(PROPNAME_INFO);
    }
    /** Returns the managed entity this task operates on ({@code info.entity}). */
    public ManagedEntity getAssociatedManagedEntity()
    {
        return (ManagedEntity) getManagedObject("info.entity");
    }
    /** Returns the managed entities locked by this task ({@code info.locked}). */
    public ManagedEntity[] getLockedManagedEntities()
    {
        return (ManagedEntity[]) getManagedObjects("info.locked");
    }
    /** Requests cancellation of this task on the server. */
    public void cancelTask() throws RuntimeFault, RemoteException
    {
        getVimService().cancelTask(getMOR());
    }
    /** Sets the task's state, result and fault on the server. */
    public void setTaskState(TaskInfoState tis, Object result, LocalizedMethodFault fault) throws InvalidState, RuntimeFault, RemoteException
    {
        getVimService().setTaskState(getMOR(), tis, result, fault);
    }
    /** Updates the task's progress percentage on the server. */
    public void updateProgress(int percentDone) throws InvalidState, OutOfBounds, RuntimeFault, RemoteException
    {
        getVimService().updateProgress(getMOR(), percentDone);
    }
    /** @since SDK4.0 */
    public void setTaskDescription(LocalizableMessage description) throws RuntimeFault, RemoteException
    {
        getVimService().setTaskDescription(getMOR(), description);
    }
    /**
     * Blocks until the task reaches a terminal state (success or error).
     *
     * If there is another thread or client calling waitForUpdate(), the behavior of this
     * method is not predictable. This usually happens with a VI Client plug-in which shares
     * the session with the VI Client, which uses waitForUpdate() extensively.
     * The safer way is to poll the related info.state and check its value.
     * @return {@link #SUCCESS} on success; otherwise a generic error string
     *         (the server-side fault is rethrown instead when available)
     * @throws InvalidProperty
     * @throws RuntimeFault
     * @throws RemoteException
     * @deprecated use {@link #waitForTask()} / polling of info.state instead
     */
    public String waitForMe() throws InvalidProperty, RuntimeFault, RemoteException
    {
        // Wait server-side until info.state becomes success or error.
        Object[] result = waitForValues(
            new String[] { "info.state", "info.error" },
            new String[] { "state" },
            new Object[][] { new Object[] { TaskInfoState.success, TaskInfoState.error } });
        if (result[0].equals(TaskInfoState.success))
        {
            return SUCCESS;
        }
        else
        {
            // Task ended in error: re-read info to obtain the fault details.
            TaskInfo tinfo = (TaskInfo) getCurrentProperty(PROPNAME_INFO);
            LocalizedMethodFault fault = tinfo.getError();
            String error = "Error Occured";
            if(fault!=null)
            {
                MethodFault mf = fault.getFault();
                // NOTE(review): rethrows the server-reported fault directly; this
                // compiles only because MethodFault is assignable to a declared
                // exception type in these vim25 bindings — confirm against the SDK.
                throw mf;
            }
            // No fault details were attached; fall back to a generic message.
            return error;
        }
    }
    /**
     * Copyright 2009 NetApp, contribution by Eric Forgette
     *
     * This is a "drop-in" replacement for waitForMe() that uses a timed polling
     * in place of waitForValues.
     *
     * This method will eat 3 exceptions while trying to get TaskInfo and TaskState.
     * On the fourth try, the captured exception is thrown.
     *
     * @return String based on TaskInfoState
     * @throws RuntimeFault
     * @throws RemoteException
     * @throws InterruptedException
     * @throws RuntimeException if the third exception is not RuntimeFault or RemoteException
     *
     * @author Eric Forgette (forgette@netapp.com)
     */
    public String waitForTask() throws RuntimeFault, RemoteException, InterruptedException
    {
        // Default polling delays: 500ms while running, 1000ms while queued.
        return waitForTask(500,1000);
    }
    /**
     * Copyright 2009 NetApp, contribution by Eric Forgette
     *
     * This is a replacement for waitForMe() that uses a timed polling
     * in place of waitForValues. The delay between each poll is
     * configurable based on the last seen task state. The method will sleep
     * for the number of milliseconds specified in runningDelayInMillSecond
     * while the task is in the running state.
     * The method will sleep for the number of milliseconds specified
     * in queuedDelayInMillSecond while the task is in the queued state.
     *
     * This method will eat 3 exceptions while trying to get TaskInfo and TaskState.
     * On the fourth try, the captured exception is thrown.
     *
     * @param runningDelayInMillSecond - number of milliseconds to sleep between polls for a running task
     * @param queuedDelayInMillSecond - number of milliseconds to sleep between polls for a queued task
     * @return String based on TaskInfoState
     * @throws RuntimeFault
     * @throws RemoteException
     * @throws InterruptedException
     * @throws RuntimeException if the third exception is not RuntimeFault or RemoteException
     *
     * @author Eric Forgette (forgette@netapp.com)
     */
    public String waitForTask(int runningDelayInMillSecond, int queuedDelayInMillSecond) throws RuntimeFault, RemoteException, InterruptedException
    {
        TaskInfoState tState = null;
        int tries = 0;
        int maxTries=3;
        Exception getInfoException = null;
        // Poll until the task leaves the queued/running states.
        // NOTE(review): as written, one extra sleep (queuedDelayInMillSecond) occurs
        // after a terminal state is first observed, before the loop exits.
        while((tState == null) || tState.equals(TaskInfoState.running) || tState.equals(TaskInfoState.queued))
        {
            tState = null;
            getInfoException = null;
            tries=0;
            // under load getTaskInfo may return null when there really is valid task info, so we try 3 times to get it.
            while (tState==null)
            {
                tries++;
                if (tries > maxTries)
                {
                    // Retries exhausted: rethrow the last captured exception as precisely as possible.
                    if (getInfoException == null)
                    {
                        // getTaskInfo() kept returning null without throwing anything.
                        throw new NullPointerException();
                    }
                    else if (getInfoException instanceof RuntimeFault)
                    {
                        throw (RuntimeFault) getInfoException;
                    }
                    else if (getInfoException instanceof RemoteException )
                    {
                        throw (RemoteException) getInfoException;
                    }
                    else
                    {
                        // Unexpected exception type: wrap so the caller still sees the cause.
                        throw new RuntimeException(getInfoException);
                    }
                }
                try
                {
                    tState = getTaskInfo().getState();
                }
                catch (Exception e)
                {
                    //silently catch 3 exceptions
                    getInfoException=e;
                }
            }
            // sleep for a specified time based on task state.
            if (tState.equals(TaskInfoState.running))
            {
                Thread.sleep(runningDelayInMillSecond);
            }
            else
            {
                Thread.sleep(queuedDelayInMillSecond);
            }
        }
        // Terminal state reached; return its string form.
        return tState.toString();
    }
}
| |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andres Almiray
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kordamp.ikonli.material2;
import org.kordamp.ikonli.Ikon;
/**
* @author Andres Almiray
*/
public enum Material2OutlinedMZ implements Ikon {
MAIL("mdomz-mail", '\ue3de'),
MAIL_OUTLINE("mdomz-mail_outline", '\ue3e0'),
MAP("mdomz-map", '\ue3e1'),
MAPS_UGC("mdomz-maps_ugc", '\ue7c8'),
MARK_CHAT_READ("mdomz-mark_chat_read", '\ue7ca'),
MARK_CHAT_UNREAD("mdomz-mark_chat_unread", '\ue7cc'),
MARK_EMAIL_READ("mdomz-mark_email_read", '\ue7ce'),
MARK_EMAIL_UNREAD("mdomz-mark_email_unread", '\ue7d0'),
MARKUNREAD("mdomz-markunread", '\ue3e3'),
MARKUNREAD_MAILBOX("mdomz-markunread_mailbox", '\ue3e5'),
MASKS("mdomz-masks", '\ue8ab'),
MAXIMIZE("mdomz-maximize", '\ue3e7'),
MEDIATION("mdomz-mediation", '\ue7d2'),
MEDICAL_SERVICES("mdomz-medical_services", '\ue7d3'),
MEETING_ROOM("mdomz-meeting_room", '\ue3e8'),
MEMORY("mdomz-memory", '\ue3ea'),
MENU("mdomz-menu", '\ue3ec'),
MENU_BOOK("mdomz-menu_book", '\ue3ed'),
MENU_OPEN("mdomz-menu_open", '\ue3ef'),
MERGE_TYPE("mdomz-merge_type", '\ue3f0'),
MESSAGE("mdomz-message", '\ue3f1'),
MIC("mdomz-mic", '\ue3f3'),
MIC_NONE("mdomz-mic_none", '\ue3f5'),
MIC_OFF("mdomz-mic_off", '\ue3f7'),
MICROWAVE("mdomz-microwave", '\ue879'),
MILITARY_TECH("mdomz-military_tech", '\ue7d5'),
MINIMIZE("mdomz-minimize", '\ue3f9'),
MINUS("mdomz-minus", '\ue3fa'),
MISCELLANEOUS_SERVICES("mdomz-miscellaneous_services", '\ue7d7'),
MISSED_VIDEO_CALL("mdomz-missed_video_call", '\ue3fb'),
MMS("mdomz-mms", '\ue3fd'),
MOBILE_FRIENDLY("mdomz-mobile_friendly", '\ue3ff'),
MOBILE_OFF("mdomz-mobile_off", '\ue400'),
MOBILE_SCREEN_SHARE("mdomz-mobile_screen_share", '\ue401'),
MODE_COMMENT("mdomz-mode_comment", '\ue403'),
MODEL_TRAINING("mdomz-model_training", '\ue7d8'),
MONETIZATION_ON("mdomz-monetization_on", '\ue405'),
MONEY("mdomz-money", '\ue407'),
MONEY_OFF("mdomz-money_off", '\ue409'),
MONOCHROME_PHOTOS("mdomz-monochrome_photos", '\ue40a'),
MOOD("mdomz-mood", '\ue40c'),
MOOD_BAD("mdomz-mood_bad", '\ue40e'),
MOPED("mdomz-moped", '\ue7d9'),
MORE("mdomz-more", '\ue410'),
MORE_HORIZ("mdomz-more_horiz", '\ue412'),
MORE_TIME("mdomz-more_time", '\ue7db'),
MORE_VERT("mdomz-more_vert", '\ue413'),
MOTION_PHOTOS_ON("mdomz-motion_photos_on", '\ue8ad'),
MOTION_PHOTOS_PAUSE("mdomz-motion_photos_pause", '\ue8bd'),
MOTION_PHOTOS_PAUSED("mdomz-motion_photos_paused", '\ue8ae'),
MOTORCYCLE("mdomz-motorcycle", '\ue414'),
MOUSE("mdomz-mouse", '\ue416'),
MOVE_TO_INBOX("mdomz-move_to_inbox", '\ue418'),
MOVIE("mdomz-movie", '\ue41a'),
MOVIE_CREATION("mdomz-movie_creation", '\ue41c'),
MOVIE_FILTER("mdomz-movie_filter", '\ue41e'),
MULTILINE_CHART("mdomz-multiline_chart", '\ue420'),
MULTIPLE_STOP("mdomz-multiple_stop", '\ue7dc'),
MUSEUM("mdomz-museum", '\ue421'),
MUSIC_NOTE("mdomz-music_note", '\ue423'),
MUSIC_OFF("mdomz-music_off", '\ue425'),
MUSIC_VIDEO("mdomz-music_video", '\ue427'),
MY_LOCATION("mdomz-my_location", '\ue429'),
NAT("mdomz-nat", '\ue7dd'),
NATURE("mdomz-nature", '\ue42b'),
NATURE_PEOPLE("mdomz-nature_people", '\ue42d'),
NAVIGATE_BEFORE("mdomz-navigate_before", '\ue42f'),
NAVIGATE_NEXT("mdomz-navigate_next", '\ue430'),
NAVIGATION("mdomz-navigation", '\ue431'),
NEAR_ME("mdomz-near_me", '\ue433'),
NEAR_ME_DISABLED("mdomz-near_me_disabled", '\ue87b'),
NETWORK_CELL("mdomz-network_cell", '\ue435'),
NETWORK_CHECK("mdomz-network_check", '\ue437'),
NETWORK_LOCKED("mdomz-network_locked", '\ue438'),
NETWORK_WIFI("mdomz-network_wifi", '\ue439'),
NEW_RELEASES("mdomz-new_releases", '\ue43b'),
NEXT_PLAN("mdomz-next_plan", '\ue7df'),
NEXT_WEEK("mdomz-next_week", '\ue43d'),
NFC("mdomz-nfc", '\ue43f'),
NIGHT_SHELTER("mdomz-night_shelter", '\ue87d'),
NIGHTS_STAY("mdomz-nights_stay", '\ue440'),
NO_BACKPACK("mdomz-no_backpack", '\ue8be'),
NO_CELL("mdomz-no_cell", '\ue7e1'),
NO_DRINKS("mdomz-no_drinks", '\ue7e3'),
NO_ENCRYPTION("mdomz-no_encryption", '\ue442'),
NO_FLASH("mdomz-no_flash", '\ue7e5'),
NO_FOOD("mdomz-no_food", '\ue7e7'),
NO_LUGGAGE("mdomz-no_luggage", '\ue8c0'),
NO_MEALS("mdomz-no_meals", '\ue87f'),
NO_MEETING_ROOM("mdomz-no_meeting_room", '\ue444'),
NO_PHOTOGRAPHY("mdomz-no_photography", '\ue7e9'),
NO_SIM("mdomz-no_sim", '\ue446'),
NO_STROLLER("mdomz-no_stroller", '\ue7eb'),
NO_TRANSFER("mdomz-no_transfer", '\ue880'),
NORTH("mdomz-north", '\ue882'),
NORTH_EAST("mdomz-north_east", '\ue883'),
NORTH_WEST("mdomz-north_west", '\ue884'),
NOT_ACCESSIBLE("mdomz-not_accessible", '\ue7ed'),
NOT_EQUAL("mdomz-not_equal", '\ue448'),
NOT_INTERESTED("mdomz-not_interested", '\ue449'),
NOT_LISTED_LOCATION("mdomz-not_listed_location", '\ue44a'),
NOT_STARTED("mdomz-not_started", '\ue7ee'),
NOTE("mdomz-note", '\ue44c'),
NOTE_ADD("mdomz-note_add", '\ue44e'),
NOTES("mdomz-notes", '\ue450'),
NOTIFICATION_IMPORTANT("mdomz-notification_important", '\ue451'),
NOTIFICATIONS("mdomz-notifications", '\ue453'),
NOTIFICATIONS_ACTIVE("mdomz-notifications_active", '\ue455'),
NOTIFICATIONS_NONE("mdomz-notifications_none", '\ue457'),
NOTIFICATIONS_OFF("mdomz-notifications_off", '\ue459'),
NOTIFICATIONS_PAUSED("mdomz-notifications_paused", '\ue45b'),
OFFLINE_BOLT("mdomz-offline_bolt", '\ue45d'),
OFFLINE_PIN("mdomz-offline_pin", '\ue45f'),
ONDEMAND_VIDEO("mdomz-ondemand_video", '\ue461'),
ONLINE_PREDICTION("mdomz-online_prediction", '\ue7f0'),
OPACITY("mdomz-opacity", '\ue463'),
OPEN_IN_BROWSER("mdomz-open_in_browser", '\ue465'),
OPEN_IN_FULL("mdomz-open_in_full", '\ue7f1'),
OPEN_IN_NEW("mdomz-open_in_new", '\ue466'),
OPEN_WITH("mdomz-open_with", '\ue467'),
OUTBOND("mdomz-outbond", '\ue8c2'),
OUTDOOR_GRILL("mdomz-outdoor_grill", '\ue468'),
OUTLET("mdomz-outlet", '\ue7f2'),
OUTLINED_FLAG("mdomz-outlined_flag", '\ue46a'),
PAGES("mdomz-pages", '\ue46b'),
PAGEVIEW("mdomz-pageview", '\ue46d'),
PALETTE("mdomz-palette", '\ue46f'),
PAN_TOOL("mdomz-pan_tool", '\ue471'),
PANORAMA("mdomz-panorama", '\ue473'),
PANORAMA_FISH_EYE("mdomz-panorama_fish_eye", '\ue475'),
PANORAMA_HORIZONTAL("mdomz-panorama_horizontal", '\ue477'),
PANORAMA_VERTICAL("mdomz-panorama_vertical", '\ue479'),
PANORAMA_WIDE_ANGLE("mdomz-panorama_wide_angle", '\ue47b'),
PARTY_MODE("mdomz-party_mode", '\ue47d'),
PAUSE("mdomz-pause", '\ue47f'),
PAUSE_CIRCLE_FILLED("mdomz-pause_circle_filled", '\ue480'),
PAUSE_CIRCLE_OUTLINE("mdomz-pause_circle_outline", '\ue482'),
PAUSE_PRESENTATION("mdomz-pause_presentation", '\ue483'),
PAYMENT("mdomz-payment", '\ue485'),
PAYMENTS("mdomz-payments", '\ue7f4'),
PEDAL_BIKE("mdomz-pedal_bike", '\ue7f6'),
PENDING("mdomz-pending", '\ue7f7'),
PENDING_ACTIONS("mdomz-pending_actions", '\ue7f9'),
PEOPLE("mdomz-people", '\ue487'),
PEOPLE_ALT("mdomz-people_alt", '\ue489'),
PEOPLE_OUTLINE("mdomz-people_outline", '\ue48b'),
PERCENTAGE("mdomz-percentage", '\ue48d'),
PERM_CAMERA_MIC("mdomz-perm_camera_mic", '\ue48f'),
PERM_CONTACT_CALENDAR("mdomz-perm_contact_calendar", '\ue491'),
PERM_DATA_SETTING("mdomz-perm_data_setting", '\ue493'),
PERM_DEVICE_INFORMATION("mdomz-perm_device_information", '\ue494'),
PERM_IDENTITY("mdomz-perm_identity", '\ue496'),
PERM_MEDIA("mdomz-perm_media", '\ue498'),
PERM_PHONE_MSG("mdomz-perm_phone_msg", '\ue49a'),
PERM_SCAN_WIFI("mdomz-perm_scan_wifi", '\ue49c'),
PERSON("mdomz-person", '\ue49e'),
PERSON_ADD("mdomz-person_add", '\ue4a0'),
PERSON_ADD_ALT_1("mdomz-person_add_alt_1", '\ue7fb'),
PERSON_ADD_DISABLED("mdomz-person_add_disabled", '\ue4a2'),
PERSON_OUTLINE("mdomz-person_outline", '\ue4a4'),
PERSON_PIN("mdomz-person_pin", '\ue4a6'),
PERSON_PIN_CIRCLE("mdomz-person_pin_circle", '\ue4a8'),
PERSON_REMOVE("mdomz-person_remove", '\ue7fd'),
PERSON_REMOVE_ALT_1("mdomz-person_remove_alt_1", '\ue7ff'),
PERSON_SEARCH("mdomz-person_search", '\ue801'),
PERSONAL_VIDEO("mdomz-personal_video", '\ue4aa'),
PEST_CONTROL("mdomz-pest_control", '\ue803'),
PEST_CONTROL_RODENT("mdomz-pest_control_rodent", '\ue805'),
PETS("mdomz-pets", '\ue4ac'),
PHONE("mdomz-phone", '\ue4ad'),
PHONE_ANDROID("mdomz-phone_android", '\ue4af'),
PHONE_BLUETOOTH_SPEAKER("mdomz-phone_bluetooth_speaker", '\ue4b1'),
PHONE_CALLBACK("mdomz-phone_callback", '\ue4b3'),
PHONE_DISABLED("mdomz-phone_disabled", '\ue4b5'),
PHONE_ENABLED("mdomz-phone_enabled", '\ue4b6'),
PHONE_FORWARDED("mdomz-phone_forwarded", '\ue4b7'),
PHONE_IN_TALK("mdomz-phone_in_talk", '\ue4b9'),
PHONE_IPHONE("mdomz-phone_iphone", '\ue4bb'),
PHONE_LOCKED("mdomz-phone_locked", '\ue4bd'),
PHONE_MISSED("mdomz-phone_missed", '\ue4bf'),
PHONE_PAUSED("mdomz-phone_paused", '\ue4c1'),
PHONELINK("mdomz-phonelink", '\ue4c3'),
PHONELINK_ERASE("mdomz-phonelink_erase", '\ue4c5'),
PHONELINK_LOCK("mdomz-phonelink_lock", '\ue4c6'),
PHONELINK_OFF("mdomz-phonelink_off", '\ue4c7'),
PHONELINK_RING("mdomz-phonelink_ring", '\ue4c9'),
PHONELINK_SETUP("mdomz-phonelink_setup", '\ue4cb'),
PHOTO("mdomz-photo", '\ue4cc'),
PHOTO_ALBUM("mdomz-photo_album", '\ue4ce'),
PHOTO_CAMERA("mdomz-photo_camera", '\ue4d0'),
PHOTO_FILTER("mdomz-photo_filter", '\ue4d2'),
PHOTO_LIBRARY("mdomz-photo_library", '\ue4d3'),
PHOTO_SIZE_SELECT_ACTUAL("mdomz-photo_size_select_actual", '\ue4d5'),
PHOTO_SIZE_SELECT_LARGE("mdomz-photo_size_select_large", '\ue4d7'),
PHOTO_SIZE_SELECT_SMALL("mdomz-photo_size_select_small", '\ue4d8'),
PICTURE_AS_PDF("mdomz-picture_as_pdf", '\ue4d9'),
PICTURE_IN_PICTURE("mdomz-picture_in_picture", '\ue4db'),
PICTURE_IN_PICTURE_ALT("mdomz-picture_in_picture_alt", '\ue4dd'),
PIE_CHART("mdomz-pie_chart", '\ue4df'),
PIN("mdomz-pin", '\ue4e1'),
PIN_DROP("mdomz-pin_drop", '\ue4e3'),
PIN_OFF("mdomz-pin_off", '\ue4e5'),
PLACE("mdomz-place", '\ue4e7'),
PLAGIARISM("mdomz-plagiarism", '\ue807'),
PLAY_ARROW("mdomz-play_arrow", '\ue4e9'),
PLAY_CIRCLE_FILLED("mdomz-play_circle_filled", '\ue4eb'),
PLAY_CIRCLE_FILLED_WHITE("mdomz-play_circle_filled_white", '\ue4ed'),
PLAY_CIRCLE_OUTLINE("mdomz-play_circle_outline", '\ue4ef'),
PLAY_FOR_WORK("mdomz-play_for_work", '\ue4f0'),
PLAYLIST_ADD("mdomz-playlist_add", '\ue4f1'),
PLAYLIST_ADD_CHECK("mdomz-playlist_add_check", '\ue4f2'),
PLAYLIST_PLAY("mdomz-playlist_play", '\ue4f3'),
PLUMBING("mdomz-plumbing", '\ue809'),
PLUS("mdomz-plus", '\ue4f4'),
PLUS_MINUS("mdomz-plus_minus", '\ue4f5'),
PLUS_MINUS_ALT("mdomz-plus_minus_alt", '\ue4f6'),
PLUS_ONE("mdomz-plus_one", '\ue4f7'),
POINT_OF_SALE("mdomz-point_of_sale", '\ue80a'),
POLICY("mdomz-policy", '\ue4f8'),
POLL("mdomz-poll", '\ue4fa'),
POLYMER("mdomz-polymer", '\ue4fc'),
POOL("mdomz-pool", '\ue4fd'),
PORTABLE_WIFI_OFF("mdomz-portable_wifi_off", '\ue4ff'),
PORTRAIT("mdomz-portrait", '\ue500'),
POST_ADD("mdomz-post_add", '\ue502'),
POWER("mdomz-power", '\ue503'),
POWER_INPUT("mdomz-power_input", '\ue505'),
POWER_OFF("mdomz-power_off", '\ue506'),
POWER_SETTINGS_NEW("mdomz-power_settings_new", '\ue508'),
PREGNANT_WOMAN("mdomz-pregnant_woman", '\ue509'),
PRESENT_TO_ALL("mdomz-present_to_all", '\ue50a'),
PREVIEW("mdomz-preview", '\ue80c'),
PRINT("mdomz-print", '\ue50c'),
PRINT_DISABLED("mdomz-print_disabled", '\ue50e'),
PRIORITY_HIGH("mdomz-priority_high", '\ue510'),
PRIVACY_TIP("mdomz-privacy_tip", '\ue80e'),
PSYCHOLOGY("mdomz-psychology", '\ue810'),
PUBLIC("mdomz-public", '\ue511'),
PUBLIC_OFF("mdomz-public_off", '\ue812'),
PUBLISH("mdomz-publish", '\ue513'),
PUBLISHED_WITH_CHANGES("mdomz-published_with_changes", '\ue8c4'),
PUSH_PIN("mdomz-push_pin", '\ue814'),
QR_CODE("mdomz-qr_code", '\ue816'),
QR_CODE_2("mdomz-qr_code_2", '\ue8d1'),
QR_CODE_SCANNER("mdomz-qr_code_scanner", '\ue885'),
QRCODE("mdomz-qrcode", '\ue515'),
QUERY_BUILDER("mdomz-query_builder", '\ue517'),
QUESTION_ANSWER("mdomz-question_answer", '\ue519'),
QUEUE("mdomz-queue", '\ue51b'),
QUEUE_MUSIC("mdomz-queue_music", '\ue51d'),
QUEUE_PLAY_NEXT("mdomz-queue_play_next", '\ue51f'),
QUICKREPLY("mdomz-quickreply", '\ue818'),
RADIO("mdomz-radio", '\ue520'),
RADIO_BUTTON_CHECKED("mdomz-radio_button_checked", '\ue522'),
RADIO_BUTTON_UNCHECKED("mdomz-radio_button_unchecked", '\ue523'),
RATE_REVIEW("mdomz-rate_review", '\ue524'),
READ_MORE("mdomz-read_more", '\ue81a'),
RECEIPT("mdomz-receipt", '\ue526'),
RECEIPT_LONG("mdomz-receipt_long", '\ue81b'),
RECENT_ACTORS("mdomz-recent_actors", '\ue528'),
RECORD_VOICE_OVER("mdomz-record_voice_over", '\ue52a'),
REDEEM("mdomz-redeem", '\ue52c'),
REDO("mdomz-redo", '\ue52e'),
REDUCE_CAPACITY("mdomz-reduce_capacity", '\ue8af'),
REFRESH("mdomz-refresh", '\ue52f'),
REMOVE("mdomz-remove", '\ue530'),
REMOVE_CIRCLE("mdomz-remove_circle", '\ue531'),
REMOVE_CIRCLE_OUTLINE("mdomz-remove_circle_outline", '\ue533'),
REMOVE_FROM_QUEUE("mdomz-remove_from_queue", '\ue534'),
REMOVE_RED_EYE("mdomz-remove_red_eye", '\ue536'),
REMOVE_SHOPPING_CART("mdomz-remove_shopping_cart", '\ue538'),
REORDER("mdomz-reorder", '\ue53a'),
REPEAT("mdomz-repeat", '\ue53b'),
REPEAT_ONE("mdomz-repeat_one", '\ue53c'),
REPLAY("mdomz-replay", '\ue53d'),
REPLAY_10("mdomz-replay_10", '\ue53e'),
REPLAY_30("mdomz-replay_30", '\ue53f'),
REPLAY_5("mdomz-replay_5", '\ue540'),
REPLY("mdomz-reply", '\ue541'),
REPLY_ALL("mdomz-reply_all", '\ue542'),
REPORT("mdomz-report", '\ue543'),
REPORT_OFF("mdomz-report_off", '\ue545'),
REPORT_PROBLEM("mdomz-report_problem", '\ue547'),
REQUEST_PAGE("mdomz-request_page", '\ue8c5'),
REQUEST_QUOTE("mdomz-request_quote", '\ue81d'),
RESTAURANT("mdomz-restaurant", '\ue549'),
RESTAURANT_MENU("mdomz-restaurant_menu", '\ue54a'),
RESTORE("mdomz-restore", '\ue54b'),
RESTORE_FROM_TRASH("mdomz-restore_from_trash", '\ue54c'),
RESTORE_PAGE("mdomz-restore_page", '\ue54e'),
RICE_BOWL("mdomz-rice_bowl", '\ue886'),
RING_VOLUME("mdomz-ring_volume", '\ue550'),
ROCKET("mdomz-rocket", '\ue552'),
ROOFING("mdomz-roofing", '\ue888'),
ROOM("mdomz-room", '\ue554'),
ROOM_PREFERENCES("mdomz-room_preferences", '\ue81f'),
ROOM_SERVICE("mdomz-room_service", '\ue556'),
ROTATE_90_DEGREES_CCW("mdomz-rotate_90_degrees_ccw", '\ue558'),
ROTATE_LEFT("mdomz-rotate_left", '\ue55a'),
ROTATE_RIGHT("mdomz-rotate_right", '\ue55b'),
ROUNDED_CORNER("mdomz-rounded_corner", '\ue55c'),
ROUTER("mdomz-router", '\ue55d'),
ROWING("mdomz-rowing", '\ue55f'),
RSS_FEED("mdomz-rss_feed", '\ue560'),
RULE("mdomz-rule", '\ue821'),
RULE_FOLDER("mdomz-rule_folder", '\ue822'),
RUN_CIRCLE("mdomz-run_circle", '\ue824'),
RV_HOOKUP("mdomz-rv_hookup", '\ue561'),
SANITIZER("mdomz-sanitizer", '\ue8b0'),
SATELLITE("mdomz-satellite", '\ue563'),
SAVE("mdomz-save", '\ue565'),
SAVE_ALT("mdomz-save_alt", '\ue567'),
SCANNER("mdomz-scanner", '\ue568'),
SCATTER_PLOT("mdomz-scatter_plot", '\ue56a'),
SCHEDULE("mdomz-schedule", '\ue56c'),
SCHOOL("mdomz-school", '\ue56e'),
SCIENCE("mdomz-science", '\ue826'),
SCORE("mdomz-score", '\ue570'),
SCREEN_LOCK_LANDSCAPE("mdomz-screen_lock_landscape", '\ue572'),
SCREEN_LOCK_PORTRAIT("mdomz-screen_lock_portrait", '\ue574'),
SCREEN_LOCK_ROTATION("mdomz-screen_lock_rotation", '\ue576'),
SCREEN_ROTATION("mdomz-screen_rotation", '\ue577'),
SCREEN_SHARE("mdomz-screen_share", '\ue579'),
SD_CARD("mdomz-sd_card", '\ue57b'),
SD_STORAGE("mdomz-sd_storage", '\ue57d'),
SEARCH("mdomz-search", '\ue57f'),
SEARCH_OFF("mdomz-search_off", '\ue828'),
SECURITY("mdomz-security", '\ue580'),
SELECT_ALL("mdomz-select_all", '\ue582'),
SELF_IMPROVEMENT("mdomz-self_improvement", '\ue829'),
SEND("mdomz-send", '\ue583'),
SENSOR_DOOR("mdomz-sensor_door", '\ue82a'),
SENSOR_WINDOW("mdomz-sensor_window", '\ue82c'),
SENTIMENT_DISSATISFIED("mdomz-sentiment_dissatisfied", '\ue585'),
SENTIMENT_NEUTRAL("mdomz-sentiment_neutral", '\ue587'),
SENTIMENT_SATISFIED("mdomz-sentiment_satisfied", '\ue589'),
SENTIMENT_SATISFIED_ALT("mdomz-sentiment_satisfied_alt", '\ue58b'),
SENTIMENT_SLIGHTLY_DISSATISFIED("mdomz-sentiment_slightly_dissatisfied", '\ue58d'),
SENTIMENT_VERY_DISSATISFIED("mdomz-sentiment_very_dissatisfied", '\ue58f'),
SENTIMENT_VERY_SATISFIED("mdomz-sentiment_very_satisfied", '\ue591'),
SET_MEAL("mdomz-set_meal", '\ue88a'),
SETTINGS("mdomz-settings", '\ue593'),
SETTINGS_APPLICATIONS("mdomz-settings_applications", '\ue595'),
SETTINGS_BACKUP_RESTORE("mdomz-settings_backup_restore", '\ue597'),
SETTINGS_BLUETOOTH("mdomz-settings_bluetooth", '\ue598'),
SETTINGS_BRIGHTNESS("mdomz-settings_brightness", '\ue599'),
SETTINGS_CELL("mdomz-settings_cell", '\ue59b'),
SETTINGS_ETHERNET("mdomz-settings_ethernet", '\ue59d'),
SETTINGS_INPUT_ANTENNA("mdomz-settings_input_antenna", '\ue59e'),
SETTINGS_INPUT_COMPONENT("mdomz-settings_input_component", '\ue59f'),
SETTINGS_INPUT_COMPOSITE("mdomz-settings_input_composite", '\ue5a1'),
SETTINGS_INPUT_HDMI("mdomz-settings_input_hdmi", '\ue5a3'),
SETTINGS_INPUT_SVIDEO("mdomz-settings_input_svideo", '\ue5a5'),
SETTINGS_OVERSCAN("mdomz-settings_overscan", '\ue5a7'),
SETTINGS_PHONE("mdomz-settings_phone", '\ue5a9'),
SETTINGS_POWER("mdomz-settings_power", '\ue5ab'),
SETTINGS_REMOTE("mdomz-settings_remote", '\ue5ac'),
SETTINGS_SYSTEM_DAYDREAM("mdomz-settings_system_daydream", '\ue5ae'),
SETTINGS_VOICE("mdomz-settings_voice", '\ue5b0'),
SHARE("mdomz-share", '\ue5b2'),
SHOP("mdomz-shop", '\ue5b4'),
SHOP_TWO("mdomz-shop_two", '\ue5b6'),
SHOPPING_BAG("mdomz-shopping_bag", '\ue82e'),
SHOPPING_BASKET("mdomz-shopping_basket", '\ue5b8'),
SHOPPING_CART("mdomz-shopping_cart", '\ue5ba'),
SHORT_TEXT("mdomz-short_text", '\ue5bc'),
SHOW_CHART("mdomz-show_chart", '\ue5bd'),
SHUFFLE("mdomz-shuffle", '\ue5be'),
SHUTTER_SPEED("mdomz-shutter_speed", '\ue5bf'),
SICK("mdomz-sick", '\ue8b2'),
SIGNAL_CELLULAR_0_BAR("mdomz-signal_cellular_0_bar", '\ue5c1'),
SIGNAL_CELLULAR_1_BAR("mdomz-signal_cellular_1_bar", '\ue5c3'),
SIGNAL_CELLULAR_2_BAR("mdomz-signal_cellular_2_bar", '\ue5c5'),
SIGNAL_CELLULAR_3_BAR("mdomz-signal_cellular_3_bar", '\ue5c7'),
SIGNAL_CELLULAR_4_BAR("mdomz-signal_cellular_4_bar", '\ue5c9'),
SIGNAL_CELLULAR_ALT("mdomz-signal_cellular_alt", '\ue5ca'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_0_BAR("mdomz-signal_cellular_connected_no_internet_0_bar", '\ue5cb'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_1_BAR("mdomz-signal_cellular_connected_no_internet_1_bar", '\ue5cd'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_2_BAR("mdomz-signal_cellular_connected_no_internet_2_bar", '\ue5cf'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_3_BAR("mdomz-signal_cellular_connected_no_internet_3_bar", '\ue5d1'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_4_BAR("mdomz-signal_cellular_connected_no_internet_4_bar", '\ue5d3'),
SIGNAL_CELLULAR_NO_SIM("mdomz-signal_cellular_no_sim", '\ue5d4'),
SIGNAL_CELLULAR_NULL("mdomz-signal_cellular_null", '\ue5d6'),
SIGNAL_CELLULAR_OFF("mdomz-signal_cellular_off", '\ue5d7'),
SIGNAL_WIFI_0_BAR("mdomz-signal_wifi_0_bar", '\ue5d8'),
SIGNAL_WIFI_1_BAR("mdomz-signal_wifi_1_bar", '\ue5da'),
SIGNAL_WIFI_1_BAR_LOCK("mdomz-signal_wifi_1_bar_lock", '\ue5dc'),
SIGNAL_WIFI_2_BAR("mdomz-signal_wifi_2_bar", '\ue5de'),
SIGNAL_WIFI_2_BAR_LOCK("mdomz-signal_wifi_2_bar_lock", '\ue5e0'),
SIGNAL_WIFI_3_BAR("mdomz-signal_wifi_3_bar", '\ue5e2'),
SIGNAL_WIFI_3_BAR_LOCK("mdomz-signal_wifi_3_bar_lock", '\ue5e4'),
SIGNAL_WIFI_4_BAR("mdomz-signal_wifi_4_bar", '\ue5e6'),
SIGNAL_WIFI_4_BAR_LOCK("mdomz-signal_wifi_4_bar_lock", '\ue5e7'),
SIGNAL_WIFI_OFF("mdomz-signal_wifi_off", '\ue5e8'),
SIM_CARD("mdomz-sim_card", '\ue5e9'),
SIM_CARD_ALERT("mdomz-sim_card_alert", '\ue5eb'),
SINGLE_BED("mdomz-single_bed", '\ue5ed'),
SKIP_NEXT("mdomz-skip_next", '\ue5ef'),
SKIP_PREVIOUS("mdomz-skip_previous", '\ue5f1'),
SLIDESHOW("mdomz-slideshow", '\ue5f3'),
SLOW_MOTION_VIDEO("mdomz-slow_motion_video", '\ue5f5'),
SMART_BUTTON("mdomz-smart_button", '\ue830'),
SMARTPHONE("mdomz-smartphone", '\ue5f6'),
SMOKE_FREE("mdomz-smoke_free", '\ue5f8'),
SMOKING_ROOMS("mdomz-smoking_rooms", '\ue5f9'),
SMS("mdomz-sms", '\ue5fb'),
SMS_FAILED("mdomz-sms_failed", '\ue5fd'),
SNIPPET_FOLDER("mdomz-snippet_folder", '\ue831'),
SNOOZE("mdomz-snooze", '\ue5ff'),
SOAP("mdomz-soap", '\ue833'),
SORT("mdomz-sort", '\ue600'),
SORT_BY_ALPHA("mdomz-sort_by_alpha", '\ue601'),
SOURCE("mdomz-source", '\ue835'),
SOUTH("mdomz-south", '\ue88c'),
SOUTH_EAST("mdomz-south_east", '\ue88d'),
SOUTH_WEST("mdomz-south_west", '\ue88e'),
SPA("mdomz-spa", '\ue602'),
SPACE_BAR("mdomz-space_bar", '\ue604'),
SPEAKER("mdomz-speaker", '\ue605'),
SPEAKER_GROUP("mdomz-speaker_group", '\ue607'),
SPEAKER_NOTES("mdomz-speaker_notes", '\ue609'),
SPEAKER_NOTES_OFF("mdomz-speaker_notes_off", '\ue60b'),
SPEAKER_PHONE("mdomz-speaker_phone", '\ue60d'),
SPEED("mdomz-speed", '\ue60f'),
SPELLCHECK("mdomz-spellcheck", '\ue610'),
SPORTS("mdomz-sports", '\ue611'),
SPORTS_BAR("mdomz-sports_bar", '\ue88f'),
SPORTS_BASEBALL("mdomz-sports_baseball", '\ue612'),
SPORTS_BASKETBALL("mdomz-sports_basketball", '\ue614'),
SPORTS_CRICKET("mdomz-sports_cricket", '\ue616'),
SPORTS_ESPORTS("mdomz-sports_esports", '\ue618'),
SPORTS_FOOTBALL("mdomz-sports_football", '\ue61a'),
SPORTS_GOLF("mdomz-sports_golf", '\ue61c'),
SPORTS_HANDBALL("mdomz-sports_handball", '\ue61e'),
SPORTS_HOCKEY("mdomz-sports_hockey", '\ue61f'),
SPORTS_KABADDI("mdomz-sports_kabaddi", '\ue620'),
SPORTS_MMA("mdomz-sports_mma", '\ue621'),
SPORTS_MOTORSPORTS("mdomz-sports_motorsports", '\ue623'),
SPORTS_RUGBY("mdomz-sports_rugby", '\ue625'),
SPORTS_SOCCER("mdomz-sports_soccer", '\ue627'),
SPORTS_TENNIS("mdomz-sports_tennis", '\ue629'),
SPORTS_VOLLEYBALL("mdomz-sports_volleyball", '\ue62a'),
SQUARE_FOOT("mdomz-square_foot", '\ue62c'),
STACKED_LINE_CHART("mdomz-stacked_line_chart", '\ue8c7'),
STAIRS("mdomz-stairs", '\ue837'),
STAR("mdomz-star", '\ue62e'),
STAR_BORDER("mdomz-star_border", '\ue630'),
STAR_HALF("mdomz-star_half", '\ue631'),
STAR_OUTLINE("mdomz-star_outline", '\ue748'),
STAR_RATE("mdomz-star_rate", '\ue632'),
STARS("mdomz-stars", '\ue633'),
STAY_CURRENT_LANDSCAPE("mdomz-stay_current_landscape", '\ue635'),
STAY_CURRENT_PORTRAIT("mdomz-stay_current_portrait", '\ue637'),
STAY_PRIMARY_LANDSCAPE("mdomz-stay_primary_landscape", '\ue639'),
STAY_PRIMARY_PORTRAIT("mdomz-stay_primary_portrait", '\ue63b'),
STICKY_NOTE_2("mdomz-sticky_note_2", '\ue891'),
STOP("mdomz-stop", '\ue63d'),
STOP_CIRCLE("mdomz-stop_circle", '\ue63f'),
STOP_SCREEN_SHARE("mdomz-stop_screen_share", '\ue641'),
STORAGE("mdomz-storage", '\ue643'),
STORE("mdomz-store", '\ue644'),
STORE_MALL_DIRECTORY("mdomz-store_mall_directory", '\ue646'),
STOREFRONT("mdomz-storefront", '\ue648'),
STRAIGHTEN("mdomz-straighten", '\ue64a'),
STREETVIEW("mdomz-streetview", '\ue64c'),
STRIKETHROUGH_S("mdomz-strikethrough_s", '\ue64d'),
STROLLER("mdomz-stroller", '\ue839'),
STYLE("mdomz-style", '\ue64e'),
SUBDIRECTORY_ARROW_LEFT("mdomz-subdirectory_arrow_left", '\ue650'),
SUBDIRECTORY_ARROW_RIGHT("mdomz-subdirectory_arrow_right", '\ue651'),
SUBJECT("mdomz-subject", '\ue652'),
SUBSCRIPT("mdomz-subscript", '\ue83b'),
SUBSCRIPTIONS("mdomz-subscriptions", '\ue653'),
SUBTITLES("mdomz-subtitles", '\ue655'),
SUBTITLES_OFF("mdomz-subtitles_off", '\ue83c'),
SUBWAY("mdomz-subway", '\ue657'),
SUPERSCRIPT("mdomz-superscript", '\ue83e'),
SUPERVISED_USER_CIRCLE("mdomz-supervised_user_circle", '\ue659'),
SUPERVISOR_ACCOUNT("mdomz-supervisor_account", '\ue65b'),
SUPPORT("mdomz-support", '\ue83f'),
SUPPORT_AGENT("mdomz-support_agent", '\ue841'),
SURROUND_SOUND("mdomz-surround_sound", '\ue65d'),
SWAP_CALLS("mdomz-swap_calls", '\ue65f'),
SWAP_HORIZ("mdomz-swap_horiz", '\ue660'),
SWAP_HORIZONTAL_CIRCLE("mdomz-swap_horizontal_circle", '\ue661'),
SWAP_VERT("mdomz-swap_vert", '\ue663'),
SWAP_VERTICAL_CIRCLE("mdomz-swap_vertical_circle", '\ue664'),
SWITCH_CAMERA("mdomz-switch_camera", '\ue666'),
SWITCH_LEFT("mdomz-switch_left", '\ue842'),
SWITCH_RIGHT("mdomz-switch_right", '\ue844'),
SWITCH_VIDEO("mdomz-switch_video", '\ue668'),
SYNC("mdomz-sync", '\ue66a'),
SYNC_ALT("mdomz-sync_alt", '\ue66b'),
SYNC_DISABLED("mdomz-sync_disabled", '\ue66c'),
SYNC_PROBLEM("mdomz-sync_problem", '\ue66d'),
SYSTEM_UPDATE("mdomz-system_update", '\ue66e'),
SYSTEM_UPDATE_ALT("mdomz-system_update_alt", '\ue670'),
TAB("mdomz-tab", '\ue671'),
TAB_UNSELECTED("mdomz-tab_unselected", '\ue672'),
TABLE_CHART("mdomz-table_chart", '\ue673'),
TABLE_ROWS("mdomz-table_rows", '\ue846'),
TABLE_VIEW("mdomz-table_view", '\ue848'),
TABLET("mdomz-tablet", '\ue675'),
TABLET_ANDROID("mdomz-tablet_android", '\ue677'),
TABLET_MAC("mdomz-tablet_mac", '\ue679'),
TAG_FACES("mdomz-tag_faces", '\ue67b'),
TAP_AND_PLAY("mdomz-tap_and_play", '\ue67d'),
TAPAS("mdomz-tapas", '\ue893'),
TERRAIN("mdomz-terrain", '\ue67e'),
TEXT_FIELDS("mdomz-text_fields", '\ue680'),
TEXT_FORMAT("mdomz-text_format", '\ue681'),
TEXT_ROTATE_UP("mdomz-text_rotate_up", '\ue682'),
TEXT_ROTATE_VERTICAL("mdomz-text_rotate_vertical", '\ue683'),
TEXT_ROTATION_ANGLEDOWN("mdomz-text_rotation_angledown", '\ue684'),
TEXT_ROTATION_ANGLEUP("mdomz-text_rotation_angleup", '\ue685'),
TEXT_ROTATION_DOWN("mdomz-text_rotation_down", '\ue686'),
TEXT_ROTATION_NONE("mdomz-text_rotation_none", '\ue687'),
TEXT_SNIPPET("mdomz-text_snippet", '\ue84a'),
TEXTSMS("mdomz-textsms", '\ue688'),
TEXTURE("mdomz-texture", '\ue68a'),
THEATERS("mdomz-theaters", '\ue68b'),
THUMB_DOWN("mdomz-thumb_down", '\ue68d'),
THUMB_DOWN_ALT("mdomz-thumb_down_alt", '\ue68f'),
THUMB_UP("mdomz-thumb_up", '\ue691'),
THUMB_UP_ALT("mdomz-thumb_up_alt", '\ue693'),
THUMBS_UP_DOWN("mdomz-thumbs_up_down", '\ue695'),
TIME_TO_LEAVE("mdomz-time_to_leave", '\ue697'),
TIMELAPSE("mdomz-timelapse", '\ue699'),
TIMELINE("mdomz-timeline", '\ue69b'),
TIMER("mdomz-timer", '\ue69c'),
TIMER_10("mdomz-timer_10", '\ue69e'),
TIMER_3("mdomz-timer_3", '\ue69f'),
TIMER_OFF("mdomz-timer_off", '\ue6a0'),
TITLE("mdomz-title", '\ue6a2'),
TOC("mdomz-toc", '\ue6a3'),
TODAY("mdomz-today", '\ue6a4'),
TOGGLE_OFF("mdomz-toggle_off", '\ue6a6'),
TOGGLE_ON("mdomz-toggle_on", '\ue6a8'),
TOLL("mdomz-toll", '\ue6aa'),
TONALITY("mdomz-tonality", '\ue6ac'),
TOPIC("mdomz-topic", '\ue84c'),
TOUCH_APP("mdomz-touch_app", '\ue6ae'),
TOUR("mdomz-tour", '\ue84e'),
TOYS("mdomz-toys", '\ue6b0'),
TRACK_CHANGES("mdomz-track_changes", '\ue6b2'),
TRAFFIC("mdomz-traffic", '\ue6b3'),
TRAIN("mdomz-train", '\ue6b5'),
TRAM("mdomz-tram", '\ue6b7'),
TRANSFER_WITHIN_A_STATION("mdomz-transfer_within_a_station", '\ue6b9'),
TRANSFORM("mdomz-transform", '\ue6ba'),
TRANSIT_ENTEREXIT("mdomz-transit_enterexit", '\ue6bb'),
TRANSLATE("mdomz-translate", '\ue6bc'),
TRENDING_DOWN("mdomz-trending_down", '\ue6bd'),
TRENDING_FLAT("mdomz-trending_flat", '\ue6be'),
TRENDING_UP("mdomz-trending_up", '\ue6bf'),
TRIP_ORIGIN("mdomz-trip_origin", '\ue6c0'),
TTY("mdomz-tty", '\ue850'),
TUNE("mdomz-tune", '\ue6c1'),
TURNED_IN("mdomz-turned_in", '\ue6c2'),
TURNED_IN_NOT("mdomz-turned_in_not", '\ue6c4'),
TV("mdomz-tv", '\ue6c5'),
TV_OFF("mdomz-tv_off", '\ue6c7'),
TWO_WHEELER("mdomz-two_wheeler", '\ue749'),
UMBRELLA("mdomz-umbrella", '\ue852'),
UNARCHIVE("mdomz-unarchive", '\ue6c9'),
UNDO("mdomz-undo", '\ue6cb'),
UNFOLD_LESS("mdomz-unfold_less", '\ue6cc'),
UNFOLD_MORE("mdomz-unfold_more", '\ue6cd'),
UNPUBLISHED("mdomz-unpublished", '\ue8c8'),
UNSUBSCRIBE("mdomz-unsubscribe", '\ue6ce'),
UPDATE("mdomz-update", '\ue6d0'),
UPDATE_DISABLED("mdomz-update_disabled", '\ue8d2'),
UPGRADE("mdomz-upgrade", '\ue854'),
USB("mdomz-usb", '\ue6d1'),
VERIFIED("mdomz-verified", '\ue855'),
VERIFIED_USER("mdomz-verified_user", '\ue6d2'),
VERTICAL_ALIGN_BOTTOM("mdomz-vertical_align_bottom", '\ue6d4'),
VERTICAL_ALIGN_CENTER("mdomz-vertical_align_center", '\ue6d5'),
VERTICAL_ALIGN_TOP("mdomz-vertical_align_top", '\ue6d6'),
VERTICAL_DISTRIBUTE("mdomz-vertical_distribute", '\ue8d3'),
VERTICAL_SPLIT("mdomz-vertical_split", '\ue6d7'),
VIBRATION("mdomz-vibration", '\ue6d9'),
VIDEO_CALL("mdomz-video_call", '\ue6db'),
VIDEO_LABEL("mdomz-video_label", '\ue6dd'),
VIDEO_LIBRARY("mdomz-video_library", '\ue6df'),
VIDEO_SETTINGS("mdomz-video_settings", '\ue857'),
VIDEOCAM("mdomz-videocam", '\ue6e1'),
VIDEOCAM_OFF("mdomz-videocam_off", '\ue6e3'),
VIDEOGAME_ASSET("mdomz-videogame_asset", '\ue6e5'),
VIEW_AGENDA("mdomz-view_agenda", '\ue6e7'),
VIEW_ARRAY("mdomz-view_array", '\ue6e9'),
VIEW_CAROUSEL("mdomz-view_carousel", '\ue6eb'),
VIEW_COLUMN("mdomz-view_column", '\ue6ed'),
VIEW_COMFY("mdomz-view_comfy", '\ue6ef'),
VIEW_COMPACT("mdomz-view_compact", '\ue6f1'),
VIEW_DAY("mdomz-view_day", '\ue6f3'),
VIEW_HEADLINE("mdomz-view_headline", '\ue6f5'),
VIEW_LIST("mdomz-view_list", '\ue6f6'),
VIEW_MODULE("mdomz-view_module", '\ue6f8'),
VIEW_QUILT("mdomz-view_quilt", '\ue6fa'),
VIEW_SIDEBAR("mdomz-view_sidebar", '\ue858'),
VIEW_STREAM("mdomz-view_stream", '\ue6fc'),
VIEW_WEEK("mdomz-view_week", '\ue6fe'),
VIGNETTE("mdomz-vignette", '\ue700'),
VISIBILITY("mdomz-visibility", '\ue702'),
VISIBILITY_OFF("mdomz-visibility_off", '\ue704'),
VOICE_CHAT("mdomz-voice_chat", '\ue706'),
VOICE_OVER_OFF("mdomz-voice_over_off", '\ue708'),
VOICEMAIL("mdomz-voicemail", '\ue70a'),
VOLUME_DOWN("mdomz-volume_down", '\ue70b'),
VOLUME_MUTE("mdomz-volume_mute", '\ue70d'),
VOLUME_OFF("mdomz-volume_off", '\ue70f'),
VOLUME_UP("mdomz-volume_up", '\ue711'),
VPN_KEY("mdomz-vpn_key", '\ue713'),
VPN_LOCK("mdomz-vpn_lock", '\ue715'),
WALLPAPER("mdomz-wallpaper", '\ue717'),
WARNING("mdomz-warning", '\ue718'),
WASH("mdomz-wash", '\ue85a'),
WATCH("mdomz-watch", '\ue71a'),
WATCH_LATER("mdomz-watch_later", '\ue71c'),
WATER_DAMAGE("mdomz-water_damage", '\ue895'),
WAVES("mdomz-waves", '\ue71e'),
WB_AUTO("mdomz-wb_auto", '\ue71f'),
WB_CLOUDY("mdomz-wb_cloudy", '\ue721'),
WB_INCANDESCENT("mdomz-wb_incandescent", '\ue723'),
WB_IRIDESCENT("mdomz-wb_iridescent", '\ue725'),
WB_SUNNY("mdomz-wb_sunny", '\ue727'),
WC("mdomz-wc", '\ue729'),
WEB("mdomz-web", '\ue72a'),
WEB_ASSET("mdomz-web_asset", '\ue72c'),
WEEKEND("mdomz-weekend", '\ue72e'),
WEST("mdomz-west", '\ue897'),
WHATSHOT("mdomz-whatshot", '\ue730'),
WHEELCHAIR_PICKUP("mdomz-wheelchair_pickup", '\ue85c'),
WHERE_TO_VOTE("mdomz-where_to_vote", '\ue732'),
WIDGETS("mdomz-widgets", '\ue734'),
WIFI("mdomz-wifi", '\ue736'),
WIFI_CALLING("mdomz-wifi_calling", '\ue85d'),
WIFI_LOCK("mdomz-wifi_lock", '\ue737'),
WIFI_OFF("mdomz-wifi_off", '\ue738'),
WIFI_PROTECTED_SETUP("mdomz-wifi_protected_setup", '\ue85f'),
WIFI_TETHERING("mdomz-wifi_tethering", '\ue739'),
WINE_BAR("mdomz-wine_bar", '\ue898'),
WORK("mdomz-work", '\ue73a'),
WORK_OFF("mdomz-work_off", '\ue73c'),
WORK_OUTLINE("mdomz-work_outline", '\ue73e'),
WRAP_TEXT("mdomz-wrap_text", '\ue73f'),
WRONG_LOCATION("mdomz-wrong_location", '\ue860'),
WYSIWYG("mdomz-wysiwyg", '\ue861'),
YOUTUBE_SEARCHED_FOR("mdomz-youtube_searched_for", '\ue740'),
ZOOM_IN("mdomz-zoom_in", '\ue741'),
ZOOM_OUT("mdomz-zoom_out", '\ue742'),
ZOOM_OUT_MAP("mdomz-zoom_out_map", '\ue743');
/**
 * Looks up the enum constant whose icon description matches the given string.
 *
 * @param description the icon description to search for, e.g. {@code "mdomz-zoom_in"}
 * @return the matching enum constant
 * @throws IllegalArgumentException if no constant has the given description
 */
public static Material2OutlinedMZ findByDescription(String description) {
    Material2OutlinedMZ match = null;
    for (Material2OutlinedMZ icon : values()) {
        if (icon.getDescription().equals(description)) {
            match = icon;
            break;
        }
    }
    if (match == null) {
        throw new IllegalArgumentException("Icon description '" + description + "' is invalid!");
    }
    return match;
}
// Icon metadata is set once in the constructor and never changes, so the
// fields are final (enum constants are shared singletons; mutable state
// here would be a thread-safety hazard).
private final String description;
private final int code;

/**
 * Creates an icon constant.
 *
 * @param description the unique icon description used for lookups
 * @param code the code point of the glyph in the icon font
 */
Material2OutlinedMZ(String description, int code) {
    this.description = description;
    this.code = code;
}
@Override
public String getDescription() {
    // The description doubles as the lookup key used by findByDescription(String).
    return description;
}
@Override
public int getCode() {
    // Code point of this icon's glyph in the font, as declared on the constant.
    return code;
}
}
| |
/*
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.calc.marketdata;
import java.time.LocalDate;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.MetaProperty;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.joda.beans.impl.direct.DirectPrivateBeanBuilder;
import com.google.common.collect.ImmutableMap;
import com.opengamma.strata.collect.result.Failure;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.data.ImmutableMarketData;
import com.opengamma.strata.data.MarketData;
import com.opengamma.strata.data.MarketDataId;
import com.opengamma.strata.data.MarketDataName;
import com.opengamma.strata.data.ObservableId;
/**
* Market data that has been built.
* <p>
* The {@link MarketDataFactory} can be used to build market data from external
* sources and by calibration. This implementation of {@link MarketData}
* provides the result, and includes all the market data, such as quotes and curves.
* <p>
* This implementation differs from {@link ImmutableMarketData} because it
* stores the failures that occurred during the build process.
* These errors are exposed to users when data is queried.
*/
@BeanDefinition(builderScope = "private", constructorScope = "package")
public final class BuiltMarketData
implements MarketData, ImmutableBean {
/**
* The underlying market data.
*/
@PropertyDefinition(validate = "notNull")
private final BuiltScenarioMarketData underlying;
//-------------------------------------------------------------------------
// All queries delegate to the wrapped scenario data; the getSingleValue()
// calls below indicate the wrapped data is treated as holding one scenario.
@Override
public LocalDate getValuationDate() {
return underlying.getValuationDate().getSingleValue();
}
@Override
public boolean containsValue(MarketDataId<?> id) {
return underlying.containsValue(id);
}
// Cast is safe: the id's type parameter determines the stored value's type.
// NOTE(review): presumably throws if the value failed to build (see
// getValueFailures()) - verify against BuiltScenarioMarketData.getValue.
@SuppressWarnings("unchecked")
@Override
public <T> T getValue(MarketDataId<T> id) {
return underlying.getValue(id).getSingleValue();
}
@Override
public <T> Optional<T> findValue(MarketDataId<T> id) {
return underlying.findValue(id).map(v -> v.getSingleValue());
}
@Override
public Set<MarketDataId<?>> getIds() {
return underlying.getIds();
}
@Override
public <T> Set<MarketDataId<T>> findIds(MarketDataName<T> name) {
return underlying.findIds(name);
}
@Override
public Set<ObservableId> getTimeSeriesIds() {
return underlying.getTimeSeriesIds();
}
@Override
public LocalDateDoubleTimeSeries getTimeSeries(ObservableId id) {
return underlying.getTimeSeries(id);
}
//-----------------------------------------------------------------------
/**
* Gets the failures when building single market data values.
*
* @return the single value failures
*/
public ImmutableMap<MarketDataId<?>, Failure> getValueFailures() {
return underlying.getValueFailures();
}
/**
* Gets the failures that occurred when building time series of market data values.
*
* @return the time-series value failures
*/
public ImmutableMap<MarketDataId<?>, Failure> getTimeSeriesFailures() {
return underlying.getTimeSeriesFailures();
}
//------------------------- AUTOGENERATED START -------------------------
// Everything between the AUTOGENERATED markers is produced by the
// Joda-Beans code generator; regenerate rather than editing by hand.
/**
* The meta-bean for {@code BuiltMarketData}.
* @return the meta-bean, not null
*/
public static BuiltMarketData.Meta meta() {
return BuiltMarketData.Meta.INSTANCE;
}
static {
MetaBean.register(BuiltMarketData.Meta.INSTANCE);
}
/**
* Creates an instance.
* @param underlying the value of the property, not null
*/
BuiltMarketData(
BuiltScenarioMarketData underlying) {
JodaBeanUtils.notNull(underlying, "underlying");
this.underlying = underlying;
}
@Override
public BuiltMarketData.Meta metaBean() {
return BuiltMarketData.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the underlying market data.
* @return the value of the property, not null
*/
public BuiltScenarioMarketData getUnderlying() {
return underlying;
}
//-----------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
BuiltMarketData other = (BuiltMarketData) obj;
return JodaBeanUtils.equal(underlying, other.underlying);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(underlying);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("BuiltMarketData{");
buf.append("underlying").append('=').append(JodaBeanUtils.toString(underlying));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code BuiltMarketData}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code underlying} property.
*/
private final MetaProperty<BuiltScenarioMarketData> underlying = DirectMetaProperty.ofImmutable(
this, "underlying", BuiltMarketData.class, BuiltScenarioMarketData.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"underlying");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
// The generator dispatches on the precomputed String.hashCode() of the
// property name (-1770633379 == "underlying".hashCode()).
switch (propertyName.hashCode()) {
case -1770633379: // underlying
return underlying;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends BuiltMarketData> builder() {
return new BuiltMarketData.Builder();
}
@Override
public Class<? extends BuiltMarketData> beanType() {
return BuiltMarketData.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code underlying} property.
* @return the meta-property, not null
*/
public MetaProperty<BuiltScenarioMarketData> underlying() {
return underlying;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -1770633379: // underlying
return ((BuiltMarketData) bean).getUnderlying();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
// Immutable bean: writes are rejected unless the caller asked for quiet mode.
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code BuiltMarketData}.
*/
private static final class Builder extends DirectPrivateBeanBuilder<BuiltMarketData> {
private BuiltScenarioMarketData underlying;
/**
* Restricted constructor.
*/
private Builder() {
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case -1770633379: // underlying
return underlying;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -1770633379: // underlying
this.underlying = (BuiltScenarioMarketData) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public BuiltMarketData build() {
return new BuiltMarketData(
underlying);
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("BuiltMarketData.Builder{");
buf.append("underlying").append('=').append(JodaBeanUtils.toString(underlying));
buf.append('}');
return buf.toString();
}
}
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.codegen;
import org.apache.spark.sql.catalyst.expressions.UnsafeArrayData;
import org.apache.spark.sql.catalyst.expressions.UnsafeMapData;
import org.apache.spark.sql.catalyst.expressions.UnsafeRow;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.unsafe.types.CalendarInterval;
import org.apache.spark.unsafe.types.UTF8String;
/**
* Base class for writing Unsafe* structures.
*/
public abstract class UnsafeWriter {
// Keep internal buffer holder
protected final BufferHolder holder;
// The offset of the global buffer where we start to write this structure.
protected int startingOffset;
protected UnsafeWriter(BufferHolder holder) {
this.holder = holder;
}
/**
* Accessor methods are delegated from BufferHolder class
*/
public final BufferHolder getBufferHolder() {
return holder;
}
public final byte[] getBuffer() {
return holder.getBuffer();
}
public final void reset() {
holder.reset();
}
public final int totalSize() {
return holder.totalSize();
}
public final void grow(int neededSize) {
holder.grow(neededSize);
}
public final int cursor() {
return holder.getCursor();
}
public final void increaseCursor(int val) {
holder.increaseCursor(val);
}
// Records a variable-length field whose data was written starting at
// previousCursor and ends at the current cursor.
public final void setOffsetAndSizeFromPreviousCursor(int ordinal, int previousCursor) {
setOffsetAndSize(ordinal, previousCursor, cursor() - previousCursor);
}
protected void setOffsetAndSize(int ordinal, int size) {
setOffsetAndSize(ordinal, cursor(), size);
}
// Packs the location of a variable-length field into its fixed-length slot:
// upper 32 bits = offset relative to startingOffset, lower 32 bits = size.
protected void setOffsetAndSize(int ordinal, int currentCursor, int size) {
final long relativeOffset = currentCursor - startingOffset;
final long offsetAndSize = (relativeOffset << 32) | (long)size;
write(ordinal, offsetAndSize);
}
// Zeroes the trailing partial 8-byte word (if any) at the cursor before data
// is copied in, so the padding bytes introduced by word-rounding are
// deterministic rather than leftover garbage.
protected final void zeroOutPaddingBytes(int numBytes) {
if ((numBytes & 0x07) > 0) {
Platform.putLong(getBuffer(), cursor() + ((numBytes >> 3) << 3), 0L);
}
}
// Null markers and fixed-length primitive writers; implemented by concrete
// row/array writers, which know their own field layout.
public abstract void setNull1Bytes(int ordinal);
public abstract void setNull2Bytes(int ordinal);
public abstract void setNull4Bytes(int ordinal);
public abstract void setNull8Bytes(int ordinal);
public abstract void write(int ordinal, boolean value);
public abstract void write(int ordinal, byte value);
public abstract void write(int ordinal, short value);
public abstract void write(int ordinal, int value);
public abstract void write(int ordinal, long value);
public abstract void write(int ordinal, float value);
public abstract void write(int ordinal, double value);
public abstract void write(int ordinal, Decimal input, int precision, int scale);
// Variable-length writers: each grows the buffer, copies the payload, records
// the (offset, size) pair in the field's fixed-length slot, and advances the cursor.
public final void write(int ordinal, UTF8String input) {
writeUnalignedBytes(ordinal, input.getBaseObject(), input.getBaseOffset(), input.numBytes());
}
public final void write(int ordinal, byte[] input) {
write(ordinal, input, 0, input.length);
}
public final void write(int ordinal, byte[] input, int offset, int numBytes) {
writeUnalignedBytes(ordinal, input, Platform.BYTE_ARRAY_OFFSET + offset, numBytes);
}
// Copies numBytes from an arbitrarily-aligned source, rounding the space
// reserved in the buffer up to a whole number of 8-byte words.
private void writeUnalignedBytes(
int ordinal,
Object baseObject,
long baseOffset,
int numBytes) {
final int roundedSize = ByteArrayMethods.roundNumberOfBytesToNearestWord(numBytes);
grow(roundedSize);
zeroOutPaddingBytes(numBytes);
Platform.copyMemory(baseObject, baseOffset, getBuffer(), cursor(), numBytes);
setOffsetAndSize(ordinal, numBytes);
increaseCursor(roundedSize);
}
public final void write(int ordinal, CalendarInterval input) {
// grow the global buffer before writing data.
grow(16);
// Write the months and microseconds fields of Interval to the variable length
// portion, as two 8-byte slots.
Platform.putLong(getBuffer(), cursor(), input.months);
Platform.putLong(getBuffer(), cursor() + 8, input.microseconds);
setOffsetAndSize(ordinal, 16);
// move the cursor forward.
increaseCursor(16);
}
public final void write(int ordinal, UnsafeRow row) {
writeAlignedBytes(ordinal, row.getBaseObject(), row.getBaseOffset(), row.getSizeInBytes());
}
public final void write(int ordinal, UnsafeMapData map) {
writeAlignedBytes(ordinal, map.getBaseObject(), map.getBaseOffset(), map.getSizeInBytes());
}
public final void write(UnsafeArrayData array) {
// Unsafe arrays can be written both as a regular array field and as part of a
// map. This makes updating the offset and size dependent on the code path,
// which is why we currently do not provide a method for writing unsafe arrays
// that also updates the size and offset. Callers must record the offset/size
// themselves (e.g. via setOffsetAndSizeFromPreviousCursor).
int numBytes = array.getSizeInBytes();
grow(numBytes);
Platform.copyMemory(
array.getBaseObject(),
array.getBaseOffset(),
getBuffer(),
cursor(),
numBytes);
increaseCursor(numBytes);
}
// Copies a payload that is already word-aligned/word-sized, so no padding or
// rounding is needed.
private void writeAlignedBytes(
int ordinal,
Object baseObject,
long baseOffset,
int numBytes) {
grow(numBytes);
Platform.copyMemory(baseObject, baseOffset, getBuffer(), cursor(), numBytes);
setOffsetAndSize(ordinal, numBytes);
increaseCursor(numBytes);
}
// Raw primitive stores at an absolute buffer offset; used by subclasses to
// fill fixed-length field slots.
protected final void writeBoolean(long offset, boolean value) {
Platform.putBoolean(getBuffer(), offset, value);
}
protected final void writeByte(long offset, byte value) {
Platform.putByte(getBuffer(), offset, value);
}
protected final void writeShort(long offset, short value) {
Platform.putShort(getBuffer(), offset, value);
}
protected final void writeInt(long offset, int value) {
Platform.putInt(getBuffer(), offset, value);
}
protected final void writeLong(long offset, long value) {
Platform.putLong(getBuffer(), offset, value);
}
protected final void writeFloat(long offset, float value) {
Platform.putFloat(getBuffer(), offset, value);
}
protected final void writeDouble(long offset, double value) {
Platform.putDouble(getBuffer(), offset, value);
}
}
| |
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.anycut;
import android.app.Dialog;
import android.app.ListActivity;
import android.content.ContentUris;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.provider.Contacts;
import android.provider.Contacts.People;
import android.provider.Contacts.Phones;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
 * Presents the user with a list of types of shortcuts that can be created.
* When Any Cut is launched through the home screen this is the activity that comes up.
*/
public class CreateShortcutActivity extends ListActivity implements DialogInterface.OnClickListener,
Dialog.OnCancelListener {
private static final boolean ENABLE_ACTION_ICON_OVERLAYS = false;
private static final int REQUEST_PHONE = 1;
private static final int REQUEST_TEXT = 2;
private static final int REQUEST_ACTIVITY = 3;
private static final int REQUEST_CUSTOM = 4;
private static final int LIST_ITEM_DIRECT_CALL = 0;
private static final int LIST_ITEM_DIRECT_TEXT = 1;
private static final int LIST_ITEM_ACTIVITY = 2;
private static final int LIST_ITEM_CUSTOM = 3;
private static final int DIALOG_SHORTCUT_EDITOR = 1;
private Intent mEditorIntent;
@Override
public void onCreate(Bundle savedState) {
    super.onCreate(savedState);
    // Populate the list from the mainMenu string-array resource, one row per
    // shortcut type, using the platform's standard single-line list item.
    ArrayAdapter<CharSequence> menuAdapter = ArrayAdapter.createFromResource(
            this, R.array.mainMenu, android.R.layout.simple_list_item_1);
    setListAdapter(menuAdapter);
}
@Override
protected void onListItemClick(ListView list, View view, int position, long id) {
    // Each menu row launches a picker whose result comes back in onActivityResult.
    switch (position) {
        case LIST_ITEM_DIRECT_CALL:
            startPhonePicker(R.string.callShortcutActivityTitle, REQUEST_PHONE);
            break;
        case LIST_ITEM_DIRECT_TEXT:
            startPhonePicker(R.string.textShortcutActivityTitle, REQUEST_TEXT);
            break;
        case LIST_ITEM_ACTIVITY:
            startPickerActivity(ActivityPickerActivity.class, REQUEST_ACTIVITY);
            break;
        case LIST_ITEM_CUSTOM:
            startPickerActivity(CustomShortcutCreatorActivity.class, REQUEST_CUSTOM);
            break;
    }
}

/**
 * Launches the contacts phone-number picker for a result.
 *
 * @param titleRes string resource shown as the picker's title
 * @param requestCode request code returned to {@link #onActivityResult}
 */
private void startPhonePicker(int titleRes, int requestCode) {
    Intent intent = new Intent(Intent.ACTION_PICK, Phones.CONTENT_URI);
    intent.putExtra(Contacts.Intents.UI.TITLE_EXTRA_KEY, getText(titleRes));
    startActivityForResult(intent, requestCode);
}

/**
 * Launches one of this app's own picker/creator activities for a result.
 *
 * @param activityClass the activity to start
 * @param requestCode request code returned to {@link #onActivityResult}
 */
private void startPickerActivity(Class<?> activityClass, int requestCode) {
    Intent intent = new Intent();
    intent.setClass(this, activityClass);
    startActivityForResult(intent, requestCode);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent result) {
if (resultCode != RESULT_OK) {
return;
}
switch (requestCode) {
case REQUEST_PHONE: {
startShortcutEditor(generatePhoneShortcut(result, R.drawable.sym_action_call,
"tel", Intent.ACTION_CALL));
break;
}
case REQUEST_TEXT: {
startShortcutEditor(generatePhoneShortcut(result, R.drawable.sym_action_sms,
"smsto", Intent.ACTION_SENDTO));
break;
}
case REQUEST_ACTIVITY:
case REQUEST_CUSTOM: {
startShortcutEditor(result);
break;
}
}
}
@Override
protected Dialog onCreateDialog(int dialogId) {
switch (dialogId) {
case DIALOG_SHORTCUT_EDITOR: {
return new ShortcutEditorDialog(this, this, this);
}
}
return super.onCreateDialog(dialogId);
}
@Override
protected void onPrepareDialog(int dialogId, Dialog dialog) {
switch (dialogId) {
case DIALOG_SHORTCUT_EDITOR: {
if (mEditorIntent != null) {
// If the editor intent hasn't been set already set it
ShortcutEditorDialog editor = (ShortcutEditorDialog) dialog;
editor.setIntent(mEditorIntent);
mEditorIntent = null;
}
}
}
}
/**
* Starts the shortcut editor
*
* @param shortcutIntent The shortcut intent to edit
*/
private void startShortcutEditor(Intent shortcutIntent) {
mEditorIntent = shortcutIntent;
showDialog(DIALOG_SHORTCUT_EDITOR);
}
public void onCancel(DialogInterface dialog) {
// Remove the dialog, it won't be used again
removeDialog(DIALOG_SHORTCUT_EDITOR);
}
public void onClick(DialogInterface dialog, int which) {
if (which == DialogInterface.BUTTON1) {
// OK button
ShortcutEditorDialog editor = (ShortcutEditorDialog) dialog;
Intent shortcut = editor.getIntent();
setResult(RESULT_OK, shortcut);
finish();
}
// Remove the dialog, it won't be used again
removeDialog(DIALOG_SHORTCUT_EDITOR);
}
/**
* Returns an Intent describing a direct text message shortcut.
*
* @param result The result from the phone number picker
* @return an Intent describing a phone number shortcut
*/
private Intent generatePhoneShortcut(Intent result, int actionResId, String scheme, String action) {
Uri phoneUri = result.getData();
long personId = 0;
String name = null;
String number = null;
int type;
Cursor cursor = getContentResolver().query(phoneUri,
new String[] { Phones.PERSON_ID, Phones.DISPLAY_NAME, Phones.NUMBER, Phones.TYPE },
null, null, null);
try {
cursor.moveToFirst();
personId = cursor.getLong(0);
name = cursor.getString(1);
number = cursor.getString(2);
type = cursor.getInt(3);
} finally {
if (cursor != null) {
cursor.close();
}
}
Intent intent = new Intent();
Uri personUri = ContentUris.withAppendedId(People.CONTENT_URI, personId);
intent.putExtra(Intent.EXTRA_SHORTCUT_ICON,
generatePhoneNumberIcon(personUri, type, actionResId));
// Make the URI a direct tel: URI so that it will always continue to work
phoneUri = Uri.fromParts(scheme, number, null);
intent.putExtra(Intent.EXTRA_SHORTCUT_INTENT, new Intent(action, phoneUri));
intent.putExtra(Intent.EXTRA_SHORTCUT_NAME, name);
return intent;
}
/**
* Generates a phone number shortcut icon. Adds an overlay describing the type of the phone
* number, and if there is a photo also adds the call action icon.
*
* @param personUri The person the phone number belongs to
* @param type The type of the phone number
* @param actionResId The ID for the action resource
* @return The bitmap for the icon
*/
private Bitmap generatePhoneNumberIcon(Uri personUri, int type, int actionResId) {
final Resources r = getResources();
boolean drawPhoneOverlay = true;
Bitmap photo = People.loadContactPhoto(this, personUri, 0, null);
if (photo == null) {
// If there isn't a photo use the generic phone action icon instead
Bitmap phoneIcon = getPhoneActionIcon(r, actionResId);
if (phoneIcon != null) {
photo = phoneIcon;
drawPhoneOverlay = false;
} else {
return null;
}
}
// Setup the drawing classes
int iconSize = (int) r.getDimension(android.R.dimen.app_icon_size);
Bitmap icon = Bitmap.createBitmap(iconSize, iconSize, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(icon);
// Copy in the photo
Paint photoPaint = new Paint();
photoPaint.setDither(true);
photoPaint.setFilterBitmap(true);
Rect src = new Rect(0,0, photo.getWidth(),photo.getHeight());
Rect dst = new Rect(0,0, iconSize,iconSize);
canvas.drawBitmap(photo, src, dst, photoPaint);
// Create an overlay for the phone number type
String overlay = null;
switch (type) {
case Phones.TYPE_HOME:
overlay = "H";
break;
case Phones.TYPE_MOBILE:
overlay = "M";
break;
case Phones.TYPE_WORK:
overlay = "W";
break;
case Phones.TYPE_PAGER:
overlay = "P";
break;
case Phones.TYPE_OTHER:
overlay = "O";
break;
}
if (overlay != null) {
Paint textPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DEV_KERN_TEXT_FLAG);
textPaint.setTextSize(20.0f);
textPaint.setTypeface(Typeface.DEFAULT_BOLD);
textPaint.setColor(r.getColor(R.color.textColorIconOverlay));
textPaint.setShadowLayer(3f, 1, 1, r.getColor(R.color.textColorIconOverlayShadow));
canvas.drawText(overlay, 2, 16, textPaint);
}
// Draw the phone action icon as an overlay
if (ENABLE_ACTION_ICON_OVERLAYS && drawPhoneOverlay) {
Bitmap phoneIcon = getPhoneActionIcon(r, actionResId);
if (phoneIcon != null) {
src.set(0,0, phoneIcon.getWidth(),phoneIcon.getHeight());
int iconWidth = icon.getWidth();
dst.set(iconWidth - 20, -1, iconWidth, 19);
canvas.drawBitmap(phoneIcon, src, dst, photoPaint);
}
}
return icon;
}
/**
* Returns the icon for the phone call action.
*
* @param r The resources to load the icon from
* @param resId The resource ID to load
* @return the icon for the phone call action
*/
private Bitmap getPhoneActionIcon(Resources r, int resId) {
Drawable phoneIcon = r.getDrawable(resId);
if (phoneIcon instanceof BitmapDrawable) {
BitmapDrawable bd = (BitmapDrawable) phoneIcon;
return bd.getBitmap();
} else {
return null;
}
}
}
| |
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 1996, 2011. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 1996, 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.io;
/**
* A buffered character-input stream that keeps track of line numbers. This
* class defines methods {@link #setLineNumber(int)} and {@link
* #getLineNumber()} for setting and getting the current line number
* respectively.
*
* <p> By default, line numbering begins at 0. This number increments at every
* <a href="#lt">line terminator</a> as the data is read, and can be changed
* with a call to <tt>setLineNumber(int)</tt>. Note however, that
* <tt>setLineNumber(int)</tt> does not actually change the current position in
* the stream; it only changes the value that will be returned by
* <tt>getLineNumber()</tt>.
*
* <p> A line is considered to be <a name="lt">terminated</a> by any one of a
* line feed ('\n'), a carriage return ('\r'), or a carriage return followed
* immediately by a linefeed.
*
* @author Mark Reinhold
* @since JDK1.1
*/
public class LineNumberReader extends BufferedReader {
/** The current line number */
private int lineNumber = 0;
/** The line number of the mark, if any */
private int markedLineNumber; // Defaults to 0
/**
 * If the next character is a line feed, skip it. Set after a bare '\r' is
 * read so that the '\n' of a CRLF pair is not counted as a second line
 * terminator.
 */
private boolean skipLF;
/** The skipLF flag when the mark was set */
private boolean markedSkipLF;
/**
 * Create a new line-numbering reader, using the default input-buffer
 * size.
 *
 * @param in
 * A Reader object to provide the underlying stream
 */
public LineNumberReader(Reader in) {
super(in);
}
/**
 * Create a new line-numbering reader, reading characters into a buffer of
 * the given size.
 *
 * @param in
 * A Reader object to provide the underlying stream
 *
 * @param sz
 * An int specifying the size of the buffer
 */
public LineNumberReader(Reader in, int sz) {
super(in, sz);
}
/**
 * Set the current line number.
 *
 * <p> Note: this only changes the value reported by getLineNumber(); it
 * does not reposition the stream.
 *
 * @param lineNumber
 * An int specifying the line number
 *
 * @see #getLineNumber
 */
public void setLineNumber(int lineNumber) {
this.lineNumber = lineNumber;
}
/**
 * Get the current line number.
 *
 * @return The current line number
 *
 * @see #setLineNumber
 */
public int getLineNumber() {
return lineNumber;
}
/**
 * Read a single character. <a href="#lt">Line terminators</a> are
 * compressed into single newline ('\n') characters. Whenever a line
 * terminator is read the current line number is incremented.
 *
 * @return The character read, or -1 if the end of the stream has been
 * reached
 *
 * @throws IOException
 * If an I/O error occurs
 */
@SuppressWarnings("fallthrough")
public int read() throws IOException {
synchronized (lock) {
int c = super.read();
// The previous read returned a bare '\r'; if the next character is
// '\n' it completes a CRLF pair and is consumed without being
// counted or reported.
if (skipLF) {
if (c == '\n')
c = super.read();
skipLF = false;
}
// Normalize: '\r', '\n' and "\r\n" each count as exactly one
// terminator, and all are reported to the caller as '\n'. The
// '\r' case deliberately falls through to the '\n' case.
switch (c) {
case '\r':
skipLF = true;
case '\n': /* Fall through */
lineNumber++;
return '\n';
}
return c;
}
}
/**
 * Read characters into a portion of an array. Whenever a <a
 * href="#lt">line terminator</a> is read the current line number is
 * incremented.
 *
 * <p> Unlike {@link #read()}, terminators placed in the buffer here are
 * counted but not translated to '\n'.
 *
 * @param cbuf
 * Destination buffer
 *
 * @param off
 * Offset at which to start storing characters
 *
 * @param len
 * Maximum number of characters to read
 *
 * @return The number of bytes read, or -1 if the end of the stream has
 * already been reached
 *
 * @throws IOException
 * If an I/O error occurs
 */
@SuppressWarnings("fallthrough")
public int read(char cbuf[], int off, int len) throws IOException {
synchronized (lock) {
int n = super.read(cbuf, off, len);
// n is -1 at end of stream; the loop then runs zero times and -1
// is returned unchanged.
for (int i = off; i < off + n; i++) {
int c = cbuf[i];
// A '\r' at the end of the previous buffer may be completed by a
// '\n' at the start of this one; that '\n' must not be counted.
if (skipLF) {
skipLF = false;
if (c == '\n')
continue;
}
switch (c) {
case '\r':
skipLF = true;
case '\n': /* Fall through */
lineNumber++;
break;
}
}
return n;
}
}
/**
 * Read a line of text. Whenever a <a href="#lt">line terminator</a> is
 * read the current line number is incremented.
 *
 * @return A String containing the contents of the line, not including
 * any <a href="#lt">line termination characters</a>, or
 * <tt>null</tt> if the end of the stream has been reached
 *
 * @throws IOException
 * If an I/O error occurs
 */
public String readLine() throws IOException {
synchronized (lock) {
// super.readLine(skipLF) is BufferedReader's package-private variant
// that discards a leading '\n' left over from a CRLF pair split
// across calls.
String l = super.readLine(skipLF);
skipLF = false;
if (l != null)
lineNumber++;
return l;
}
}
/** Maximum skip-buffer size */
private static final int maxSkipBufferSize = 8192;
/** Skip buffer, null until allocated; shared across calls, guarded by lock */
private char skipBuffer[] = null;
/**
 * Skip characters.
 *
 * <p> Skipping is implemented by reading through
 * {@link #read(char[], int, int)} so that the line number and CRLF state
 * stay consistent with the characters passed over.
 *
 * @param n
 * The number of characters to skip
 *
 * @return The number of characters actually skipped
 *
 * @throws IOException
 * If an I/O error occurs
 *
 * @throws IllegalArgumentException
 * If <tt>n</tt> is negative
 */
public long skip(long n) throws IOException {
if (n < 0)
throw new IllegalArgumentException("skip() value is negative");
int nn = (int) Math.min(n, maxSkipBufferSize);
synchronized (lock) {
// Grow the shared skip buffer only when a larger chunk is requested.
if ((skipBuffer == null) || (skipBuffer.length < nn))
skipBuffer = new char[nn];
long r = n;
while (r > 0) {
int nc = read(skipBuffer, 0, (int) Math.min(r, nn));
if (nc == -1)
break;
r -= nc;
}
return n - r;
}
}
/**
 * Mark the present position in the stream. Subsequent calls to reset()
 * will attempt to reposition the stream to this point, and will also reset
 * the line number appropriately.
 *
 * @param readAheadLimit
 * Limit on the number of characters that may be read while still
 * preserving the mark. After reading this many characters,
 * attempting to reset the stream may fail.
 *
 * @throws IOException
 * If an I/O error occurs
 */
public void mark(int readAheadLimit) throws IOException {
synchronized (lock) {
super.mark(readAheadLimit);
// Capture the line-counting state together with the stream position.
markedLineNumber = lineNumber;
markedSkipLF = skipLF;
}
}
/**
 * Reset the stream to the most recent mark.
 *
 * @throws IOException
 * If the stream has not been marked, or if the mark has been
 * invalidated
 */
public void reset() throws IOException {
synchronized (lock) {
super.reset();
// Restore the line-counting state saved by mark().
lineNumber = markedLineNumber;
skipLF = markedSkipLF;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end.index;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
import org.apache.phoenix.hbase.index.IndexRegionObserver;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.ConnectionQueryServices;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.EncodedColumnsUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class ImmutableIndexExtendedIT extends ParallelStatsDisabledIT {

    /** DDL options appended to every CREATE TABLE; all test tables are immutable. */
    private final String tableDDLOptions;
    /** Observer whose class is installed on the index table to inject write failures. */
    private final FailingRegionObserver coproc;
    /** When true, the index is created on a view over the data table. */
    private final Boolean useView;

    public ImmutableIndexExtendedIT(FailingRegionObserver coproc, Boolean useView) {
        this.coproc = coproc;
        this.useView = useView;
        StringBuilder optionBuilder = new StringBuilder("IMMUTABLE_ROWS=true");
        this.tableDDLOptions = optionBuilder.toString();
    }

    @BeforeClass
    public static void doSetup() throws Exception {
        Map<String, String> props = Maps.newHashMapWithExpectedSize(1);
        // Keep retries low so injected failures surface quickly.
        props.put(HConstants.HBASE_CLIENT_RETRIES_NUMBER, "5");
        setUpTestDriver(new ReadOnlyProps(props));
    }

    /** Where in the two-phase index write the injected failure occurs. */
    private enum FailStep {
        NONE,
        PRE_INDEX_TABLE_UPDATE,
        POST_INDEX_TABLE_UPDATE
    }

    /**
     * Returns whether the data-table upsert is expected to succeed for the
     * given failure step. Only the no-failure case succeeds.
     */
    private boolean getExpectedStatus(FailStep step) {
        boolean status;
        switch (step) {
            case NONE:
                status = true;
                break;
            case PRE_INDEX_TABLE_UPDATE:
            case POST_INDEX_TABLE_UPDATE:
            default:
                status = false;
        }
        return status;
    }

    /**
     * Returns the number of index rows expected to be left unverified. Only a
     * failure after the index-table update leaves an unverified row behind.
     */
    private int getExpectedUnverifiedRowCount(FailStep step) {
        int unverifiedRowCount;
        switch (step) {
            case POST_INDEX_TABLE_UPDATE:
                unverifiedRowCount = 1;
                break;
            case NONE:
            case PRE_INDEX_TABLE_UPDATE:
            default:
                unverifiedRowCount = 0;
        }
        return unverifiedRowCount;
    }

    /** Implemented by the failure-injecting coprocessors used as test parameters. */
    interface FailingRegionObserver {
        FailStep getFailStep();
    }

    /** Fails every batch mutation before it is applied to the index table. */
    public static class PreMutationFailingRegionObserver extends SimpleRegionObserver
            implements FailingRegionObserver {
        @Override
        public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
                MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
            throw new IOException();
        }

        @Override
        public FailStep getFailStep() {
            return FailStep.PRE_INDEX_TABLE_UPDATE;
        }
    }

    /** Fails every batch mutation after it has been applied to the index table. */
    public static class PostMutationFailingRegionObserver extends SimpleRegionObserver
            implements FailingRegionObserver {
        @Override
        public void postBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
                MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
            throw new IOException();
        }

        @Override
        public FailStep getFailStep() {
            return FailStep.POST_INDEX_TABLE_UPDATE;
        }
    }

    /** Fails only the first batch mutation attempt; retries then succeed. */
    public static class FailOnceMutationRegionObserver extends SimpleRegionObserver
            implements FailingRegionObserver {
        private boolean failOnce = true;

        @Override
        public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
                MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
            if (failOnce) {
                // Subsequent attempts must not raise, so the client retry succeeds.
                failOnce = false;
                throw new IOException();
            }
        }

        @Override
        public FailStep getFailStep() { return FailStep.NONE; }
    }

    @Parameterized.Parameters(name ="coproc = {0}, useView = {1}")
    public static Collection<Object[]> data() {
        // Cross product: 3 coprocessors x {table, view} = 6 parameter sets.
        List<Object[]> params = Lists.newArrayListWithExpectedSize(6);
        boolean[] useViewValues = new boolean[] { false, true };
        for (boolean useView : useViewValues) {
            params.add(new Object[] { new PreMutationFailingRegionObserver(), useView });
            params.add(new Object[] { new PostMutationFailingRegionObserver(), useView });
            params.add(new Object[] { new FailOnceMutationRegionObserver(), useView });
        }
        return params;
    }

    /** Creates the immutable data table and populates it with {@code rowCount} rows. */
    private void createAndPopulateTable(Connection conn, String tableName, int rowCount)
            throws Exception {
        String ddl = "CREATE TABLE " + tableName
                + " (id integer not null primary key, val1 varchar, val2 varchar, val3 varchar)"
                + tableDDLOptions;
        conn.createStatement().execute(ddl);
        String dml = "UPSERT INTO " + tableName + " (id, val1, val2, val3) VALUES (?, ?, ?, ?)";
        PreparedStatement stmt = conn.prepareStatement(dml);
        for (int id = 1; id <= rowCount; ++id) {
            stmt.setInt(1, id);
            stmt.setString(2, "a" + id);
            stmt.setString(3, "ab" + id);
            stmt.setString(4, "abc" + id);
            stmt.executeUpdate();
        }
        conn.commit();
    }

    /** Creates a view selecting all columns of {@code dataTable}. */
    private void createView(Connection conn, String dataTable, String viewTable)
            throws Exception {
        String ddl = "CREATE VIEW " + viewTable + " AS SELECT * FROM " + dataTable;
        conn.createStatement().execute(ddl);
    }

    /** Creates a covered index on val1 and waits for it to become ACTIVE. */
    private void createIndex(Connection conn, String dataTable, String indexTable)
            throws Exception {
        String ddl = "CREATE INDEX " + indexTable + " on " + dataTable
                + " (val1) include (val2, val3)";
        conn.createStatement().execute(ddl);
        conn.commit();
        TestUtil.waitForIndexState(conn, indexTable, PIndexState.ACTIVE);
    }

    /**
     * Counts the rows of {@code tableName} whose empty-column value equals
     * {@code valueBytes} (verified vs. unverified marker), scanning HBase
     * directly.
     */
    private static int getRowCountForEmptyColValue(Connection conn, String tableName,
            byte[] valueBytes) throws IOException, SQLException {
        PTable table = PhoenixRuntime.getTable(conn, tableName);
        byte[] emptyCF = SchemaUtil.getEmptyColumnFamily(table);
        byte[] emptyCQ = EncodedColumnsUtil.getEmptyKeyValueInfo(table).getFirst();
        ConnectionQueryServices queryServices =
                conn.unwrap(PhoenixConnection.class).getQueryServices();
        Scan scan = new Scan();
        scan.addColumn(emptyCF, emptyCQ);
        int count = 0;
        // Fix: the table and scanner were previously never closed, leaking
        // client resources across the 6-way parameterized run.
        try (HTable htable = (HTable) queryServices.getTable(table.getPhysicalName().getBytes());
                ResultScanner resultScanner = htable.getScanner(scan)) {
            for (Result result = resultScanner.next(); result != null;
                    result = resultScanner.next()) {
                if (Bytes.compareTo(result.getValue(emptyCF, emptyCQ), 0, valueBytes.length,
                        valueBytes, 0, valueBytes.length) == 0) {
                    ++count;
                }
            }
        }
        return count;
    }

    /** Asserts the verified/unverified row counts on the index table. */
    private static void verifyRowCountForEmptyCol(Connection conn, String indexTable,
            int expectedVerifiedCount, int expectedUnverifiedCount) throws Exception {
        assertEquals(expectedVerifiedCount,
                getRowCountForEmptyColValue(conn, indexTable, IndexRegionObserver.VERIFIED_BYTES));
        assertEquals(expectedUnverifiedCount,
                getRowCountForEmptyColValue(conn, indexTable, IndexRegionObserver.UNVERIFIED_BYTES));
    }

    /**
     * Upserts a row while the parameterized coprocessor injects a failure on
     * the index table, then checks that the data table and the index's
     * verified/unverified markers end up in the expected state.
     */
    @Test
    public void testFailingUpsertMutations() throws Exception {
        String dataTable = "TBL_" + generateUniqueName();
        String indexTable = "IND_" + generateUniqueName();
        String viewTable = "VIEW_" + generateUniqueName();
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            final int initialRowCount = 2;
            createAndPopulateTable(conn, dataTable, initialRowCount);
            createView(conn, dataTable, viewTable);
            String baseTable = useView ? viewTable : dataTable;
            createIndex(conn, baseTable, indexTable);
            String index = PhoenixRuntime.getTable(conn, indexTable).getPhysicalName().getString();
            TestUtil.addCoprocessor(conn, index, coproc.getClass());
            boolean upsertStatus = true;
            try {
                String dml = "UPSERT INTO " + baseTable + " VALUES (3, 'a3', 'ab3', 'abc3')";
                conn.createStatement().execute(dml);
                conn.commit();
            } catch (Exception ex) {
                upsertStatus = false;
            }
            boolean expectedStatus = getExpectedStatus(coproc.getFailStep());
            assertEquals(expectedStatus, upsertStatus);
            String dql = "SELECT * FROM " + baseTable + " WHERE id = 3";
            ResultSet rs = conn.createStatement().executeQuery(dql);
            if (!upsertStatus) {
                // verify that the row was not inserted into the data table
                assertFalse(rs.next());
                verifyRowCountForEmptyCol(conn, indexTable, initialRowCount,
                        getExpectedUnverifiedRowCount(coproc.getFailStep()));
            } else {
                assertTrue(rs.next());
                assertEquals(3, rs.getInt(1));
                verifyRowCountForEmptyCol(conn, indexTable, initialRowCount + 1,
                        getExpectedUnverifiedRowCount(coproc.getFailStep()));
            }
            TestUtil.removeCoprocessor(conn, index, coproc.getClass());
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.