package org.jolokia.handler.list;
/*
* Copyright 2009-2011 Roland Huss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.*;
import javax.management.*;
import org.json.simple.JSONObject;
/**
* Tree of MBean meta data. This map is a container for one or more MBeanInfo meta data which can be obtained
* via a <code>list</code> request. The full structure in its JSON representation looks like below. The amount
* of data included can be fine tuned in two ways:
* <ul>
* <li>With a <code>maxDepth</code> parameter given at construction time, the size of the map can be restricted
* (from top down)</li>
* <li>A given path selects only partial information from the tree</li>
* </ul>
* Both limiting factors are taken care of when adding the information so that this map doesn't get unnecessarily
* too large.
*
* <pre>
* {
* <domain> :
* {
* <prop list> :
* {
* "attr" :
* {
* <attr name> :
* {
* "type" : <attribute type>,
* "desc" : <textual description of attribute>,
* "rw" : true/false
* },
* ....
* },
* "op" :
* {
* <operation name> :
* {
* "args" : [
* {
* "type" : <argument type>
* "name" : <argument name>
* "desc" : <textual description of argument>
* },
* .....
* ],
* "ret" : <return type>,
* "desc" : <textual description of operation>
* },
* .....
* },
* "not" :
* {
* "name" : <name>,
* "desc" : <desc>,
* "types" : [ <type1>, <type2> ]
* }
* },
* ....
* },
* ....
* }
* </pre>
* @author roland
* @since 13.09.11
*/
public class MBeanInfoData {

    // Max depth for the map to return (0 means "no truncation")
    private int maxDepth;

    // Stack for an inner path, used to restrict the extracted information to a sub-tree or leaf
    private Stack<String> pathStack;

    // Map holding the collected meta information
    private JSONObject infoMap;

    // Updaters for the different info parts, keyed by their path element ("desc", "attr", "op", "not")
    private static final Map<String, DataUpdater> UPDATERS = new HashMap<String, DataUpdater>();

    // Whether to use the canonical (sorted) order for keys in object names
    private boolean useCanonicalName;

    static {
        for (DataUpdater updater : new DataUpdater[] {
                new DescriptionDataUpdater(),
                new AttributeDataUpdater(),
                new OperationDataUpdater(),
                new NotificationDataUpdater()
        }) {
            UPDATERS.put(updater.getKey(), updater);
        }
    }

    /**
     * Constructor taking a max depth. The <em>max depth</em> specifies how deep the info tree should be built
     * up. The tree will be truncated if it gets larger than this value. A <em>path</em> (in form of a stack)
     * can be given, in which case only the sub-information (sub-tree or leaf value) selected by it is stored.
     *
     * @param pMaxDepth max depth
     * @param pPathStack the stack for restricting the information to add. The given stack will be cloned
     *                   and is left untouched.
     * @param pUseCanonicalName whether to use canonical name in listings
     */
    @SuppressWarnings("unchecked")
    public MBeanInfoData(int pMaxDepth, Stack<String> pPathStack, boolean pUseCanonicalName) {
        maxDepth = pMaxDepth;
        useCanonicalName = pUseCanonicalName;
        // Clone the given stack since further processing consumes it and the caller's copy must stay intact
        pathStack = pPathStack != null ? (Stack<String>) pPathStack.clone() : new Stack<String>();
        infoMap = new JSONObject();
    }

    /**
     * The first two levels of this map (tree) consist of the MBean's domain name and name properties, which are
     * independent of an MBean's meta data. If the max depth given at construction time is less or equals than 2 (and
     * no inner path into the map is given), then a client of this map does not need to query the MBeanServer for
     * MBeanInfo meta data.
     * <p></p>
     * This method checks this condition and returns true if this is the case. As side effect it will update this
     * map with the name part extracted from the given object name.
     *
     * @param pName the objectname used for the first two levels
     * @return true if the object name has been added
     */
    @SuppressWarnings("unchecked")
    public boolean handleFirstOrSecondLevel(ObjectName pName) {
        if (maxDepth == 1 && pathStack.size() == 0) {
            // Only add domain names with a dummy value if max depth is restricted to 1
            // (but only when used without a path)
            infoMap.put(pName.getDomain(), 1);
            return true;
        } else if (maxDepth == 2 && pathStack.size() == 0) {
            // Add domain and object name into the map, the final value is a dummy value
            JSONObject mBeansMap = getOrCreateJSONObject(infoMap, pName.getDomain());
            mBeansMap.put(getKeyPropertyString(pName), 1);
            return true;
        }
        return false;
    }

    // Return the MBean's key property list, either in canonical (sorted) or original order
    private String getKeyPropertyString(ObjectName pName) {
        return useCanonicalName ? pName.getCanonicalKeyPropertyListString() : pName.getKeyPropertyListString();
    }

    /**
     * Add information about an MBean as obtained from an {@link MBeanInfo} descriptor. The information added
     * can be restricted by a given path (which has already be prepared as a stack). Also, a max depth as given in the
     * constructor restricts the size of the map from the top.
     *
     * @param mBeanInfo the MBean info
     * @param pName the object name of the MBean
     */
    public void addMBeanInfo(MBeanInfo mBeanInfo, ObjectName pName)
            throws InstanceNotFoundException, IntrospectionException, ReflectionException, IOException {
        JSONObject mBeansMap = getOrCreateJSONObject(infoMap, pName.getDomain());
        // Compute the key property string only once; it is needed both for adding and for cleanup
        String keyProperties = getKeyPropertyString(pName);
        JSONObject mBeanMap = getOrCreateJSONObject(mBeansMap, keyProperties);
        // Trim down stack to get rid of domain/property list
        Stack<String> stack = truncatePathStack(2);
        if (stack.empty()) {
            addFullMBeanInfo(mBeanMap, mBeanInfo);
        } else {
            addPartialMBeanInfo(mBeanMap, mBeanInfo, stack);
        }
        // Remove empty entries, which can occur when a path selected no information for this MBean
        if (mBeanMap.size() == 0) {
            mBeansMap.remove(keyProperties);
            if (mBeansMap.size() == 0) {
                infoMap.remove(pName.getDomain());
            }
        }
    }

    /**
     * Add an exception which occurred during extraction of an {@link MBeanInfo} for
     * a certain {@link ObjectName} to this map.
     *
     * @param pName MBean name for which the error occurred
     * @param pExp exception occurred
     * @throws IOException if this method decides to rethrow the exception
     */
    public void handleException(ObjectName pName, IOException pExp) throws IOException {
        // In case of a remote call, IOException can occur e.g. for
        // NonSerializableExceptions
        if (pathStack.size() == 0) {
            addException(pName, pExp);
        } else {
            // Happens for a deeper request, i.e with a path pointing directly into an MBean,
            // Hence we throw immediately an error here since there will be only this exception
            // and no extra info
            throw new IOException("IOException for MBean " + pName + " (" + pExp.getMessage() + ")",pExp);
        }
    }

    /**
     * Add an exception which occurred during extraction of an {@link MBeanInfo} for
     * a certain {@link ObjectName} to this map.
     *
     * @param pName MBean name for which the error occurred
     * @param pExp exception occurred
     * @throws IllegalStateException if this method decides to rethrow the exception
     */
    public void handleException(ObjectName pName, IllegalStateException pExp) {
        // This happens for JBoss 7.1 in some cases.
        if (pathStack.size() == 0) {
            addException(pName, pExp);
        } else {
            throw new IllegalStateException("IllegalStateException for MBean " + pName + " (" + pExp.getMessage() + ")",pExp);
        }
    }

    /**
     * Add an exception which occurred during extraction of an {@link MBeanInfo} for
     * a certain {@link ObjectName} to this map.
     *
     * @param pName MBean name for which the error occurred
     * @param pExp exception occurred
     * @throws InstanceNotFoundException if this method decides to rethrow the exception
     */
    public void handleException(ObjectName pName, InstanceNotFoundException pExp) throws InstanceNotFoundException {
        // This happens for JBoss 7.1 in some cases (i.e. ResourceAdapterModule)
        if (pathStack.size() == 0) {
            addException(pName, pExp);
        } else {
            // InstanceNotFoundException has no cause-taking constructor, so the original message is embedded
            throw new InstanceNotFoundException("InstanceNotFoundException for MBean " + pName + " (" + pExp.getMessage() + ")");
        }
    }

    // Add an exception to the info map under the "error" key of the MBean's entry
    @SuppressWarnings("unchecked")
    private void addException(ObjectName pName, Exception pExp) {
        JSONObject mBeansMap = getOrCreateJSONObject(infoMap, pName.getDomain());
        JSONObject mBeanMap = getOrCreateJSONObject(mBeansMap, getKeyPropertyString(pName));
        mBeanMap.put(DataKeys.ERROR.getKey(), pExp.toString());
    }

    /**
     * Extract either a sub tree or a leaf value. If a path is used, then adding MBeanInfos has added them
     * as if no path were given (i.e. in its original place in the tree) but leaves out other information
     * not included by the path. This method then moves up the part pointed to by the path to the top of the
     * tree hierarchy. It also takes into account the maximum depth of the tree and truncates below.
     *
     * @return either a Map for a subtree or the leaf value as an object
     */
    public Object truncate() {
        Object value = navigatePath();
        // No depth limit, or a leaf value: nothing to truncate
        if (maxDepth == 0 || !(value instanceof JSONObject)) {
            return value;
        }
        // Truncate all levels below the maximum depth
        return truncateJSONObject((JSONObject) value, maxDepth);
    }

    // =====================================================================================================

    // Let every registered updater add its part of the MBean info
    private void addFullMBeanInfo(JSONObject pMBeanMap, MBeanInfo pMBeanInfo) {
        for (DataUpdater updater : UPDATERS.values()) {
            updater.update(pMBeanMap, pMBeanInfo, null);
        }
    }

    // Add only the part of the MBean info selected by the next path element
    private void addPartialMBeanInfo(JSONObject pMBeanMap, MBeanInfo pMBeanInfo, Stack<String> pPathStack) {
        String what = pPathStack.empty() ? null : pPathStack.pop();
        DataUpdater updater = UPDATERS.get(what);
        if (updater == null) {
            throw new IllegalArgumentException("Illegal path element " + what);
        }
        updater.update(pMBeanMap, pMBeanInfo, pPathStack);
    }

    // Fetch the JSONObject stored under the given key, creating and registering it if missing
    @SuppressWarnings("unchecked")
    private JSONObject getOrCreateJSONObject(JSONObject pMap, String pKey) {
        JSONObject nMap = (JSONObject) pMap.get(pKey);
        if (nMap == null) {
            nMap = new JSONObject();
            pMap.put(pKey, nMap);
        }
        return nMap;
    }

    // Recursively copy pValue, replacing every subtree below pMaxDepth with the dummy value 1
    @SuppressWarnings("unchecked")
    private Object truncateJSONObject(JSONObject pValue, int pMaxDepth) {
        if (pMaxDepth == 0) {
            // Depth exhausted: replace the whole subtree with a dummy marker
            return 1;
        }
        JSONObject ret = new JSONObject();
        for (Object entryObj : pValue.entrySet()) {
            Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entryObj;
            Object key = entry.getKey();
            Object value = entry.getValue();
            if (value instanceof JSONObject) {
                ret.put(key, truncateJSONObject((JSONObject) value, pMaxDepth - 1));
            } else {
                ret.put(key, value);
            }
        }
        return ret;
    }

    // Trim the path stack (from the top) by the given number of elements, or return an
    // empty stack if it holds fewer elements than that
    @SuppressWarnings("unchecked")
    private Stack<String> truncatePathStack(int pLevel) {
        if (pathStack.size() < pLevel) {
            return new Stack<String>();
        }
        // Trim off domain and MBean properties;
        // pathStack gets cloned here since the subsequent processing consumes it
        Stack<String> ret = (Stack<String>) pathStack.clone();
        for (int i = 0; i < pLevel; i++) {
            ret.pop();
        }
        return ret;
    }

    // Navigate along the inner path to the sub map or leaf value it points to
    private Object navigatePath() {
        int size = pathStack.size();
        JSONObject innerMap = infoMap;
        while (size > 0) {
            Collection<?> vals = innerMap.values();
            if (vals.size() == 0) {
                return innerMap;
            } else if (vals.size() != 1) {
                throw new IllegalStateException("Internal: More than one key found when extracting with path: " + vals);
            }
            Object value = vals.iterator().next();

            // End leaf, return it ....
            if (size == 1) {
                return value;
            }
            // Dive in deeper ...
            if (!(value instanceof JSONObject)) {
                throw new IllegalStateException("Internal: Value within path extraction must be a Map, not " + value.getClass());
            }
            innerMap = (JSONObject) value;
            --size;
        }
        return innerMap;
    }
}
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*
*/
package com.microsoft.azure.management.network.v2019_09_01.implementation;
import com.microsoft.azure.arm.resources.collection.implementation.GroupableResourcesCoreImpl;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGateways;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGateway;
import rx.Observable;
import rx.Completable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import com.microsoft.azure.arm.resources.ResourceUtilsCore;
import com.microsoft.azure.arm.utils.RXMapper;
import rx.functions.Func1;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.Page;
import java.util.List;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewayBackendHealth;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewayBackendHealthOnDemand;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewayAvailableWafRuleSetsResult;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewayOnDemandProbe;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewayAvailableSslOptions;
import com.microsoft.azure.management.network.v2019_09_01.ApplicationGatewaySslPredefinedPolicy;
class ApplicationGatewaysImpl extends GroupableResourcesCoreImpl<ApplicationGateway, ApplicationGatewayImpl, ApplicationGatewayInner, ApplicationGatewaysInner, NetworkManager> implements ApplicationGateways {
protected ApplicationGatewaysImpl(NetworkManager manager) {
super(manager.inner().applicationGateways(), manager);
}
@Override
protected Observable<ApplicationGatewayInner> getInnerAsync(String resourceGroupName, String name) {
ApplicationGatewaysInner client = this.inner();
return client.getByResourceGroupAsync(resourceGroupName, name);
}
@Override
protected Completable deleteInnerAsync(String resourceGroupName, String name) {
ApplicationGatewaysInner client = this.inner();
return client.deleteAsync(resourceGroupName, name).toCompletable();
}
@Override
public Observable<String> deleteByIdsAsync(Collection<String> ids) {
if (ids == null || ids.isEmpty()) {
return Observable.empty();
}
Collection<Observable<String>> observables = new ArrayList<>();
for (String id : ids) {
final String resourceGroupName = ResourceUtilsCore.groupFromResourceId(id);
final String name = ResourceUtilsCore.nameFromResourceId(id);
Observable<String> o = RXMapper.map(this.inner().deleteAsync(resourceGroupName, name), id);
observables.add(o);
}
return Observable.mergeDelayError(observables);
}
@Override
public Observable<String> deleteByIdsAsync(String...ids) {
return this.deleteByIdsAsync(new ArrayList<String>(Arrays.asList(ids)));
}
@Override
public void deleteByIds(Collection<String> ids) {
if (ids != null && !ids.isEmpty()) {
this.deleteByIdsAsync(ids).toBlocking().last();
}
}
@Override
public void deleteByIds(String...ids) {
this.deleteByIds(new ArrayList<String>(Arrays.asList(ids)));
}
@Override
public PagedList<ApplicationGateway> listByResourceGroup(String resourceGroupName) {
ApplicationGatewaysInner client = this.inner();
return this.wrapList(client.listByResourceGroup(resourceGroupName));
}
@Override
public Observable<ApplicationGateway> listByResourceGroupAsync(String resourceGroupName) {
ApplicationGatewaysInner client = this.inner();
return client.listByResourceGroupAsync(resourceGroupName)
.flatMapIterable(new Func1<Page<ApplicationGatewayInner>, Iterable<ApplicationGatewayInner>>() {
@Override
public Iterable<ApplicationGatewayInner> call(Page<ApplicationGatewayInner> page) {
return page.items();
}
})
.map(new Func1<ApplicationGatewayInner, ApplicationGateway>() {
@Override
public ApplicationGateway call(ApplicationGatewayInner inner) {
return wrapModel(inner);
}
});
}
@Override
public PagedList<ApplicationGateway> list() {
ApplicationGatewaysInner client = this.inner();
return this.wrapList(client.list());
}
@Override
public Observable<ApplicationGateway> listAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAsync()
.flatMapIterable(new Func1<Page<ApplicationGatewayInner>, Iterable<ApplicationGatewayInner>>() {
@Override
public Iterable<ApplicationGatewayInner> call(Page<ApplicationGatewayInner> page) {
return page.items();
}
})
.map(new Func1<ApplicationGatewayInner, ApplicationGateway>() {
@Override
public ApplicationGateway call(ApplicationGatewayInner inner) {
return wrapModel(inner);
}
});
}
@Override
public ApplicationGatewayImpl define(String name) {
return wrapModel(name);
}
@Override
public Completable startAsync(String resourceGroupName, String applicationGatewayName) {
ApplicationGatewaysInner client = this.inner();
return client.startAsync(resourceGroupName, applicationGatewayName).toCompletable();
}
@Override
public Completable stopAsync(String resourceGroupName, String applicationGatewayName) {
ApplicationGatewaysInner client = this.inner();
return client.stopAsync(resourceGroupName, applicationGatewayName).toCompletable();
}
@Override
public Observable<ApplicationGatewayBackendHealth> backendHealthAsync(String resourceGroupName, String applicationGatewayName) {
ApplicationGatewaysInner client = this.inner();
return client.backendHealthAsync(resourceGroupName, applicationGatewayName)
.map(new Func1<ApplicationGatewayBackendHealthInner, ApplicationGatewayBackendHealth>() {
@Override
public ApplicationGatewayBackendHealth call(ApplicationGatewayBackendHealthInner inner) {
return new ApplicationGatewayBackendHealthImpl(inner, manager());
}
});
}
@Override
public Observable<ApplicationGatewayBackendHealthOnDemand> backendHealthOnDemandAsync(String resourceGroupName, String applicationGatewayName, ApplicationGatewayOnDemandProbe probeRequest) {
ApplicationGatewaysInner client = this.inner();
return client.backendHealthOnDemandAsync(resourceGroupName, applicationGatewayName, probeRequest)
.map(new Func1<ApplicationGatewayBackendHealthOnDemandInner, ApplicationGatewayBackendHealthOnDemand>() {
@Override
public ApplicationGatewayBackendHealthOnDemand call(ApplicationGatewayBackendHealthOnDemandInner inner) {
return new ApplicationGatewayBackendHealthOnDemandImpl(inner, manager());
}
});
}
@Override
public Observable<String> listAvailableServerVariablesAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableServerVariablesAsync()
.flatMap(new Func1<List<String>, Observable<String>>() {
@Override
public Observable<String> call(List<String> innerList) {
return Observable.from(innerList);
}
});}
@Override
public Observable<String> listAvailableRequestHeadersAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableRequestHeadersAsync()
.flatMap(new Func1<List<String>, Observable<String>>() {
@Override
public Observable<String> call(List<String> innerList) {
return Observable.from(innerList);
}
});}
@Override
public Observable<String> listAvailableResponseHeadersAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableResponseHeadersAsync()
.flatMap(new Func1<List<String>, Observable<String>>() {
@Override
public Observable<String> call(List<String> innerList) {
return Observable.from(innerList);
}
});}
@Override
public Observable<ApplicationGatewayAvailableWafRuleSetsResult> listAvailableWafRuleSetsAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableWafRuleSetsAsync()
.map(new Func1<ApplicationGatewayAvailableWafRuleSetsResultInner, ApplicationGatewayAvailableWafRuleSetsResult>() {
@Override
public ApplicationGatewayAvailableWafRuleSetsResult call(ApplicationGatewayAvailableWafRuleSetsResultInner inner) {
return new ApplicationGatewayAvailableWafRuleSetsResultImpl(inner, manager());
}
});
}
@Override
protected ApplicationGatewayImpl wrapModel(ApplicationGatewayInner inner) {
return new ApplicationGatewayImpl(inner.name(), inner, manager());
}
@Override
protected ApplicationGatewayImpl wrapModel(String name) {
return new ApplicationGatewayImpl(name, new ApplicationGatewayInner(), this.manager());
}
@Override
public Observable<ApplicationGatewayAvailableSslOptions> listAvailableSslOptionsAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableSslOptionsAsync()
.map(new Func1<ApplicationGatewayAvailableSslOptionsInner, ApplicationGatewayAvailableSslOptions>() {
@Override
public ApplicationGatewayAvailableSslOptions call(ApplicationGatewayAvailableSslOptionsInner inner) {
return new ApplicationGatewayAvailableSslOptionsImpl(inner, manager());
}
});
}
@Override
public Observable<ApplicationGatewaySslPredefinedPolicy> listAvailableSslPredefinedPoliciesAsync() {
ApplicationGatewaysInner client = this.inner();
return client.listAvailableSslPredefinedPoliciesAsync()
.flatMapIterable(new Func1<Page<ApplicationGatewaySslPredefinedPolicyInner>, Iterable<ApplicationGatewaySslPredefinedPolicyInner>>() {
@Override
public Iterable<ApplicationGatewaySslPredefinedPolicyInner> call(Page<ApplicationGatewaySslPredefinedPolicyInner> page) {
return page.items();
}
})
.map(new Func1<ApplicationGatewaySslPredefinedPolicyInner, ApplicationGatewaySslPredefinedPolicy>() {
@Override
public ApplicationGatewaySslPredefinedPolicy call(ApplicationGatewaySslPredefinedPolicyInner inner) {
return new ApplicationGatewaySslPredefinedPolicyImpl(inner, manager());
}
});
}
@Override
public Observable<ApplicationGatewaySslPredefinedPolicy> getSslPredefinedPolicyAsync(String predefinedPolicyName) {
ApplicationGatewaysInner client = this.inner();
return client.getSslPredefinedPolicyAsync(predefinedPolicyName)
.map(new Func1<ApplicationGatewaySslPredefinedPolicyInner, ApplicationGatewaySslPredefinedPolicy>() {
@Override
public ApplicationGatewaySslPredefinedPolicy call(ApplicationGatewaySslPredefinedPolicyInner inner) {
return new ApplicationGatewaySslPredefinedPolicyImpl(inner, manager());
}
});
}
}
/*
* Part of the CCNx Java Library.
*
* Copyright (C) 2008-2013 Palo Alto Research Center, Inc.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. You should have received
* a copy of the GNU Lesser General Public License along with this library;
* if not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.ccnx.ccn.config;
import org.ccnx.ccn.impl.security.keystore.AESKeyStoreSpi;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.protocol.Component;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.MalformedContentNameStringException;
/**
* A class encapsulating user-specific configuration information and default variable values.
* Eventually this will be handled more sensibly by a user configuration file. This is likely
* to change extensively as the user model evolves.
*/
public class UserConfiguration {
/**
* Our eventual configuration file location.
*/
protected static final String DEFAULT_CONFIGURATION_FILE_NAME = "ccnx_config.bin";
protected static final String DEFAULT_KEYSTORE_FILE_NAME = ".ccnx_keystore";
protected static final String DEFAULT_KEY_CACHE_FILE_NAME = "secure_key_cache.bin";
protected static final String KEY_DIRECTORY = "keyCache";
protected static final String ADDRESSBOOK_FILE_NAME = "ccnx_addressbook.xml";
protected static final String CCNX_DEFAULT_NAMESPACE = "/ccnx.org";
protected static final Component DEFAULT_USER_NAMESPACE_MARKER = new Component("Users");
protected static final Component DEFAULT_KEY_NAMESPACE_MARKER = new Component("Keys");
/**
* Currently very cheezy keystore handling. Will improve when we can actually use
* java 1.6-only features.
*/
protected static final String DEFAULT_KEYSTORE_PASSWORD = "Th1s1sn0t8g00dp8ssw0rd.";
protected static final int DEFAULT_KEY_LENGTH = 1024;
protected static final String DEFAULT_KEY_ALG = "RSA";
/**
* Change to all lower case. Most OSes turn out to be non case-sensitive
* for this, but not all.
*/
protected static final String DEFAULT_KEY_ALIAS = "ccnxuser";
protected static final String DEFAULT_KEYSTORE_TYPE = "PKCS12";
/**
* Default prefix to use, e.g. for user information if not overridden by local stuff.
*/
protected static final String CCNX_DEFAULT_NAMESPACE_PROPERTY =
"org.ccnx.config.CCNxNamespace";
protected static final String CCNX_DEFAULT_NAMESPACE_ENVIRONMENT_VARIABLE = "CCNX_NAMESPACE";
/**
* Default keystore type for symmetric keys
*/
protected static final String DEFAULT_SYMMETRIC_KEYSTORE_TYPE = AESKeyStoreSpi.TYPE;
/**
* Default value of user configuration directory name -- this is not
* the full path, merely the directory name itself; by default we interpret
* the directory name as <user_home>/.ccnx.
* @return
*/
protected static final String CCNX_DEFAULT_USER_CONFIG_DIR_NAME = ".ccnx";
/**
* Directory (subdirectory of User.home) where all user metadata is kept.
* Property/environment variable to set the user configuration directory (full path).
*/
protected static final String CCNX_USER_CONFIG_DIR_PROPERTY =
"org.ccnx.config.CCNxDir";
protected static final String CCNX_USER_CONFIG_DIR_ENVIRONMENT_VARIABLE = "CCNX_DIR";
/**
* User friendly name, by default user.name Java property
*/
protected static final String CCNX_USER_NAME_PROPERTY =
"org.ccnx.config.UserName";
protected static final String CCNX_USER_NAME_ENVIRONMENT_VARIABLE = "CCNX_USER_NAME";
/**
* User namespace, by default ccnxNamespace()/<DEFAULT_USER_NAMESPACE_MARKER>/userName();
* the user namespace prefix will be set to the value given here -- so the user
* namespace will be userNamespacePrefix()/<DEFAULT_USER_NAMESPACE_MARKER>/<user name>
* for either a user name we are given or our default user name.
*/
protected static final String CCNX_USER_NAMESPACE_PREFIX_PROPERTY =
"org.ccnx.config.UserNamespacePrefix";
protected static final String CCNX_USER_NAMESPACE_PREFIX_ENVIRONMENT_VARIABLE = "CCNX_USER_NAMESPACE_PREFIX";
/**
* User namespace, by default ccnxNamespace()/<DEFAULT_USER_NAMESPACE_MARKER>/userName();
* the user namespace will be set to the value given here -- we don't add the
* user namespace marker or userName().
*/
protected static final String CCNX_USER_NAMESPACE_PROPERTY =
"org.ccnx.config.UserNamespace";
protected static final String CCNX_USER_NAMESPACE_ENVIRONMENT_VARIABLE = "CCNX_USER_NAMESPACE";
/**
* Property and variable to set the keystore file name to something other than the default .ccnx_keystore
* (the directory is handled separately, as the CCNX_USER_CONFIG_DIRECTORY...)
*/
protected static final String CCNX_KEYSTORE_FILENAME_PROPERTY =
"org.ccnx.config.KeystoreFilename";
protected static final String CCNX_KEYSTORE_FILENAME_ENVIRONMENT_VARIABLE = "CCNX_KEYSTORE_FILENAME";
/**
* Property and variable to set the keystore password to something other than the default;
* can also be overridden in calls to the key manager constructor.
*/
protected static final String CCNX_KEYSTORE_PASSWORD_PROPERTY =
"org.ccnx.config.KeystorePassword";
protected static final String CCNX_KEYSTORE_PASSWORD_ENVIRONMENT_VARIABLE = "CCNX_KEYSTORE_PASSWORD";
/**
* Property and variable to set the keystore file name to something other than the default ccnx_user.conf
* (the directory is handled separately, as the CCNX_USER_CONFIG_DIRECTORY...)
*/
protected static final String CCNX_CONFIGURATION_FILENAME_PROPERTY =
"org.ccnx.config.ConfigurationFilename";
protected static final String CCNX_CONFIGURATION_FILENAME_ENVIRONMENT_VARIABLE = "CCNX_CONFIG_FILENAME";
/**
* Property and variable to set the key locator to use for the default key. Need something
* more complicated, probably read from a configuration file. But this will get us started.
* Parse this as "key locator for the default key", not "the default value for the key locator".
*/
protected static final String CCNX_DEFAULT_KEY_LOCATOR_PROPERTY =
"org.ccnx.config.DefaultKeyLocator";
protected static final String CCNX_DEFAULT_KEY_LOCATOR_ENVIRONMENT_VARIABLE = "CCNX_DEFAULT_KEY_LOCATOR";
/**
* Property and variable to control whether we publish keys or not.
*/
protected static final String CCNX_PUBLISH_KEYS_PROPERTY =
"org.ccnx.config.PublishKeys";
protected static final String CCNX_PUBLISH_KEYS_ENVIRONMENT_VARIABLE = "CCNX_PUBLISH_KEYS";
/**
* Property and variable to control whether we load/can set user's key-related configuration
* (key locators, key cache, etc). Key cache saving and loading is additionally handled
* below -- both this variable and that one need to be set to true to automatically save
* and load the key cache; if CCNX_SAVE_KEY_CACHE_CONFIGURATION_PROPERTY is true but this
* CCNX_USE_KEY_CONFIGURATION_PROPERTY is false, then users can manually save and load the
* key cache, but it will not be handled automatically on startup.
*
*/
protected static final String CCNX_USE_KEY_CONFIGURATION_PROPERTY = "org.ccnx.config.UseKeyConfiguration";
protected static final String CCNX_USE_KEY_CONFIGURATION_ENVIRONMENT_VARIABLE = "CCNX_USE_KEY_CONFIGURATION";
/**
* Variable to control whether key cache is saved on request and reloaded on startup.
* See CCNX_USE_KEY_CONFIGURATION_PROPERTY.
*/
protected static final String CCNX_SAVE_KEY_CACHE_PROPERTY = "org.ccnx.config.SaveKeyCache";
protected static final String CCNX_SAVE_KEY_CACHE_ENVIRONMENT_VARIABLE = "CCNX_SAVE_KEY_CACHE";
protected static final String DEFAULT_SAVE_KEY_CACHE_SETTING = SystemConfiguration.STRING_FALSE; // default to off for now.
/**
* Value of CCN directory.
*/
protected static String _userConfigurationDir;
/**
* User name. By default value of user.name property.
*/
protected static String _userName;
/**
* CCNx (control) prefix.
*/
protected static ContentName _defaultNamespace;
/**
* User prefix (e.g. for keys). By default, the user namespace prefix together with user information.
*/
protected static ContentName _userNamespace;
/**
* User namespace prefix (e.g. for keys). By default, the CCNX prefix
*/
protected static ContentName _userNamespacePrefix;
/**
* Keystore file name. This is the name of the actual file, without the directory.
*/
protected static String _keystoreFileName;
/**
* Keystore password, if not default. Yes we know this is bad; it's
* on our list of things to improve.
*/
protected static String _keystorePassword;
/**
* Configuration file name. This is the name of the actual file, without the directory.
*/
protected static String _configurationFileName;
/**
* Do we publish keys by default?
*/
protected static Boolean _publishKeys;
/**
* Do we load stored state about cached secret keys, key locators (credentials) to
* use, and so on? Setting this to false can prevent interactions between unit tests
* and the user's internal configuration data. If false, we also prevent writing
* to configuration state.
*/
protected static Boolean _useKeyConfiguration;
/**
* Do we automatically save and load the key cache as part of the configuration data?
* (Automatic loading of key cache happens only if _useKeyConfiguration is also true.)
*/
protected static Boolean _saveAndLoadKeyCache;
protected static final String USER_DIR = System.getProperty("user.home");
public static String FILE_SEP = System.getProperty("file.separator");
/**
 * Explicitly set the user name, overriding the property/environment/default lookup
 * performed lazily by userName().
 * @param name the user name to use
 */
public static void setUserName(String name) {
    _userName = name;
}
/**
 * The configured user name. Lazily resolved from the org.ccnx.config.UserName
 * property, the CCNX_USER_NAME environment variable, or the standard Java
 * user.name property (in that order), unless set explicitly beforehand.
 * @return the user name
 */
public static String userName() {
    if (null != _userName) {
        return _userName;
    }
    String javaUserName = System.getProperty("user.name");
    _userName = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
            CCNX_USER_NAME_PROPERTY, CCNX_USER_NAME_ENVIRONMENT_VARIABLE, javaUserName);
    return _userName;
}
/**
 * Explicitly set the user configuration directory (full path), overriding the
 * property/environment/default lookup performed lazily by userConfigurationDirectory().
 * @param path full path to the configuration directory
 */
public static void setUserConfigurationDirectory(String path) {
    _userConfigurationDir = path;
}
/**
 * The user configuration directory (full path). Lazily resolved from the
 * org.ccnx.config.CCNxDir property, the CCNX_DIR environment variable, or
 * the default of &lt;user.home&gt;/.ccnx (in that order), unless set explicitly.
 * @return the configuration directory path
 */
public static String userConfigurationDirectory() {
    if (null == _userConfigurationDir) {
        String defaultDir = USER_DIR + FILE_SEP + CCNX_DEFAULT_USER_CONFIG_DIR_NAME;
        _userConfigurationDir = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_USER_CONFIG_DIR_PROPERTY,
                CCNX_USER_CONFIG_DIR_ENVIRONMENT_VARIABLE,
                defaultDir);
        // Defensive fallback in case the lookup returned null despite the default
        if (null == _userConfigurationDir) {
            _userConfigurationDir = defaultDir;
        }
    }
    return _userConfigurationDir;
}
/**
 * Sets the default CCNx namespace prefix.
 * @param defaultNamespacePrefix string form of the namespace, or null to clear
 *        the cached value so it is re-resolved on next access
 * @throws MalformedContentNameStringException if the string cannot be parsed
 */
public static void setDefaultNamespacePrefix(String defaultNamespacePrefix) throws MalformedContentNameStringException {
    _defaultNamespace = (null == defaultNamespacePrefix) ? null : ContentName.fromNative(defaultNamespacePrefix);
}

/**
 * Returns the default CCNx namespace, lazily resolved from
 * property/environment configuration (falling back to the compiled-in
 * CCNX_DEFAULT_NAMESPACE).
 * @throws RuntimeException if the configured value is not a valid ContentName
 */
public static ContentName defaultNamespace() {
    if (null == _defaultNamespace) {
        String defaultNamespaceString =
                SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                        CCNX_DEFAULT_NAMESPACE_PROPERTY,
                        CCNX_DEFAULT_NAMESPACE_ENVIRONMENT_VARIABLE,
                        CCNX_DEFAULT_NAMESPACE);
        try {
            _defaultNamespace = ContentName.fromNative(defaultNamespaceString);
        } catch (MalformedContentNameStringException e) {
            Log.severe("Attempt to configure invalid default CCNx namespace: {0}!", defaultNamespaceString);
            // Fix: chain the original exception as the cause instead of
            // discarding it, so the parse failure appears in stack traces.
            throw new RuntimeException("Attempt to configure invalid default CCNx namespace: " + defaultNamespaceString + "!", e);
        }
    }
    return _defaultNamespace;
}
/**
 * Sets the user namespace.
 * @param userNamespace string form of the namespace, or null to clear the
 *        cached value so it is re-resolved on next access
 * @throws MalformedContentNameStringException if the string cannot be parsed
 */
public static void setUserNamespace(String userNamespace) throws MalformedContentNameStringException {
    _userNamespace = (null == userNamespace) ? null : ContentName.fromNative(userNamespace);
}

/**
 * Returns the namespace for the current user, lazily resolved. A namespace
 * configured via property/environment wins; otherwise the namespace is
 * derived from the user namespace prefix plus the current user name.
 * @throws RuntimeException if the configured value is not a valid ContentName
 */
public static ContentName userNamespace() {
    if (null == _userNamespace) {
        String userNamespaceString = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_USER_NAMESPACE_PROPERTY, CCNX_USER_NAMESPACE_ENVIRONMENT_VARIABLE, null);
        if (null != userNamespaceString) {
            try {
                _userNamespace = ContentName.fromNative(userNamespaceString);
            } catch (MalformedContentNameStringException e) {
                Log.severe("Attempt to configure invalid default user namespace: {0}!", userNamespaceString);
                // Fix: chain the original exception as the cause instead of
                // discarding it.
                throw new RuntimeException("Attempt to configure invalid default user namespace: " + userNamespaceString + "!", e);
            }
        } else {
            _userNamespace = userNamespace(userName());
        }
    }
    return _userNamespace;
}
/**
 * Use the userNamespacePrefix() to generate a namespace for a particular user.
 * @param userName the user whose namespace to build; if null, the configured
 *                 default user name from userName() is used
 * @return the per-user namespace: &lt;user namespace prefix&gt;/&lt;userName&gt;
 */
public static ContentName userNamespace(String userName) {
    if (null == userName) {
        userName = userName();
    }
    return new ContentName(userNamespacePrefix(), userName);
}

/**
 * Sets the prefix under which per-user namespaces are generated.
 * @param userNamespacePrefix string form of the prefix, or null to clear the
 *        cached value so it is re-resolved on next access
 * @throws MalformedContentNameStringException if the string cannot be parsed
 */
public static void setUserNamespacePrefix(String userNamespacePrefix) throws MalformedContentNameStringException {
    _userNamespacePrefix = (null == userNamespacePrefix) ? null : ContentName.fromNative(userNamespacePrefix);
}
/**
 * Returns the prefix under which per-user namespaces live, lazily resolved.
 * A prefix configured via property/environment wins; otherwise the prefix is
 * the default namespace plus the DEFAULT_USER_NAMESPACE_MARKER component.
 * @throws RuntimeException if the configured value is not a valid ContentName
 */
public static ContentName userNamespacePrefix() {
    if (null == _userNamespacePrefix) {
        String userNamespacePrefixString = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_USER_NAMESPACE_PREFIX_PROPERTY, CCNX_USER_NAMESPACE_PREFIX_ENVIRONMENT_VARIABLE, null);
        if (null != userNamespacePrefixString) {
            try {
                _userNamespacePrefix = ContentName.fromNative(userNamespacePrefixString);
            } catch (MalformedContentNameStringException e) {
                Log.severe("Attempt to configure invalid default user namespace prefix: {0}!", userNamespacePrefixString);
                // Fix: chain the original exception as the cause instead of
                // discarding it.
                throw new RuntimeException("Attempt to configure invalid default user namespace prefix: " + userNamespacePrefixString + "!", e);
            }
        } else {
            _userNamespacePrefix = new ContentName(defaultNamespace(), DEFAULT_USER_NAMESPACE_MARKER);
        }
    }
    return _userNamespacePrefix;
}
/** Overrides the keystore file name (file name only, no directory). */
public static void setKeystoreFileName(String fileName) {
    _keystoreFileName = fileName;
}

/**
 * Returns the keystore file name (no directory component), lazily resolved
 * from property/environment configuration with a compiled-in default.
 */
public static String keystoreFileName() {
    if (_keystoreFileName == null) {
        _keystoreFileName = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_KEYSTORE_FILENAME_PROPERTY,
                CCNX_KEYSTORE_FILENAME_ENVIRONMENT_VARIABLE,
                DEFAULT_KEYSTORE_FILE_NAME);
    }
    return _keystoreFileName;
}

/**
 * Returns the configuration file name (no directory component), lazily
 * resolved from property/environment configuration with a compiled-in default.
 */
public static String configurationFileName() {
    if (_configurationFileName == null) {
        _configurationFileName = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_CONFIGURATION_FILENAME_PROPERTY,
                CCNX_CONFIGURATION_FILENAME_ENVIRONMENT_VARIABLE,
                DEFAULT_CONFIGURATION_FILE_NAME);
    }
    return _configurationFileName;
}

/** Returns the (fixed) key cache file name; not configurable. */
public static String keyCacheFileName() {
    return DEFAULT_KEY_CACHE_FILE_NAME;
}
/** Overrides the keystore password. */
public static void setKeystorePassword(String password) {
    _keystorePassword = password;
}

/**
 * Returns the keystore password, lazily resolved from property/environment
 * configuration with a compiled-in default.
 * NOTE(review): cached in a static String for the process lifetime; a
 * zeroable char[] would be a safer representation.
 */
public static String keystorePassword() {
    if (_keystorePassword == null) {
        _keystorePassword = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_KEYSTORE_PASSWORD_PROPERTY,
                CCNX_KEYSTORE_PASSWORD_ENVIRONMENT_VARIABLE,
                DEFAULT_KEYSTORE_PASSWORD);
    }
    return _keystorePassword;
}

/**
 * Don't provide a mechanism to set this here; this is actually configured on
 * the KeyManagers. Just provide a means for them to pull in
 * property/environment/configuration file parameters.
 * @return the configured default key locator string, or null when none is set
 */
public static String defaultKeyLocator() {
    return SystemConfiguration.retrievePropertyOrEnvironmentVariable(
            CCNX_DEFAULT_KEY_LOCATOR_PROPERTY,
            CCNX_DEFAULT_KEY_LOCATOR_ENVIRONMENT_VARIABLE,
            null);
}
/**
 * Do we load stored configuration state (cached secret keys, key locators,
 * and so on)? Lazily resolved from property/environment; defaults to true.
 */
public static boolean useKeyConfiguration() {
    if (_useKeyConfiguration == null) {
        // Local renamed from the copy-pasted "strPublish" for clarity.
        String strUseConfiguration = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_USE_KEY_CONFIGURATION_PROPERTY,
                CCNX_USE_KEY_CONFIGURATION_ENVIRONMENT_VARIABLE,
                SystemConfiguration.STRING_TRUE);
        _useKeyConfiguration = strUseConfiguration.equalsIgnoreCase(SystemConfiguration.STRING_TRUE);
    }
    return _useKeyConfiguration;
}

/**
 * Do we save the key cache when asked, and retrieve it on startup?
 * Lazily resolved; the default is DEFAULT_SAVE_KEY_CACHE_SETTING (kept off
 * until key cache encryption is in place, per the original note).
 */
public static boolean saveAndLoadKeyCache() {
    if (_saveAndLoadKeyCache == null) {
        String strSaveCache = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_SAVE_KEY_CACHE_PROPERTY,
                CCNX_SAVE_KEY_CACHE_ENVIRONMENT_VARIABLE,
                DEFAULT_SAVE_KEY_CACHE_SETTING);
        _saveAndLoadKeyCache = strSaveCache.equalsIgnoreCase(SystemConfiguration.STRING_TRUE);
    }
    return _saveAndLoadKeyCache;
}

/** Overrides the save/load-key-cache flag. */
public static void setSaveAndLoadKeyCache(boolean saveKeyCache) {
    _saveAndLoadKeyCache = saveKeyCache;
}
/**
 * Do we publish keys by default? Lazily resolved from property/environment;
 * defaults to true.
 */
public static boolean publishKeys() {
    if (_publishKeys == null) {
        String strPublishKeys = SystemConfiguration.retrievePropertyOrEnvironmentVariable(
                CCNX_PUBLISH_KEYS_PROPERTY,
                CCNX_PUBLISH_KEYS_ENVIRONMENT_VARIABLE,
                SystemConfiguration.STRING_TRUE);
        _publishKeys = strPublishKeys.equalsIgnoreCase(SystemConfiguration.STRING_TRUE);
    }
    return _publishKeys;
}

/** Overrides the publish-keys flag. */
public static void setPublishKeys(boolean publish) {
    _publishKeys = publish;
}

/** Directory under the user configuration directory where keys are stored. */
public static String keyRepositoryDirectory() {
    return userConfigurationDirectory() + FILE_SEP + KEY_DIRECTORY;
}

/** Full path of the address book file in the user configuration directory. */
public static String addressBookFileName() {
    return userConfigurationDirectory() + FILE_SEP + ADDRESSBOOK_FILE_NAME;
}

/** Default key-generation algorithm. */
public static String defaultKeyAlgorithm() {
    return DEFAULT_KEY_ALG;
}

/** Default alias under which the user's key is stored in the keystore. */
public static String defaultKeyAlias() {
    return DEFAULT_KEY_ALIAS;
}

/** Default keystore type. */
public static String defaultKeystoreType() {
    return DEFAULT_KEYSTORE_TYPE;
}

/** Default keystore type used for symmetric keys. */
public static String defaultSymmetricKeystoreType() {
    return DEFAULT_SYMMETRIC_KEYSTORE_TYPE;
}

/** Default key length, in bits. */
public static int defaultKeyLength() {
    return DEFAULT_KEY_LENGTH;
}

/** Name component marking the key namespace. */
public static Component defaultKeyNamespaceMarker() {
    return DEFAULT_KEY_NAMESPACE_MARKER;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.overlord.hrtr;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
import org.apache.druid.indexer.TaskStatus;
import org.apache.druid.indexing.common.task.Task;
import org.apache.druid.indexing.overlord.ImmutableWorkerInfo;
import org.apache.druid.indexing.overlord.TaskRunnerUtils;
import org.apache.druid.indexing.overlord.config.HttpRemoteTaskRunnerConfig;
import org.apache.druid.indexing.worker.TaskAnnouncement;
import org.apache.druid.indexing.worker.Worker;
import org.apache.druid.indexing.worker.WorkerHistoryItem;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.RetryUtils;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.StatusResponseHandler;
import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import org.apache.druid.server.coordination.ChangeRequestHttpSyncer;
import org.apache.druid.server.coordination.ChangeRequestsSnapshot;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.joda.time.DateTime;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * Tracks a single worker from the overlord's HTTP remote task runner: keeps a
 * snapshot of the tasks announced by the worker (synced over HTTP via
 * {@link ChangeRequestHttpSyncer}), the worker's enabled/disabled state and
 * blacklist bookkeeping, and exposes HTTP operations to assign and shut down
 * tasks on the worker.
 */
public class WorkerHolder
{
  private static final EmittingLogger log = new EmittingLogger(WorkerHolder.class);

  public static final TypeReference<ChangeRequestsSnapshot<WorkerHistoryItem>> WORKER_SYNC_RESP_TYPE_REF = new TypeReference<ChangeRequestsSnapshot<WorkerHistoryItem>>()
  {
  };

  private final Worker worker;

  // Copy of "worker" with a blank version string, served by toImmutable()
  // while the worker is disabled. Built lazily.
  // NOTE(review): written without synchronization — assumes toImmutable() is
  // not called concurrently; confirm with callers.
  private Worker disabledWorker;

  protected final AtomicBoolean disabled;

  // Known list of tasks running/completed on this worker.
  protected final AtomicReference<Map<String, TaskAnnouncement>> tasksSnapshotRef;

  private final AtomicReference<DateTime> lastCompletedTaskTime = new AtomicReference<>(DateTimes.nowUtc());
  private final AtomicReference<DateTime> blacklistedUntil = new AtomicReference<>();
  private final AtomicInteger continuouslyFailedTasksCount = new AtomicInteger(0);

  private final ChangeRequestHttpSyncer<WorkerHistoryItem> syncer;
  private final ObjectMapper smileMapper;
  private final HttpClient httpClient;
  private final HttpRemoteTaskRunnerConfig config;
  private final Listener listener;

  public WorkerHolder(
      ObjectMapper smileMapper,
      HttpClient httpClient,
      HttpRemoteTaskRunnerConfig config,
      ScheduledExecutorService workersSyncExec,
      Listener listener,
      Worker worker,
      List<TaskAnnouncement> knownAnnouncements
  )
  {
    this.smileMapper = smileMapper;
    this.httpClient = httpClient;
    this.config = config;
    this.listener = listener;
    this.worker = worker;
    //worker holder is created disabled and gets enabled after first sync success.
    this.disabled = new AtomicBoolean(true);

    this.syncer = new ChangeRequestHttpSyncer<>(
        smileMapper,
        httpClient,
        workersSyncExec,
        TaskRunnerUtils.makeWorkerURL(worker, "/"),
        "/druid-internal/v1/worker",
        WORKER_SYNC_RESP_TYPE_REF,
        config.getSyncRequestTimeout().toStandardDuration().getMillis(),
        config.getServerUnstabilityTimeout().toStandardDuration().getMillis(),
        createSyncListener()
    );

    // Seed the task snapshot from announcements already known to the runner.
    ConcurrentMap<String, TaskAnnouncement> announcements = new ConcurrentHashMap<>();
    if (knownAnnouncements != null) {
      knownAnnouncements.forEach(e -> announcements.put(e.getTaskId(), e));
    }
    tasksSnapshotRef = new AtomicReference<>(announcements);
  }

  public Worker getWorker()
  {
    return worker;
  }

  /** Tasks from the current snapshot whose status is still runnable. */
  private Map<String, TaskAnnouncement> getRunningTasks()
  {
    return tasksSnapshotRef.get().entrySet().stream().filter(
        e -> e.getValue().getTaskStatus().isRunnable()
    ).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
  }

  /** Sum of required capacity over all currently running tasks. */
  private int getCurrCapacityUsed()
  {
    int currCapacity = 0;
    for (TaskAnnouncement taskAnnouncement : getRunningTasks().values()) {
      currCapacity += taskAnnouncement.getTaskResource().getRequiredCapacity();
    }
    return currCapacity;
  }

  /** Availability groups of all currently running tasks. */
  private Set<String> getAvailabilityGroups()
  {
    Set<String> retVal = new HashSet<>();
    for (TaskAnnouncement taskAnnouncement : getRunningTasks().values()) {
      retVal.add(taskAnnouncement.getTaskResource().getAvailabilityGroup());
    }
    return retVal;
  }

  public DateTime getBlacklistedUntil()
  {
    return blacklistedUntil.get();
  }

  public void setLastCompletedTaskTime(DateTime completedTaskTime)
  {
    lastCompletedTaskTime.set(completedTaskTime);
  }

  public void setBlacklistedUntil(DateTime blacklistedUntil)
  {
    this.blacklistedUntil.set(blacklistedUntil);
  }

  /**
   * Immutable view of this worker's state for scheduling decisions. While the
   * holder is disabled, a copy of the worker with an empty version string is
   * reported instead of the real one.
   */
  public ImmutableWorkerInfo toImmutable()
  {
    Worker w = worker;
    if (disabled.get()) {
      if (disabledWorker == null) {
        disabledWorker = new Worker(
            worker.getScheme(),
            worker.getHost(),
            worker.getIp(),
            worker.getCapacity(),
            ""
        );
      }
      w = disabledWorker;
    }

    return new ImmutableWorkerInfo(
        w,
        getCurrCapacityUsed(),
        getAvailabilityGroups(),
        getRunningTasks().keySet(),
        lastCompletedTaskTime.get(),
        blacklistedUntil.get()
    );
  }

  public int getContinuouslyFailedTasksCount()
  {
    return continuouslyFailedTasksCount.get();
  }

  public void resetContinuouslyFailedTasksCount()
  {
    this.continuouslyFailedTasksCount.set(0);
  }

  public void incrementContinuouslyFailedTasksCount()
  {
    this.continuouslyFailedTasksCount.incrementAndGet();
  }

  /**
   * POSTs the task to the worker's assignTask endpoint, retrying up to the
   * configured maximum. Returns false immediately when the worker is
   * disabled. When retries are exhausted it deliberately returns true: the
   * assignment may have succeeded even though the response was lost, and the
   * task sync will reconcile the actual state.
   */
  public boolean assignTask(Task task)
  {
    if (disabled.get()) {
      log.info(
          "Received task[%s] assignment on worker[%s] when worker is disabled.",
          task.getId(),
          worker.getHost()
      );
      return false;
    }

    URL url = TaskRunnerUtils.makeWorkerURL(worker, "/druid-internal/v1/worker/assignTask");
    int numTries = config.getAssignRequestMaxRetries();

    try {
      return RetryUtils.retry(
          () -> {
            try {
              final StatusResponseHolder response = httpClient.go(
                  new Request(HttpMethod.POST, url)
                      .addHeader(HttpHeaders.Names.CONTENT_TYPE, SmileMediaTypes.APPLICATION_JACKSON_SMILE)
                      .setContent(smileMapper.writeValueAsBytes(task)),
                  StatusResponseHandler.getInstance(),
                  config.getAssignRequestHttpTimeout().toStandardDuration()
              ).get();

              if (response.getStatus().getCode() == 200) {
                return true;
              } else {
                throw new RE(
                    "Failed to assign task[%s] to worker[%s]. Response Code[%s] and Message[%s]. Retrying...",
                    task.getId(),
                    worker.getHost(),
                    response.getStatus().getCode(),
                    response.getContent()
                );
              }
            }
            catch (ExecutionException ex) {
              throw new RE(
                  ex,
                  "Request to assign task[%s] to worker[%s] failed. Retrying...",
                  task.getId(),
                  worker.getHost()
              );
            }
          },
          e -> !(e instanceof InterruptedException),
          numTries
      );
    }
    catch (Exception ex) {
      // Fix: attach the exception to the log line for diagnosability.
      log.info(ex, "Not sure whether task[%s] was successfully assigned to worker[%s].", task.getId(), worker.getHost());
      return true;
    }
  }

  /**
   * POSTs to the worker's shutdown endpoint for the given task, retrying up
   * to the configured maximum. Failures are logged, not thrown.
   */
  public void shutdownTask(String taskId)
  {
    final URL url = TaskRunnerUtils.makeWorkerURL(worker, "/druid/worker/v1/task/%s/shutdown", taskId);

    try {
      RetryUtils.retry(
          () -> {
            try {
              final StatusResponseHolder response = httpClient.go(
                  new Request(HttpMethod.POST, url),
                  StatusResponseHandler.getInstance(),
                  config.getShutdownRequestHttpTimeout().toStandardDuration()
              ).get();

              if (response.getStatus().getCode() == 200) {
                log.info(
                    "Sent shutdown message to worker: %s, status %s, response: %s",
                    worker.getHost(),
                    response.getStatus(),
                    response.getContent()
                );
                return null;
              } else {
                throw new RE("Attempt to shutdown task[%s] on worker[%s] failed.", taskId, worker.getHost());
              }
            }
            catch (ExecutionException e) {
              throw new RE(e, "Error in handling post to [%s] for task [%s]", worker.getHost(), taskId);
            }
          },
          e -> !(e instanceof InterruptedException),
          config.getShutdownRequestMaxRetries()
      );
    }
    catch (Exception ex) {
      if (ex instanceof InterruptedException) {
        Thread.currentThread().interrupt();
      }
      // Fix: message was garbled ("Failed to shutdown ... failed.") and the
      // exception was dropped; attach it so the cause is visible.
      log.error(ex, "Failed to shutdown task[%s] on worker[%s].", taskId, worker.getHost());
    }
  }

  public void start()
  {
    syncer.start();
  }

  public void stop()
  {
    syncer.stop();
  }

  /**
   * Blocks until the first successful sync, allowing up to three server HTTP
   * timeouts before giving up.
   */
  public void waitForInitialization() throws InterruptedException
  {
    if (!syncer.awaitInitialization(3 * syncer.getServerHttpTimeout(), TimeUnit.MILLISECONDS)) {
      throw new RE("Failed to sync with worker[%s].", worker.getHost());
    }
  }

  public ChangeRequestHttpSyncer<WorkerHistoryItem> getUnderlyingSyncer()
  {
    return syncer;
  }

  /**
   * Listener applying full/delta sync snapshots from the worker to the local
   * task snapshot, failing tasks that silently disappeared and forwarding
   * changes to the runner's {@link Listener}.
   */
  public ChangeRequestHttpSyncer.Listener<WorkerHistoryItem> createSyncListener()
  {
    return new ChangeRequestHttpSyncer.Listener<WorkerHistoryItem>()
    {
      @Override
      public void fullSync(List<WorkerHistoryItem> changes)
      {
        ConcurrentMap<String, TaskAnnouncement> newSnapshot = new ConcurrentHashMap<>();
        List<TaskAnnouncement> delta = new ArrayList<>();
        boolean isWorkerDisabled = disabled.get();

        for (WorkerHistoryItem change : changes) {
          if (change instanceof WorkerHistoryItem.TaskUpdate) {
            TaskAnnouncement announcement = ((WorkerHistoryItem.TaskUpdate) change).getTaskAnnouncement();
            newSnapshot.put(announcement.getTaskId(), announcement);
            delta.add(announcement);
          } else if (change instanceof WorkerHistoryItem.Metadata) {
            isWorkerDisabled = ((WorkerHistoryItem.Metadata) change).isDisabled();
          } else {
            log.makeAlert(
                "Got unknown sync update[%s] from worker[%s]. Ignored.",
                change.getClass().getName(),
                worker.getHost()
            ).emit();
          }
        }

        // Any previously-known incomplete task missing from the full snapshot
        // has vanished on the worker: synthesize a failure for it.
        for (TaskAnnouncement announcement : tasksSnapshotRef.get().values()) {
          if (!newSnapshot.containsKey(announcement.getTaskId()) && !announcement.getTaskStatus()
                                                                                 .isComplete()) {
            log.warn(
                "task[%s] in state[%s] suddenly disappeared on worker[%s]. failing it.",
                announcement.getTaskId(),
                announcement.getStatus(),
                worker.getHost()
            );
            delta.add(TaskAnnouncement.create(
                announcement.getTaskId(),
                announcement.getTaskType(),
                announcement.getTaskResource(),
                TaskStatus.failure(announcement.getTaskId()),
                announcement.getTaskLocation(),
                announcement.getTaskDataSource()
            ));
          }
        }

        tasksSnapshotRef.set(newSnapshot);

        notifyListener(delta, isWorkerDisabled);
      }

      @Override
      public void deltaSync(List<WorkerHistoryItem> changes)
      {
        List<TaskAnnouncement> delta = new ArrayList<>();
        boolean isWorkerDisabled = disabled.get();

        for (WorkerHistoryItem change : changes) {
          if (change instanceof WorkerHistoryItem.TaskUpdate) {
            TaskAnnouncement announcement = ((WorkerHistoryItem.TaskUpdate) change).getTaskAnnouncement();
            tasksSnapshotRef.get().put(announcement.getTaskId(), announcement);
            delta.add(announcement);
          } else if (change instanceof WorkerHistoryItem.TaskRemoval) {
            String taskId = ((WorkerHistoryItem.TaskRemoval) change).getTaskId();
            TaskAnnouncement announcement = tasksSnapshotRef.get().remove(taskId);
            if (announcement != null && !announcement.getTaskStatus().isComplete()) {
              // Removal of a still-incomplete task means it vanished on the
              // worker: synthesize a failure for it.
              log.warn(
                  "task[%s] in state[%s] suddenly disappeared on worker[%s]. failing it.",
                  announcement.getTaskId(),
                  announcement.getStatus(),
                  worker.getHost()
              );
              delta.add(TaskAnnouncement.create(
                  announcement.getTaskId(),
                  announcement.getTaskType(),
                  announcement.getTaskResource(),
                  TaskStatus.failure(announcement.getTaskId()),
                  announcement.getTaskLocation(),
                  announcement.getTaskDataSource()
              ));
            }
          } else if (change instanceof WorkerHistoryItem.Metadata) {
            isWorkerDisabled = ((WorkerHistoryItem.Metadata) change).isDisabled();
          } else {
            log.makeAlert(
                "Got unknown sync update[%s] from worker[%s]. Ignored.",
                change.getClass().getName(),
                worker.getHost()
            ).emit();
          }
        }

        notifyListener(delta, isWorkerDisabled);
      }

      /** Forwards each announcement to the listener and applies the disabled flag. */
      private void notifyListener(List<TaskAnnouncement> announcements, boolean isWorkerDisabled)
      {
        for (TaskAnnouncement announcement : announcements) {
          try {
            listener.taskAddedOrUpdated(announcement, WorkerHolder.this);
          }
          catch (Exception ex) {
            log.error(
                ex,
                "Unknown exception while updating task[%s] state from worker[%s].",
                announcement.getTaskId(),
                worker.getHost()
            );
          }
        }

        if (isWorkerDisabled != disabled.get()) {
          disabled.set(isWorkerDisabled);
          log.info("Worker[%s] disabled set to [%s].", worker.getHost(), isWorkerDisabled);
        }
      }
    };
  }

  public interface Listener
  {
    void taskAddedOrUpdated(TaskAnnouncement announcement, WorkerHolder workerHolder);
  }
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.utils;
import static java.util.Objects.requireNonNull;
import azkaban.Constants;
import azkaban.Constants.ConfigurationKeys;
import azkaban.alert.Alerter;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutionOptions;
import azkaban.executor.Status;
import azkaban.executor.mail.DefaultMailCreator;
import azkaban.executor.mail.MailCreator;
import azkaban.metrics.CommonMetrics;
import azkaban.sla.SlaOption;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.log4j.Logger;
/**
 * Sends Azkaban alert emails (flow success/failure/first-error notifications
 * and SLA violations) over SMTP, using the mail.* settings from {@link Props}.
 * All fields are final and assigned once in the constructor.
 */
@Singleton
public class Emailer extends AbstractMailer implements Alerter {

  private static final String HTTPS = "https";
  private static final String HTTP = "http";
  private static final Logger logger = Logger.getLogger(Emailer.class);
  private final CommonMetrics commonMetrics;
  // Scheme/host/port used by mail creators to build links back to the web server.
  private final String scheme;
  private final String clientHostname;
  private final String clientPortNumber;
  private final String mailHost;
  private final int mailPort;
  private final String mailUser;
  private final String mailPassword;
  private final String mailSender;
  private final String azkabanName;
  private final String tls;
  // When true, emails are constructed but never actually sent.
  private final boolean testMode;

  @Inject
  public Emailer(final Props props, final CommonMetrics commonMetrics) {
    super(props);
    this.commonMetrics = requireNonNull(commonMetrics, "commonMetrics is null.");
    this.azkabanName = props.getString("azkaban.name", "azkaban");
    this.mailHost = props.getString("mail.host", "localhost");
    this.mailPort = props.getInt("mail.port", DEFAULT_SMTP_PORT);
    this.mailUser = props.getString("mail.user", "");
    this.mailPassword = props.getString("mail.password", "");
    this.mailSender = props.getString("mail.sender", "");
    this.tls = props.getString("mail.tls", "false");
    final int mailTimeout = props.getInt("mail.timeout.millis", 30000);
    EmailMessage.setTimeout(mailTimeout);
    final int connectionTimeout =
        props.getInt("mail.connection.timeout.millis", 30000);
    EmailMessage.setConnectionTimeout(connectionTimeout);
    EmailMessage.setTotalAttachmentMaxSize(getAttachmentMaxSize());
    this.clientHostname = props.getString(ConfigurationKeys.AZKABAN_WEBSERVER_EXTERNAL_HOSTNAME,
        props.getString("jetty.hostname", "localhost"));
    if (props.getBoolean("jetty.use.ssl", true)) {
      this.scheme = HTTPS;
      this.clientPortNumber = Integer.toString(props
          .getInt(ConfigurationKeys.AZKABAN_WEBSERVER_EXTERNAL_SSL_PORT,
              props.getInt("jetty.ssl.port",
                  Constants.DEFAULT_SSL_PORT_NUMBER)));
    } else {
      this.scheme = HTTP;
      this.clientPortNumber = Integer.toString(
          props.getInt(ConfigurationKeys.AZKABAN_WEBSERVER_EXTERNAL_PORT, props.getInt("jetty.port",
              Constants.DEFAULT_PORT_NUMBER)));
    }
    this.testMode = props.getBoolean("test.mode", false);
  }

  /** Collects the ids of all nodes in the flow whose status is FAILED. */
  public static List<String> findFailedJobs(final ExecutableFlow flow) {
    final ArrayList<String> failedJobs = new ArrayList<>();
    for (final ExecutableNode node : flow.getExecutableNodes()) {
      if (node.getStatus() == Status.FAILED) {
        failedJobs.add(node.getId());
      }
    }
    return failedJobs;
  }

  private void sendSlaAlertEmail(final SlaOption slaOption, final String slaMessage) {
    final String subject =
        "SLA violation for " + getJobOrFlowName(slaOption) + " on " + getAzkabanName();
    // Unchecked cast: SlaOption stores its info map untyped; assumes the
    // INFO_EMAIL_LIST entry is a List<String> — confirm at the writer side.
    final List<String> emailList =
        (List<String>) slaOption.getInfo().get(SlaOption.INFO_EMAIL_LIST);
    logger.info("Sending SLA email " + slaMessage);
    sendEmail(emailList, subject, slaMessage);
  }

  /**
   * Send an email to the specified email list. No-op when the list is
   * null/empty or when test mode is enabled.
   */
  public void sendEmail(final List<String> emailList, final String subject, final String body) {
    if (emailList != null && !emailList.isEmpty()) {
      final EmailMessage message =
          super.createEmailMessage(subject, "text/html", emailList);
      message.setBody(body);
      if (!this.testMode) {
        try {
          message.sendEmail();
          logger.info("Sent email message " + body);
          this.commonMetrics.markSendEmailSuccess();
        } catch (final Exception e) {
          logger.error("Failed to send email message " + body, e);
          this.commonMetrics.markSendEmailFail();
        }
      }
    }
  }

  /** "flow:job" when a job name is present in the SLA info, otherwise just the flow name. */
  private String getJobOrFlowName(final SlaOption slaOption) {
    final String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
    final String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
    if (org.apache.commons.lang.StringUtils.isNotBlank(jobName)) {
      return flowName + ":" + jobName;
    } else {
      return flowName;
    }
  }

  /** Builds an EmailMessage pre-configured with the SMTP sender/TLS/auth settings. */
  private EmailMessage createConfiguredMessage() {
    final EmailMessage message = new EmailMessage(this.mailHost, this.mailPort, this.mailUser,
        this.mailPassword);
    message.setFromAddress(this.mailSender);
    message.setTLS(this.tls);
    message.setAuth(super.hasMailAuth());
    return message;
  }

  /** Resolves the MailCreator configured on the flow's execution options. */
  private MailCreator getMailCreator(final ExecutableFlow flow) {
    final ExecutionOptions option = flow.getExecutionOptions();
    final MailCreator mailCreator =
        DefaultMailCreator.getCreator(option.getMailCreator());
    logger.debug("ExecutorMailer using mail creator:"
        + mailCreator.getClass().getCanonicalName());
    return mailCreator;
  }

  /**
   * Sends a prepared flow email (unless creation failed or test mode is on)
   * and records success/failure metrics. mailType appears only in log lines;
   * the strings are chosen so log output matches the pre-refactoring code.
   */
  private void sendFlowEmail(final boolean mailCreated, final EmailMessage message,
      final String mailType, final ExecutableFlow flow) {
    if (mailCreated && !this.testMode) {
      try {
        message.sendEmail();
        logger.info("Sent " + mailType + " email message for execution " + flow.getExecutionId());
        this.commonMetrics.markSendEmailSuccess();
      } catch (final Exception e) {
        logger.error(
            "Failed to send " + mailType + " email message for execution " + flow.getExecutionId(),
            e);
        this.commonMetrics.markSendEmailFail();
      }
    }
  }

  /** Emails the first-error notification for the given execution. */
  public void sendFirstErrorMessage(final ExecutableFlow flow) {
    final EmailMessage message = createConfiguredMessage();
    final boolean mailCreated =
        getMailCreator(flow).createFirstErrorMessage(flow, message, this.azkabanName, this.scheme,
            this.clientHostname, this.clientPortNumber);
    sendFlowEmail(mailCreated, message, "first error", flow);
  }

  /** Emails the failure notification for the given execution. */
  public void sendErrorEmail(final ExecutableFlow flow, final String... extraReasons) {
    final EmailMessage message = createConfiguredMessage();
    final boolean mailCreated =
        getMailCreator(flow).createErrorEmail(flow, message, this.azkabanName, this.scheme,
            this.clientHostname, this.clientPortNumber, extraReasons);
    sendFlowEmail(mailCreated, message, "error", flow);
  }

  /** Emails the success notification for the given execution. */
  public void sendSuccessEmail(final ExecutableFlow flow) {
    final EmailMessage message = createConfiguredMessage();
    final boolean mailCreated =
        getMailCreator(flow).createSuccessEmail(flow, message, this.azkabanName, this.scheme,
            this.clientHostname, this.clientPortNumber);
    sendFlowEmail(mailCreated, message, "success", flow);
  }

  @Override
  public void alertOnSuccess(final ExecutableFlow exflow) {
    sendSuccessEmail(exflow);
  }

  @Override
  public void alertOnError(final ExecutableFlow exflow, final String... extraReasons) {
    sendErrorEmail(exflow, extraReasons);
  }

  @Override
  public void alertOnFirstError(final ExecutableFlow exflow) {
    sendFirstErrorMessage(exflow);
  }

  @Override
  public void alertOnSla(final SlaOption slaOption, final String slaMessage) {
    sendSlaAlertEmail(slaOption, slaMessage);
  }
}
| |
import android.content.Context;
import android.content.Intent;
import android.text.TextUtils;
import android.util.Log;
import android.util.SparseArray;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public final class imt
implements giw, ilb
{
// NOTE(review): decompiled, obfuscated code. Identifier names are decompiler
// artifacts; the comments below are inferences from usage within this file
// and must be confirmed against the original sources.
final SparseArray<Intent> b = new SparseArray(); // cleared/repopulated in b(List)
final hgs c;  // c.a() is compared against "LoginManager.last_updated" — presumably a clock/time source
final giz d;  // d.a(int) yields per-account state (gjb) in a(int, ild)
final kbu e;
final ima f;
ilo g;        // optional: fetched via mbb.b(...), so may be null
private final Context h;               // Android context used for service lookups
private final List<ilc> i = new ArrayList();
private final Object j = new Object(); // lock guarding the synchronized block in b(List)
private final gkr k;
private final ExecutorService l;       // pool used by invokeAll in b(List)/c(List)
private gjm m;
private List<ilp> n;                   // ilp plugins, partitioned by ilp.a() in b(List)
/**
 * Creates the component with a cached thread pool built from the imu thread
 * factory. (Decompiled code — obfuscated names.)
 */
public imt(Context paramContext)
{
    this(paramContext, Executors.newCachedThreadPool(new imu()));
}
/**
 * Resolves all collaborators from the mbb locator for the given context.
 * (Decompiled code — mbb.a appears to be a required lookup, mbb.b an
 * optional lookup, and mbb.c a lookup-all; confirm against original sources.)
 */
private imt(Context paramContext, ExecutorService paramExecutorService)
{
    this.h = paramContext;
    this.l = paramExecutorService;
    this.c = ((hgs)mbb.a(paramContext, hgs.class));
    this.m = ((gjm)mbb.a(paramContext, gjm.class));
    this.k = ((gkr)mbb.a(paramContext, gkr.class));
    this.d = ((giz)mbb.a(paramContext, giz.class));
    this.e = ((kbu)mbb.a(paramContext, kbu.class));
    this.n = mbb.c(paramContext, ilp.class);
    this.g = ((ilo)mbb.b(paramContext, ilo.class));
    this.f = ((ima)mbb.a(paramContext, ima.class));
}
/**
 * Decides whether the account identified by paramInt should be processed.
 * NOTE(review): decompiled output — the "label48"/"label75" jumps are
 * decompiler artifacts and this body is not valid Java as printed. Apparent
 * logic: logged-out accounts return false; otherwise true is returned when
 * paramild.a (a skip/force flag) is unset and the time since
 * "LoginManager.last_updated" is at least paramild.b. Confirm against the
 * original sources before relying on this reading.
 */
private final boolean a(int paramInt, ild paramild)
{
    long l1;
    if (paramInt != -1)
    {
        gjb localgjb = this.d.a(paramInt);
        l1 = localgjb.a("LoginManager.last_updated", 0L);
        if (localgjb.c("logged_out")) {
            break label75;
        }
        if (!c(paramInt)) {
            break label48;
        }
    }
    label48:
    while ((!paramild.a) && (this.c.a() - l1 >= paramild.b)) {
        return true;
    }
    label75:
    return false;
}
/**
 * Marks an account record as logged in when it passes the given filters.
 * NOTE(review): decompiled output with mangled loop structure. Apparent
 * behavior: a null filter list accepts the record; otherwise the ilh filters
 * are consulted. Accepted records that were not already "logged_in" have
 * their state reset (if they were logged out without an irrecoverable error)
 * and their "logged_in"/"logged_out" flags updated; returns whether the
 * record was accepted. Confirm against the original sources.
 */
static boolean a(gjc paramgjc, List<ilh> paramList)
{
    boolean bool = paramgjc.c("logged_in");
    int i1;
    if (paramList != null)
    {
        Iterator localIterator = paramList.iterator();
        i1 = 1;
        if (localIterator.hasNext())
        {
            ilh localilh = (ilh)localIterator.next();
            if ((i1 != 0) && (localilh.a(paramgjc))) {}
            for (int i2 = 1;; i2 = 0)
            {
                i1 = i2;
                break;
            }
        }
    }
    else
    {
        i1 = 1;
    }
    if (i1 == 0) {
        return false;
    }
    if (!bool)
    {
        if ((paramgjc.c("logged_out")) && (!paramgjc.c("has_irrecoverable_error"))) {
            paramgjc.c();
        }
        paramgjc.b("logged_in", true).b("logged_out", false);
    }
    return true;
}
/**
 * Builds an ild configuration with a = true, b = 0L, c = false.
 * (Decompiled code — field meanings inferred from a(int, ild), where "a"
 * short-circuits the time check; confirm against original sources.)
 */
static ild b()
{
    ile localile = new ile();
    localile.a = true;
    localile.b = 0L;
    localile.c = false;
    return new ild(localile);
}
/**
 * Processes the given imw work items: partitions the ilp plugins by type
 * under the "j" lock, prepares each item (building a kcg request where
 * needed), runs the items on the executor via invokeAll with up to 3 retry
 * rounds, restores the thread's interrupt flag if an InterruptedException was
 * swallowed along the way, and finally hands the list to c(paramList).
 * NOTE(review): decompiled output — the "label438"/"label456"/"label462"
 * jumps and the orphaned statements after break/continue are decompiler
 * artifacts, and this body is NOT valid Java as printed. Do not hand-edit;
 * recover the original source instead.
 */
private final void b(List<imw> paramList)
{
    efj.l();
    int i1 = 0;
    this.b.clear();
    for (;;)
    {
        int i2;
        ArrayList localArrayList1;
        synchronized (this.j)
        {
            int[] arrayOfInt = new int[2];
            arrayOfInt[0] = ils.a;
            arrayOfInt[1] = ils.b;
            i2 = 0;
            if (i2 >= 2) {
                break label438;
            }
            int i3 = arrayOfInt[i2];
            localArrayList1 = new ArrayList();
            Iterator localIterator1 = this.n.iterator();
            if (localIterator1.hasNext())
            {
                ilp localilp = (ilp)localIterator1.next();
                if (localilp.a() != i3) {
                    continue;
                }
                localArrayList1.add(localilp);
            }
        }
        Iterator localIterator2 = paramList.iterator();
        imw localimw2;
        while (localIterator2.hasNext())
        {
            localimw2 = (imw)localIterator2.next();
            if (!localimw2.j) {
                if (localimw2.a.c)
                {
                    localimw2.f = null;
                    localimw2.g = true;
                    localimw2.h = new ArrayList();
                }
                else
                {
                    localimw2.f = new kcg(localimw2.a.a, localimw2.b, null, localimw2.d, null);
                    if (localimw2.l.c(localimw2.i)) {
                        break label456;
                    }
                    if (localimw2.d) {
                        break label462;
                    }
                    break label456;
                }
            }
        }
        Object localObject3;
        int i4;
        int i5;
        ArrayList localArrayList3;
        for (;;)
        {
            localimw2.g = bool;
            localimw2.h = new ArrayList(localArrayList1);
            break;
            localObject3 = new ArrayList(paramList);
            i4 = 0;
            i5 = i1;
            if ((i4 >= 3) || (((List)localObject3).isEmpty())) {
                break label502;
            }
            ArrayList localArrayList2 = new ArrayList();
            Iterator localIterator3 = ((List)localObject3).iterator();
            while (localIterator3.hasNext()) {
                ((imw)localIterator3.next()).a(localArrayList2);
            }
            try
            {
                this.l.invokeAll(localArrayList2);
                localArrayList3 = new ArrayList();
                Iterator localIterator4 = ((List)localObject3).iterator();
                while (localIterator4.hasNext())
                {
                    imw localimw1 = (imw)localIterator4.next();
                    if (!localimw1.a()) {
                        localArrayList3.add(localimw1);
                    }
                }
                label438:
                if (i1 != 0) {
                    Thread.currentThread().interrupt();
                }
                c(paramList);
                return;
            }
            catch (InterruptedException localInterruptedException)
            {
                label456:
                label462:
                i6 = 1;
                localObject4 = localObject3;
                i4++;
                i5 = i6;
                localObject3 = localObject4;
            }
            boolean bool = true;
            continue;
            bool = false;
        }
        for (;;)
        {
            break;
            Object localObject4 = localArrayList3;
            int i6 = i5;
        }
        label502:
        i2++;
        i1 = i5;
    }
}
  /**
   * Finalization pass: wraps each task in an {@code imv} and submits the batch
   * to the executor, retrying up to three times on interruption, then restores
   * the interrupt flag if one was swallowed.
   *
   * <p>NOTE(review): raw JD-Core output — locals ({@code localArrayList},
   * {@code i4}, {@code i5}, ...) are referenced before their declarations and
   * the {@code for (;;)} inside the catch never terminates as written; this
   * method does not compile and must be reconstructed from bytecode before any
   * behavioral edit.
   */
  private final void c(List<imw> paramList)
  {
    int i1 = 0;
    int i2 = 0;  // becomes non-zero once an InterruptedException occurred
    int i3 = 0;  // becomes non-zero once invokeAll completed successfully
    if (i1 < 3)
    {
      localArrayList = new ArrayList(paramList.size());
      localIterator = paramList.iterator();
      while (localIterator.hasNext()) {
        localArrayList.add(new imv(this, (imw)localIterator.next()));
      }
      try
      {
        this.l.invokeAll(localArrayList);
        i4 = i2;
        i5 = 1;
      }
      catch (InterruptedException localInterruptedException)
      {
        for (;;)
        {
          i4 = 1;
          i5 = i3;
        }
        i1++;
        i3 = i5;
        i2 = i4;
      }
      if (i5 == 0) {}
    }
    while (i2 == 0)
    {
      ArrayList localArrayList;
      Iterator localIterator;
      int i4;
      int i5;
      return;
      break;
    }
    // Re-assert the interrupt status swallowed in the catch block above.
    Thread.currentThread().interrupt();
  }
private final void e(int paramInt)
{
List localList = mbb.c(this.h, ilt.class);
int i1 = localList.size();
for (int i2 = 0; i2 < i1; i2++) {
((ilt)localList.get(i2)).a(paramInt);
}
}
final ilx a(String paramString1, String paramString2, ilf paramilf, boolean paramBoolean)
{
ilx localilx = new ilx();
try
{
gjj localgjj = this.m.a(paramString1);
if (localgjj != null)
{
imw localimw = new imw(this, localgjj, paramString2, paramilf, paramBoolean);
b(Collections.singletonList(localimw));
localilx.a = true;
localimw.a(localilx);
}
return localilx;
}
catch (gjo localgjo)
{
localilx.e = Collections.singletonList(localgjo);
}
return localilx;
}
final List<ilh> a(List<Class<? extends ilh>> paramList)
{
ArrayList localArrayList = new ArrayList();
Iterator localIterator = paramList.iterator();
while (localIterator.hasNext())
{
Class localClass = (Class)localIterator.next();
localArrayList.add((ilh)mbb.a(this.h, localClass));
}
return localArrayList;
}
  /**
   * Public entry point that delegates to {@link #b(int)}, which (when the id is
   * valid) logs the account out under the {@code this.j} lock.
   *
   * @param paramInt account id
   */
  public final void a(int paramInt)
  {
    b(paramInt);
  }
  /**
   * Starts (or restarts) a login request bound to the given activity/fragment
   * host.
   *
   * <p>If the retained {@code imm} already has a request in flight
   * ({@code ab} set), that request is cancelled and its callback is notified
   * with result {@code -1} before the new request state is recorded and
   * {@code v()} kicks off processing.
   *
   * <p>NOTE(review): field semantics (ab/b/c/d/Z/aa) are inferred from usage in
   * this obfuscated, decompiled class — confirm against the original source.
   *
   * @param parambw     host used to obtain the retained {@code imm} instance
   * @param paramilf    request descriptor for the new request
   * @param paramString tag/name associated with the new request
   */
  public final void a(bw parambw, ilf paramilf, String paramString)
  {
    imm localimm = imm.a(parambw);
    if (localimm.ab)
    {
      // Cancel the in-flight request and report -1 to its callback.
      localimm.a(true);
      localimm.a.a(localimm.b, localimm.c, -1);
    }
    // Record the new request and reset any stale result state.
    localimm.ab = true;
    localimm.b = paramilf;
    localimm.c = paramString;
    localimm.d = null;
    localimm.Z = null;
    localimm.aa = -1;
    localimm.v();
  }
  /**
   * Registers an {@code ilc} observer; it will be invoked by
   * {@link #a(ilf, String, int)} when results are dispatched.
   *
   * @param paramilc observer to add to {@code this.i}
   */
  public final void a(ilc paramilc)
  {
    this.i.add(paramilc);
  }
  /**
   * Refreshes all device accounts using the given options; delegates to
   * {@link #b(ild)} and discards its {@code ilx} result.
   *
   * @param paramild refresh options
   */
  public final void a(ild paramild)
  {
    b(paramild);
  }
final void a(ilf paramilf, String paramString, int paramInt)
{
Iterator localIterator = this.i.iterator();
while (localIterator.hasNext()) {
((ilc)localIterator.next()).a(paramilf, paramString, paramInt);
}
}
public final void a(String paramString, ild paramild)
{
int i1 = this.d.a(paramString);
if ((i1 != -1) && (this.d.a(i1).c("logged_out"))) {
throw new ikm("refreshAccount called for a logged out account");
}
if (a(i1, paramild)) {
a(paramString, null, null, paramild.c);
}
}
  /**
   * Checks whether every device account is ready for login: none may need a
   * refresh, and the account store must not contain accounts that were removed
   * from the device.
   *
   * <p>NOTE(review): decompiled code — the bare {@code new StringBuilder(...)}
   * chains are the remains of stripped log statements; their results are
   * intentionally discarded.
   *
   * @return {@code true} when all accounts are ready; {@code false} when any
   *         account needs refresh, a stale account remains in the store, or the
   *         device account list could not be obtained
   */
  final boolean a()
  {
    ild localild = b();
    // Names of all accounts currently present on the device.
    HashSet localHashSet = new HashSet();
    for (;;)
    {
      int i2;
      try
      {
        gjj[] arrayOfgjj = this.m.a();
        int i1 = arrayOfgjj.length;
        i2 = 0;
        if (i2 >= i1) {
          break;
        }
        String str = arrayOfgjj[i2].a;
        localHashSet.add(str);
        int i4 = this.d.a(str);
        if (a(i4, localild))
        {
          // Stripped log: account needs refresh, so not ready.
          new StringBuilder(68).append("Account ").append(i4).append(" is not ready for login because it needs refresh.");
          return false;
        }
      }
      catch (gjo localgjo)
      {
        if (Log.isLoggable("LoginManager", 6)) {
          Log.e("LoginManager", "Failed to obtain device accounts when checking if accounts are ready for login", localgjo);
        }
        return false;
      }
      i2++;
    }
    // Any stored account whose name no longer exists on the device blocks login.
    Iterator localIterator = this.d.a().iterator();
    while (localIterator.hasNext())
    {
      int i3 = ((Integer)localIterator.next()).intValue();
      if (!localHashSet.contains(this.d.a(i3).b("account_name")))
      {
        // Stripped log: stale account found in the store.
        new StringBuilder(87).append("Account ").append(i3).append(" is not ready for login because account store has a removed account.");
        return false;
      }
    }
    return true;
  }
  /**
   * Returns whether the account satisfies the request's validators (and, when
   * the request requires it, is logged in).
   *
   * <p>NOTE(review): decompiled code — the empty {@code if (!this.d.c(...)) {}}
   * suggests a branch was lost in decompilation; the {@code while}-with-return
   * below is the decompiler's encoding of a plain {@code if}. Verify against
   * the bytecode before changing.
   *
   * @param paramilf request descriptor; {@code d} demands a logged-in account
   * @param paramInt account id to validate
   * @return {@code false} if a validator fails ({@link #b(ilf, int)} non-null)
   *         or a required login is missing; {@code true} otherwise
   */
  public final boolean a(ilf paramilf, int paramInt)
  {
    boolean bool = paramilf.d;
    if (!this.d.c(paramInt)) {}
    while ((b(paramilf, paramInt) != null) || ((bool) && (!this.d.a(paramInt).c("logged_in")))) {
      return false;
    }
    return true;
  }
final ilh b(ilf paramilf, int paramInt)
{
ArrayList localArrayList = new ArrayList();
gjb localgjb = this.d.a(paramInt);
if (this.g != null) {
this.g.a(localgjb.b("account_name"), localArrayList);
}
localArrayList.addAll(paramilf.u);
Iterator localIterator = a(localArrayList).iterator();
while (localIterator.hasNext())
{
ilh localilh = (ilh)localIterator.next();
if (!localilh.a(localgjb)) {
return localilh;
}
}
return null;
}
  /**
   * Refreshes every device account that needs it and aggregates the outcome.
   *
   * <p>NOTE(review): raw JD-Core output — the forward {@code break label271}
   * statements are decompiler artifacts and not valid Java (a labeled break
   * may only target an enclosing statement); this method cannot compile as
   * written. The intended flow appears to be: under the {@code this.j} lock,
   * build one update task per account for which {@code a(id, options)} is
   * true, run them via {@link #b(List)}, then merge each task's result into a
   * single {@code ilx}. The bare {@code new StringBuilder(...)} chains are
   * stripped log statements.
   *
   * @param paramild refresh options
   * @return aggregated refresh outcome
   */
  final ilx b(ild paramild)
  {
    this.k.a();
    for (;;)
    {
      int i2;
      ilx localilx;
      synchronized (this.j)
      {
        efj.l();
        gjj[] arrayOfgjj = this.m.a();
        ArrayList localArrayList = new ArrayList(arrayOfgjj.length);
        int i1 = arrayOfgjj.length;
        i2 = 0;
        if (i2 < i1)
        {
          gjj localgjj = arrayOfgjj[i2];
          String str = localgjj.a;
          int i3 = this.d.a(str);
          boolean bool1 = a(i3, paramild);
          // Stripped log: whether this account needs a refresh.
          new StringBuilder(40).append("Account ").append(i3).append(" needs refresh: ").append(bool1);
          if (!bool1) {
            break label271;
          }
          localArrayList.add(new imw(this, localgjj, null, null, paramild.c));
          break label271;
        }
        localilx = new ilx();
        b(localArrayList);
        localilx.a = true;
        Iterator localIterator = localArrayList.iterator();
        if (localIterator.hasNext())
        {
          ((imw)localIterator.next()).a(localilx);
          int i4 = localilx.d;
          boolean bool2 = localilx.a;
          // Stripped log: per-account update outcome.
          new StringBuilder(45).append("Account update for ").append(i4).append(" success: ").append(bool2);
        }
      }
      return localilx;
      label271:
      i2++;
    }
  }
public final void b(int paramInt)
{
if (!this.d.d(paramInt)) {
return;
}
synchronized (this.j)
{
d(paramInt);
return;
}
}
  /**
   * Unregisters an observer previously added via {@link #a(ilc)}.
   *
   * @param paramilc observer to remove from {@code this.i}
   */
  public final void b(ilc paramilc)
  {
    this.i.remove(paramilc);
  }
  /**
   * Returns whether the account's recorded {@code LoginManager.build_version}
   * matches the current build version from {@code this.f}.
   *
   * <p>NOTE(review): decompiled artifact — the statements following
   * {@code return true;} inside the {@code do} block are unreachable, so this
   * method does not compile as written. The apparent intent: return
   * {@code true} for {@code paramInt == -1} or when the stored build version
   * equals the current one, {@code false} when they differ — confirm against
   * the bytecode.
   *
   * @param paramInt account id, or -1 for "no account"
   */
  final boolean c(int paramInt)
  {
    if (paramInt == -1) {}
    gjb localgjb;
    do
    {
      return true;
      localgjb = this.d.a(paramInt);
    } while (!TextUtils.equals(this.f.a(), localgjb.b("LoginManager.build_version")));
    return false;
  }
final void d(int paramInt)
{
if ((!this.d.d(paramInt)) && (this.d.e(paramInt))) {
return;
}
gjb localgjb1 = this.d.a(paramInt);
if (!localgjb1.c("is_managed_account"))
{
String str = localgjb1.b("account_name");
List localList = this.d.a();
int i1 = localList.size();
for (int i2 = 0; i2 < i1; i2++)
{
Integer localInteger = (Integer)localList.get(i2);
gjb localgjb2 = this.d.a(localInteger.intValue());
if ((localgjb2.a()) && (localgjb2.c("is_managed_account")) && (localgjb2.b("account_name").equals(str))) {
d(localInteger.intValue());
}
}
}
this.d.b(paramInt).b("logged_out", true).b("logged_in", false).d();
e(paramInt);
}
}
/* Location: F:\apktool\apktool\com.google.android.apps.plus\classes-dex2jar.jar
* Qualified Name: imt
* JD-Core Version: 0.7.0.1
*/
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DisassociateRouteTableResponseType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
 * DisassociateRouteTableResponseType bean class.
 *
 * <p>ADB (Axis2 Data Binding) bean for the EC2
 * {@code DisassociateRouteTableResponseType} schema type (namespace
 * {@code http://ec2.amazonaws.com/doc/2012-08-15/}): carries the request id
 * echoed by the service and the boolean outcome of the call.
 *
 * <p>NOTE(review): this class was auto-generated from the WSDL by Apache
 * Axis2 1.5.6 — prefer regenerating from the WSDL over hand-editing.
 */
public class DisassociateRouteTableResponseType
implements org.apache.axis2.databinding.ADBBean{
    /* This type was generated from the piece of schema that had
    name = DisassociateRouteTableResponseType
    Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
    Namespace Prefix = ns1
    */

    // Returns the canonical "ns1" prefix for the EC2 namespace, otherwise a
    // freshly generated unique prefix.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for RequestId
     */
    protected java.lang.String localRequestId ;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getRequestId(){
        return localRequestId;
    }

    /**
     * Auto generated setter method
     * @param param RequestId
     */
    public void setRequestId(java.lang.String param){
        this.localRequestId=param;
    }

    /**
     * field for _return
     */
    protected boolean local_return ;

    /**
     * Auto generated getter method
     * @return boolean
     */
    public boolean get_return(){
        return local_return;
    }

    /**
     * Auto generated setter method
     * @param param _return
     */
    public void set_return(boolean param){
        this.local_return=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        // Readers that do not know the property throw IllegalArgumentException,
        // which is treated as "not MTOM aware".
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Builds a lazily-serialized OM element backed by this bean.
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                DisassociateRouteTableResponseType.this.serialize(parentQName,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName,factory,dataSource);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Writes this bean as <parent><requestId>...</requestId><return>...</return></parent>,
    // optionally emitting an xsi:type attribute when serializeType is true.
    public void serialize(final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory,
            org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
            boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        // Open the parent element, binding the namespace prefix if the writer
        // does not already know it.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }

                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType){
            java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2012-08-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        namespacePrefix+":DisassociateRouteTableResponseType",
                        xmlWriter);
            } else {
                writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                        "DisassociateRouteTableResponseType",
                        xmlWriter);
            }
        }

        // <requestId> — mandatory element; null is a serialization error.
        namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix,"requestId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"requestId");
            }
        } else {
            xmlWriter.writeStartElement("requestId");
        }

        if (localRequestId==null){
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
        }else{
            xmlWriter.writeCharacters(localRequestId);
        }

        xmlWriter.writeEndElement();

        // <return> — primitive boolean, so the null check below is generated
        // dead code (if (false)) and can never fire.
        namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix,"return", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace,"return");
            }
        } else {
            xmlWriter.writeStartElement("return");
        }

        if (false) {
            throw new org.apache.axis2.databinding.ADBException("return cannot be null!!");
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));
        }

        xmlWriter.writeEndElement();

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
            javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }
            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    private void writeQNames(javax.xml.namespace.QName[] qnames,
            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }
                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix does not collide with one that
            // is already bound in the writer's namespace context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        // Flat alternating list of QName/value pairs consumed by the ADB reader.
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                "requestId"));
        if (localRequestId != null){
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localRequestId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("requestId cannot be null!!");
        }
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/",
                "return"));
        elementList.add(
                org.apache.axis2.databinding.utils.ConverterUtil.convertToString(local_return));

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{
        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DisassociateRouteTableResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            DisassociateRouteTableResponseType object =
                    new DisassociateRouteTableResponseType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix ="";
            java.lang.String namespaceuri ="";
            try {

                // Skip to the first start/end element event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // If an xsi:type points at a derived type, delegate to the
                // ExtensionMapper so the subtype's parser handles the payload.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName!=null){
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1){
                            nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix==null?"":nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                        if (!"DisassociateRouteTableResponseType".equals(type)){
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (DisassociateRouteTableResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri,type,reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                // <requestId> — required first child element.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","requestId").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.setRequestId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                    reader.next();
                } // End of if for expected property start element
                else{
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                // <return> — required second child element.
                while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","return").equals(reader.getName())){
                    java.lang.String content = reader.getElementText();
                    object.set_return(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToBoolean(content));
                    reader.next();
                } // End of if for expected property start element
                else{
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }
    }//end of factory class
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.xcode.plmerge;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.io.ByteSource;
import com.google.devtools.build.xcode.common.Platform;
import com.google.devtools.build.xcode.util.Equaling;
import com.google.devtools.build.xcode.util.Mapping;
import com.google.devtools.build.xcode.util.Value;
import com.dd.plist.BinaryPropertyListWriter;
import com.dd.plist.NSArray;
import com.dd.plist.NSDictionary;
import com.dd.plist.NSObject;
import com.dd.plist.NSString;
import com.dd.plist.PropertyListFormatException;
import com.dd.plist.PropertyListParser;
import org.xml.sax.SAXException;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.parsers.ParserConfigurationException;
/**
 * Utility code for merging project plist files.
 *
 * <p>Supports reading plists in ASCII, binary, or XML form (including plists
 * wrapped in a UTF-8 BOM), merging them, applying Xcode-style "automatic"
 * entries and variable substitutions, and writing the result as a binary
 * plist plus an optional PkgInfo file.
 */
public class PlistMerging extends Value<PlistMerging> {

  private static final String BUNDLE_IDENTIFIER_PLIST_KEY = "CFBundleIdentifier";

  // Mapping from TARGETED_DEVICE_FAMILY names to the integer codes used by the
  // UIDeviceFamily plist entry.
  private static final ImmutableBiMap<String, Integer> DEVICE_FAMILIES =
      ImmutableBiMap.of("IPHONE", 1, "IPAD", 2);

  /**
   * Exception type thrown when validation of the plist file fails.
   */
  public static class ValidationException extends RuntimeException {
    ValidationException(String message) {
      super(message);
    }
  }

  private final NSDictionary merged;

  @VisibleForTesting
  PlistMerging(NSDictionary merged) {
    super(merged);
    this.merged = merged;
  }

  /**
   * Merges several plist files into a single {@code NSDictionary}. Each file should be a plist (of
   * one of these formats: ASCII, Binary, or XML) that contains an NSDictionary. Later files win on
   * key conflicts because entries are applied in iteration order.
   */
  @VisibleForTesting
  static NSDictionary merge(Iterable<? extends Path> sourceFilePaths) throws IOException {
    NSDictionary result = new NSDictionary();
    for (Path sourceFilePath : sourceFilePaths) {
      result.putAll(readPlistFile(sourceFilePath));
    }
    return result;
  }

  /**
   * Reads a single plist file into an {@code NSDictionary}, skipping a UTF-8 BOM if present.
   *
   * @throws IOException if the file cannot be read or parsed in any supported format
   */
  public static NSDictionary readPlistFile(final Path sourceFilePath) throws IOException {
    ByteSource rawBytes = new Utf8BomSkippingByteSource(sourceFilePath);
    try {
      try (InputStream in = rawBytes.openStream()) {
        return (NSDictionary) PropertyListParser.parse(in);
      } catch (PropertyListFormatException | ParseException e) {
        // If we failed to parse, the plist may implicitly be a map. To handle this, wrap the plist
        // with {}.
        // TODO(bazel-team): Do this in a cleaner way.
        ByteSource concatenated = ByteSource.concat(
            ByteSource.wrap(new byte[] {'{'}),
            rawBytes,
            ByteSource.wrap(new byte[] {'}'}));
        try (InputStream in = concatenated.openStream()) {
          return (NSDictionary) PropertyListParser.parse(in);
        }
      }
    } catch (PropertyListFormatException | ParseException | ParserConfigurationException
        | SAXException e) {
      throw new IOException(e);
    }
  }

  /**
   * Writes the results of a merge operation to a plist file.
   * @param plistPath the path of the plist to write in binary format
   */
  public void writePlist(Path plistPath) throws IOException {
    try (OutputStream out = Files.newOutputStream(plistPath)) {
      BinaryPropertyListWriter.write(out, merged);
    }
  }

  /**
   * Writes a PkgInfo file based on certain keys in the merged plist.
   * @param pkgInfoPath the path of the PkgInfo file to write. In many iOS apps, this file just
   *     contains the raw string {@code APPL????}.
   */
  public void writePkgInfo(Path pkgInfoPath) throws IOException {
    // Package type and signature default to the conventional APPL / ???? values.
    String pkgInfo =
        Mapping.of(merged, "CFBundlePackageType").or(NSObject.wrap("APPL")).toString()
        + Mapping.of(merged, "CFBundleSignature").or(NSObject.wrap("????")).toString();
    Files.write(pkgInfoPath, pkgInfo.getBytes(StandardCharsets.UTF_8));
  }

  /** Invokes {@link #writePlist(Path)} and {@link #writePkgInfo(Path)}. */
  public void write(Path plistPath, Path pkgInfoPath) throws IOException {
    writePlist(plistPath);
    writePkgInfo(pkgInfoPath);
  }

  /**
   * Returns a map containing entries that should be added to the merged plist. These are usually
   * generated by Xcode automatically during the build process.
   *
   * @param targetedDeviceFamily device family names ("IPHONE"/"IPAD")
   * @param platform target platform (device or simulator)
   * @param sdkVersion SDK version string, e.g. "7.1"
   * @param minimumOsVersion minimum OS version string for device builds
   */
  public static Map<String, NSObject> automaticEntries(
      Collection<String> targetedDeviceFamily, Platform platform, String sdkVersion,
      String minimumOsVersion) {
    ImmutableMap.Builder<String, NSObject> result = new ImmutableMap.Builder<>();

    // Sorted numeric codes for the requested device families.
    List<Integer> uiDeviceFamily = FluentIterable.from(targetedDeviceFamily)
        .transform(Maps.asConverter(DEVICE_FAMILIES))
        .toSortedList(Ordering.natural());
    result.put("UIDeviceFamily", NSObject.wrap(uiDeviceFamily.toArray()));
    result.put("DTPlatformName", NSObject.wrap(platform.getLowerCaseNameInPlist()));
    result.put("DTSDKName", NSObject.wrap(platform.getLowerCaseNameInPlist() + sdkVersion));
    result.put("CFBundleSupportedPlatforms", new NSArray(NSObject.wrap(platform.getNameInPlist())));
    if (platform == Platform.DEVICE) {
      // TODO(bazel-team): Figure out if there are more appropriate values to put here, or if any
      // can be omitted. These have been copied from a plist file generated by Xcode for a device
      // build.
      result.put("DTCompiler", NSObject.wrap("com.apple.compilers.llvm.clang.1_0"));
      result.put("BuildMachineOSBuild", NSObject.wrap("13D65"));
      result.put("DTPlatformBuild", NSObject.wrap("11B508"));
      result.put("DTSDKBuild", NSObject.wrap("11B508"));
      result.put("DTXcode", NSObject.wrap("0502"));
      result.put("DTXcodeBuild", NSObject.wrap("5A3005"));
      result.put("DTPlatformVersion", NSObject.wrap(sdkVersion));
      result.put("MinimumOSVersion", NSObject.wrap(minimumOsVersion));
    }

    return result.build();
  }

  /**
   * Generates final merged Plist file and PkgInfo file in the specified locations, and includes the
   * "automatic" entries in the Plist.
   *
   * @throws IllegalArgumentException if an automatic entry collides with a source-file entry
   */
  public static PlistMerging from(List<Path> sourceFiles, Map<String, NSObject> automaticEntries,
      Map<String, String> substitutions, KeysToRemoveIfEmptyString keysToRemoveIfEmptyString)
      throws IOException {
    NSDictionary merged = PlistMerging.merge(sourceFiles);

    // Automatic entries must not also appear in the input plists.
    Set<String> conflictingEntries = Sets.intersection(automaticEntries.keySet(), merged.keySet());

    Preconditions.checkArgument(conflictingEntries.isEmpty(),
        "The following plist entries are generated automatically, but are present in more than one "
            + "of the input lists: %s", conflictingEntries);
    merged.putAll(automaticEntries);

    // Substitute ${VAR}/$(VAR) references in every string-valued entry. Only
    // values are replaced (put on an existing key), so iteration stays safe.
    for (Map.Entry<String, NSObject> entry : merged.entrySet()) {
      if (entry.getValue().toJavaObject() instanceof String) {
        String newValue = substituteEnvironmentVariable(
            substitutions, (String) entry.getValue().toJavaObject());
        merged.put(entry.getKey(), newValue);
      }
    }

    for (String key : keysToRemoveIfEmptyString) {
      if (Equaling.of(Mapping.of(merged, key), Optional.<NSObject>of(new NSString("")))) {
        merged.remove(key);
      }
    }

    return new PlistMerging(merged);
  }

  // Assume that if an RFC 1034 format string is specified, the value is RFC 1034 compliant.
  private static String substituteEnvironmentVariable(
      Map<String, String> substitutions, String string) {
    // The substitution is *not* performed recursively.
    for (Map.Entry<String, String> variable : substitutions.entrySet()) {
      for (String variableNameWithFormatString : withFormatStrings(variable.getKey())) {
        string = string
            .replace("${" + variableNameWithFormatString + "}", variable.getValue())
            .replace("$(" + variableNameWithFormatString + ")", variable.getValue());
      }
    }
    return string;
  }

  // A variable may be referenced bare or with the ":rfc1034identifier" format suffix.
  private static ImmutableSet<String> withFormatStrings(String variableName) {
    return ImmutableSet.of(variableName, variableName + ":rfc1034identifier");
  }

  @VisibleForTesting
  NSDictionary asDictionary() {
    return merged;
  }

  /**
   * Sets the given executable name on this merged plist in the {@code CFBundleExecutable}
   * attribute.
   *
   * @param executableName name of the bundle executable
   * @return this plist merging
   * @throws ValidationException if the plist already contains an incompatible
   *     {@code CFBundleExecutable} entry
   */
  public PlistMerging setExecutableName(String executableName) {
    NSString bundleExecutable = (NSString) merged.get("CFBundleExecutable");

    if (bundleExecutable == null) {
      merged.put("CFBundleExecutable", executableName);
    } else if (!executableName.equals(bundleExecutable.getContent())) {
      throw new ValidationException(String.format(
          "Blaze generated the executable %s but the Plist CFBundleExecutable is %s",
          executableName, bundleExecutable));
    }

    return this;
  }

  /**
   * Sets the given identifier on this merged plist in the {@code CFBundleIdentifier}
   * attribute.
   *
   * @param primaryIdentifier used to set the bundle identifier or override the existing one from
   *     plist file, can be null
   * @param fallbackIdentifier used to set the bundle identifier if it is not set by plist file or
   *     primary identifier, can be null
   * @return this plist merging
   */
  public PlistMerging setBundleIdentifier(String primaryIdentifier, String fallbackIdentifier) {
    NSString bundleIdentifier = (NSString) merged.get(BUNDLE_IDENTIFIER_PLIST_KEY);

    if (primaryIdentifier != null) {
      merged.put(BUNDLE_IDENTIFIER_PLIST_KEY, primaryIdentifier);
    } else if (bundleIdentifier == null && fallbackIdentifier != null) {
      merged.put(BUNDLE_IDENTIFIER_PLIST_KEY, fallbackIdentifier);
    }

    return this;
  }

  /**
   * A {@link ByteSource} view of a file that transparently skips a leading UTF-8 byte-order mark
   * when one is present.
   */
  private static class Utf8BomSkippingByteSource extends ByteSource {

    private static final byte[] UTF8_BOM =
        new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF };

    private final Path path;

    public Utf8BomSkippingByteSource(Path path) {
      this.path = path;
    }

    @Override
    public InputStream openStream() throws IOException {
      InputStream stream = new BufferedInputStream(Files.newInputStream(path));
      stream.mark(UTF8_BOM.length);
      byte[] buffer = new byte[UTF8_BOM.length];
      int read = stream.read(buffer);
      stream.reset();

      // BUGFIX: read is -1 for an empty file; the previous code passed it to
      // Arrays.copyOf, throwing NegativeArraySizeException. Only treat the
      // prefix as a BOM when all three bytes were actually read — a shorter
      // read can never be a BOM, so no copy/truncation is needed.
      if (read == UTF8_BOM.length && Arrays.equals(buffer, UTF8_BOM)) {
        // The skipped bytes were just buffered by the read above, so this
        // skip is guaranteed to consume the whole BOM.
        stream.skip(UTF8_BOM.length);
      }

      return stream;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.flatpack;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import net.sf.flatpack.DataSet;
import net.sf.flatpack.DefaultParserFactory;
import net.sf.flatpack.Parser;
import net.sf.flatpack.ParserFactory;
import net.sf.flatpack.writer.DelimiterWriterFactory;
import net.sf.flatpack.writer.FixedWriterFactory;
import net.sf.flatpack.writer.Writer;
import org.apache.camel.Exchange;
import org.apache.camel.component.flatpack.DataSetList;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.spi.DataFormatName;
import org.apache.camel.spi.annotations.Dataformat;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Flatpack DataFormat.
* <p/>
* This data format supports two operations:
* <ul>
* <li>marshal = from <tt>List<Map<String, Object>></tt> to <tt>OutputStream</tt> (can be converted to String)</li>
* <li>unmarshal = from <tt>InputStream</tt> (such as a File) to {@link org.apache.camel.component.flatpack.DataSetList}.
* </ul>
* <b>Notice:</b> The Flatpack library does currently not support header and trailers for the marshal operation.
*/
@Dataformat("flatpack")
public class FlatpackDataFormat extends ServiceSupport implements DataFormat, DataFormatName {
    private static final Logger LOG = LoggerFactory.getLogger(FlatpackDataFormat.class);

    private ParserFactory parserFactory = DefaultParserFactory.getInstance();
    private char delimiter = ',';
    private char textQualifier = '"';
    private boolean ignoreFirstRecord = true;
    private boolean fixed;
    private boolean allowShortLines;
    private boolean ignoreExtraColumns;
    private String definition;

    @Override
    public String getDataFormatName() {
        // Fix: must be "flatpack" so it matches the @Dataformat annotation above; the
        // previous value "flatback" broke resolving this data format by name.
        return "flatpack";
    }

    /**
     * Marshals a <tt>List&lt;Map&lt;String, Object&gt;&gt;</tt> of rows to the output stream.
     * The column layout is taken from the keys of the first row (delimited mode without a
     * definition) or from the configured definition resource.
     */
    @SuppressWarnings("unchecked")
    public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception {
        ObjectHelper.notNull(graph, "The object to marshal must be provided");

        List<Map<String, Object>> data = (List<Map<String, Object>>) graph;
        if (data.isEmpty()) {
            LOG.warn("No data to marshal as the list is empty");
            return;
        }
        Map<String, Object> firstRow = data.get(0);
        Writer writer = createWriter(exchange, firstRow, stream);
        try {
            boolean first = true;
            writer.printHeader();
            for (Map<String, Object> row : data) {
                if (ignoreFirstRecord && first) {
                    // skip first row (it is typically a header row)
                    first = false;
                    continue;
                }
                for (Entry<String, Object> entry : row.entrySet()) {
                    writer.addRecordEntry(entry.getKey(), entry.getValue());
                }
                writer.nextRecord();
            }
            writer.printFooter();
        } finally {
            writer.flush();
            writer.close();
        }
    }

    /**
     * Unmarshals the input stream to a {@link DataSetList} using either a fixed-length or a
     * delimited Flatpack parser, depending on configuration.
     */
    public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
        InputStreamReader reader = new InputStreamReader(stream, ExchangeHelper.getCharsetName(exchange));
        try {
            Parser parser = createParser(exchange, reader);
            DataSet dataSet = parser.parse();
            return new DataSetList(dataSet);
        } finally {
            reader.close();
        }
    }

    @Override
    protected void doStart() throws Exception {
        // noop
    }

    @Override
    protected void doStop() throws Exception {
        // noop
    }

    // Properties
    //-------------------------------------------------------------------------

    public String getDefinition() {
        return definition;
    }

    public void setDefinition(String definition) {
        this.definition = definition;
    }

    public boolean isFixed() {
        return fixed;
    }

    public void setFixed(boolean fixed) {
        this.fixed = fixed;
    }

    public char getDelimiter() {
        return delimiter;
    }

    public void setDelimiter(char delimiter) {
        this.delimiter = delimiter;
    }

    public boolean isIgnoreFirstRecord() {
        return ignoreFirstRecord;
    }

    public void setIgnoreFirstRecord(boolean ignoreFirstRecord) {
        this.ignoreFirstRecord = ignoreFirstRecord;
    }

    public char getTextQualifier() {
        return textQualifier;
    }

    public void setTextQualifier(char textQualifier) {
        this.textQualifier = textQualifier;
    }

    public ParserFactory getParserFactory() {
        return parserFactory;
    }

    public void setParserFactory(ParserFactory parserFactory) {
        this.parserFactory = parserFactory;
    }

    public boolean isAllowShortLines() {
        return this.allowShortLines;
    }

    /**
     * Allows for lines to be shorter than expected and ignores the extra characters
     */
    public void setAllowShortLines(boolean allowShortLines) {
        this.allowShortLines = allowShortLines;
    }

    /**
     * Allows for lines to be longer than expected and ignores the extra characters
     */
    public void setIgnoreExtraColumns(boolean ignoreExtraColumns) {
        this.ignoreExtraColumns = ignoreExtraColumns;
    }

    public boolean isIgnoreExtraColumns() {
        return ignoreExtraColumns;
    }

    // Implementation methods
    //-------------------------------------------------------------------------

    /**
     * Creates the Flatpack parser for unmarshalling: a fixed-length parser when
     * {@link #isFixed()} is set (a definition is then mandatory), otherwise a delimited
     * parser with or without a definition resource.
     */
    protected Parser createParser(Exchange exchange, Reader bodyReader) throws IOException {
        if (isFixed()) {
            InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(exchange.getContext(), getDefinition());
            InputStreamReader reader = new InputStreamReader(is, ExchangeHelper.getCharsetName(exchange));
            Parser parser = getParserFactory().newFixedLengthParser(reader, bodyReader);
            if (allowShortLines) {
                parser.setHandlingShortLines(true);
                parser.setIgnoreParseWarnings(true);
            }
            if (ignoreExtraColumns) {
                parser.setIgnoreExtraColumns(true);
                parser.setIgnoreParseWarnings(true);
            }
            return parser;
        } else {
            if (ObjectHelper.isEmpty(getDefinition())) {
                return getParserFactory().newDelimitedParser(bodyReader, delimiter, textQualifier);
            } else {
                InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(exchange.getContext(), getDefinition());
                InputStreamReader reader = new InputStreamReader(is, ExchangeHelper.getCharsetName(exchange));
                Parser parser = getParserFactory().newDelimitedParser(reader, bodyReader, delimiter, textQualifier, ignoreFirstRecord);
                if (allowShortLines) {
                    parser.setHandlingShortLines(true);
                    parser.setIgnoreParseWarnings(true);
                }
                if (ignoreExtraColumns) {
                    parser.setIgnoreExtraColumns(true);
                    parser.setIgnoreParseWarnings(true);
                }
                return parser;
            }
        }
    }

    /**
     * Creates the Flatpack writer for marshalling. In delimited mode without a definition the
     * column titles are seeded from the keys of the first data row, because the writer must
     * know all columns up front.
     */
    private Writer createWriter(Exchange exchange, Map<String, Object> firstRow, OutputStream stream) throws IOException {
        if (isFixed()) {
            InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(exchange.getContext(), getDefinition());
            InputStreamReader reader = new InputStreamReader(is, ExchangeHelper.getCharsetName(exchange));
            FixedWriterFactory factory = new FixedWriterFactory(reader);
            return factory.createWriter(new OutputStreamWriter(stream, ExchangeHelper.getCharsetName(exchange)));
        } else {
            if (getDefinition() == null) {
                DelimiterWriterFactory factory = new DelimiterWriterFactory(delimiter, textQualifier);
                // add columns from the keys in the data map as the columns must be known
                for (String key : firstRow.keySet()) {
                    factory.addColumnTitle(key);
                }
                return factory.createWriter(new OutputStreamWriter(stream, ExchangeHelper.getCharsetName(exchange)));
            } else {
                InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(exchange.getContext(), getDefinition());
                InputStreamReader reader = new InputStreamReader(is, ExchangeHelper.getCharsetName(exchange));
                DelimiterWriterFactory factory = new DelimiterWriterFactory(reader, delimiter, textQualifier);
                return factory.createWriter(new OutputStreamWriter(stream, ExchangeHelper.getCharsetName(exchange)));
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.execute;
import java.lang.reflect.Field;
import java.nio.ByteOrder;
import java.security.AccessController;
import java.security.PrivilegedAction;
import sun.misc.Unsafe;
import com.google.common.primitives.Longs;
import com.google.common.primitives.UnsignedBytes;
/**
* Utility code to do optimized byte-array comparison.
* This is borrowed from org.apache.hadoop.io.FastByteComparisons
* which was borrowed and slightly modified from Guava's {@link UnsignedBytes}
* class to be able to compare arrays that start at non-zero offsets.
*
* The only difference is that we sort a smaller length bytes as *larger*
* than longer length bytes when all the bytes are the same.
*/
@SuppressWarnings("restriction")
public class DescVarLengthFastByteComparisons {

    // Static utility class; never instantiated.
    private DescVarLengthFastByteComparisons() {}

    /**
     * Lexicographically compare two byte arrays.
     *
     * <p>Variable-length DESC twist: when one array is a strict prefix of the other, the
     * shorter array sorts as *larger* (implemented by the {@code length2 - length1} returns
     * in both comparers below).
     */
    public static int compareTo(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        return LexicographicalComparerHolder.BEST_COMPARER.compareTo(b1, s1, l1, b2, s2, l2);
    }

    // Comparer over a slice (offset + length) of two buffers of the same type.
    private interface Comparer<T> {
        abstract public int compareTo(T buffer1, int offset1, int length1, T buffer2, int offset2, int length2);
    }

    // Pure-Java fallback used when the Unsafe-based comparer cannot be loaded.
    private static Comparer<byte[]> lexicographicalComparerJavaImpl() {
        return LexicographicalComparerHolder.PureJavaComparer.INSTANCE;
    }

    /**
     * Provides a lexicographical comparer implementation; either a Java implementation or a faster implementation based
     * on {@link Unsafe}.
     * <p>
     * Uses reflection to gracefully fall back to the Java implementation if {@code Unsafe} isn't available.
     */
    private static class LexicographicalComparerHolder {
        static final String UNSAFE_COMPARER_NAME = LexicographicalComparerHolder.class.getName() + "$UnsafeComparer";

        static final Comparer<byte[]> BEST_COMPARER = getBestComparer();

        /**
         * Returns the Unsafe-using Comparer, or falls back to the pure-Java implementation if unable to do so.
         */
        static Comparer<byte[]> getBestComparer() {
            try {
                // Loading the class by name (rather than referencing it directly) lets class
                // initialization failures (e.g. no Unsafe access) be caught here.
                Class<?> theClass = Class.forName(UNSAFE_COMPARER_NAME);

                // yes, UnsafeComparer does implement Comparer<byte[]>
                @SuppressWarnings("unchecked")
                Comparer<byte[]> comparer = (Comparer<byte[]>)theClass.getEnumConstants()[0];
                return comparer;
            } catch (Throwable t) { // ensure we really catch *everything*
                return lexicographicalComparerJavaImpl();
            }
        }

        // Byte-at-a-time comparison; correct on every platform, slower than UnsafeComparer.
        private enum PureJavaComparer implements Comparer<byte[]> {
            INSTANCE;

            @Override
            public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2, int length2) {
                // Short circuit equal case
                if (buffer1 == buffer2 && offset1 == offset2 && length1 == length2) { return 0; }
                // Bring WritableComparator code local
                int end1 = offset1 + length1;
                int end2 = offset2 + length2;
                for (int i = offset1, j = offset2; i < end1 && j < end2; i++, j++) {
                    // Compare as unsigned bytes.
                    int a = (buffer1[i] & 0xff);
                    int b = (buffer2[j] & 0xff);
                    if (a != b) { return a - b; }
                }
                // Common prefix: shorter sorts larger (DESC variable-length semantics).
                return length2 - length1;
            }
        }

        @SuppressWarnings("unused")
        // used via reflection
        private enum UnsafeComparer implements Comparer<byte[]> {
            INSTANCE;

            static final Unsafe theUnsafe;

            /** The offset to the first element in a byte array. */
            static final int BYTE_ARRAY_BASE_OFFSET;

            static {
                // Grab the Unsafe singleton reflectively; direct Unsafe.getUnsafe() would
                // throw SecurityException for non-bootstrap code.
                theUnsafe = (Unsafe)AccessController.doPrivileged(new PrivilegedAction<Object>() {
                    @Override
                    public Object run() {
                        try {
                            Field f = Unsafe.class.getDeclaredField("theUnsafe");
                            f.setAccessible(true);
                            return f.get(null);
                        } catch (NoSuchFieldException e) {
                            // It doesn't matter what we throw;
                            // it's swallowed in getBestComparer().
                            throw new Error();
                        } catch (IllegalAccessException e) {
                            throw new Error();
                        }
                    }
                });

                BYTE_ARRAY_BASE_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);

                // sanity check - this should never fail
                if (theUnsafe.arrayIndexScale(byte[].class) != 1) { throw new AssertionError(); }
            }

            static final boolean littleEndian = ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);

            /**
             * Returns true if x1 is less than x2, when both values are treated as unsigned.
             */
            static boolean lessThanUnsigned(long x1, long x2) {
                // Flipping the sign bit turns unsigned ordering into signed ordering.
                return (x1 + Long.MIN_VALUE) < (x2 + Long.MIN_VALUE);
            }

            /**
             * Lexicographically compare two arrays.
             *
             * @param buffer1
             *            left operand
             * @param buffer2
             *            right operand
             * @param offset1
             *            Where to start comparing in the left buffer
             * @param offset2
             *            Where to start comparing in the right buffer
             * @param length1
             *            How much to compare from the left buffer
             * @param length2
             *            How much to compare from the right buffer
             * @return 0 if equal, < 0 if left is less than right, etc.
             */
            @Override
            public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2, int length2) {
                // Short circuit equal case
                if (buffer1 == buffer2 && offset1 == offset2 && length1 == length2) { return 0; }
                int minLength = Math.min(length1, length2);
                int minWords = minLength / Longs.BYTES;
                int offset1Adj = offset1 + BYTE_ARRAY_BASE_OFFSET;
                int offset2Adj = offset2 + BYTE_ARRAY_BASE_OFFSET;

                /*
                 * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes at a time is no slower than comparing
                 * 4 bytes at a time even on 32-bit. On the other hand, it is substantially faster on 64-bit.
                 */
                for (int i = 0; i < minWords * Longs.BYTES; i += Longs.BYTES) {
                    long lw = theUnsafe.getLong(buffer1, offset1Adj + (long)i);
                    long rw = theUnsafe.getLong(buffer2, offset2Adj + (long)i);
                    long diff = lw ^ rw;

                    if (diff != 0) {
                        // Big-endian reads already compare in memory order.
                        if (!littleEndian) { return lessThanUnsigned(lw, rw) ? -1 : 1; }

                        // Little-endian: locate the lowest-order differing byte (which is the
                        // first differing byte in memory order) via binary search on diff,
                        // then compare just that byte.
                        // Use binary search
                        int n = 0;
                        int y;
                        int x = (int)diff;
                        if (x == 0) {
                            x = (int)(diff >>> 32);
                            n = 32;
                        }

                        y = x << 16;
                        if (y == 0) {
                            n += 16;
                        } else {
                            x = y;
                        }

                        y = x << 8;
                        if (y == 0) {
                            n += 8;
                        }
                        return (int)(((lw >>> n) & 0xFFL) - ((rw >>> n) & 0xFFL));
                    }
                }

                // The epilogue to cover the last (minLength % 8) elements.
                for (int i = minWords * Longs.BYTES; i < minLength; i++) {
                    int result = UnsignedBytes.compare(buffer1[offset1 + i], buffer2[offset2 + i]);
                    if (result != 0) { return result; }
                }
                // Common prefix: shorter sorts larger (DESC variable-length semantics).
                return length2 - length1;
            }
        }
    }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.map.client;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
/**
*
* @author hmlnarik
*/
public abstract class MapClientAdapter extends AbstractClientModel<MapClientEntity> implements ClientModel {

    public MapClientAdapter(KeycloakSession session, RealmModel realm, MapClientEntity entity) {
        super(session, realm, entity);
    }

    @Override
    public String getId() {
        return entity.getId().toString();
    }

    @Override
    public String getClientId() {
        return entity.getClientId();
    }

    @Override
    public void setClientId(String clientId) {
        entity.setClientId(clientId);
    }

    @Override
    public String getName() {
        return entity.getName();
    }

    @Override
    public void setName(String name) {
        entity.setName(name);
    }

    @Override
    public String getDescription() {
        return entity.getDescription();
    }

    @Override
    public void setDescription(String description) {
        entity.setDescription(description);
    }

    @Override
    public boolean isEnabled() {
        return entity.isEnabled();
    }

    @Override
    public void setEnabled(boolean enabled) {
        entity.setEnabled(enabled);
    }

    @Override
    public boolean isAlwaysDisplayInConsole() {
        return entity.isAlwaysDisplayInConsole();
    }

    @Override
    public void setAlwaysDisplayInConsole(boolean alwaysDisplayInConsole) {
        entity.setAlwaysDisplayInConsole(alwaysDisplayInConsole);
    }

    @Override
    public boolean isSurrogateAuthRequired() {
        return entity.isSurrogateAuthRequired();
    }

    @Override
    public void setSurrogateAuthRequired(boolean surrogateAuthRequired) {
        entity.setSurrogateAuthRequired(surrogateAuthRequired);
    }

    @Override
    public Set<String> getWebOrigins() {
        return entity.getWebOrigins();
    }

    @Override
    public void setWebOrigins(Set<String> webOrigins) {
        entity.setWebOrigins(webOrigins);
    }

    @Override
    public void addWebOrigin(String webOrigin) {
        entity.addWebOrigin(webOrigin);
    }

    @Override
    public void removeWebOrigin(String webOrigin) {
        entity.removeWebOrigin(webOrigin);
    }

    @Override
    public Set<String> getRedirectUris() {
        return entity.getRedirectUris();
    }

    @Override
    public void setRedirectUris(Set<String> redirectUris) {
        entity.setRedirectUris(redirectUris);
    }

    @Override
    public void addRedirectUri(String redirectUri) {
        entity.addRedirectUri(redirectUri);
    }

    @Override
    public void removeRedirectUri(String redirectUri) {
        entity.removeRedirectUri(redirectUri);
    }

    @Override
    public String getManagementUrl() {
        return entity.getManagementUrl();
    }

    @Override
    public void setManagementUrl(String url) {
        entity.setManagementUrl(url);
    }

    @Override
    public String getRootUrl() {
        return entity.getRootUrl();
    }

    @Override
    public void setRootUrl(String url) {
        entity.setRootUrl(url);
    }

    @Override
    public String getBaseUrl() {
        return entity.getBaseUrl();
    }

    @Override
    public void setBaseUrl(String url) {
        entity.setBaseUrl(url);
    }

    @Override
    public boolean isBearerOnly() {
        return entity.isBearerOnly();
    }

    @Override
    public void setBearerOnly(boolean only) {
        entity.setBearerOnly(only);
    }

    @Override
    public String getClientAuthenticatorType() {
        return entity.getClientAuthenticatorType();
    }

    @Override
    public void setClientAuthenticatorType(String clientAuthenticatorType) {
        entity.setClientAuthenticatorType(clientAuthenticatorType);
    }

    @Override
    public boolean validateSecret(String secret) {
        // MessageDigest.isEqual performs a time-constant comparison, avoiding timing
        // side channels on the secret. Guard against nulls: the previous implementation
        // threw a NullPointerException when no secret was stored (e.g. for clients
        // without a configured secret) or when the caller passed null; validation
        // should simply fail in those cases.
        String storedSecret = entity.getSecret();
        return secret != null && storedSecret != null
                && MessageDigest.isEqual(secret.getBytes(), storedSecret.getBytes());
    }

    @Override
    public String getSecret() {
        return entity.getSecret();
    }

    @Override
    public void setSecret(String secret) {
        entity.setSecret(secret);
    }

    @Override
    public int getNodeReRegistrationTimeout() {
        return entity.getNodeReRegistrationTimeout();
    }

    @Override
    public void setNodeReRegistrationTimeout(int timeout) {
        entity.setNodeReRegistrationTimeout(timeout);
    }

    @Override
    public String getRegistrationToken() {
        return entity.getRegistrationToken();
    }

    @Override
    public void setRegistrationToken(String registrationToken) {
        entity.setRegistrationToken(registrationToken);
    }

    @Override
    public String getProtocol() {
        return entity.getProtocol();
    }

    @Override
    public void setProtocol(String protocol) {
        entity.setProtocol(protocol);
        // Notify listeners (e.g. protocol mapper providers) that the client protocol changed.
        session.getKeycloakSessionFactory().publish((ClientModel.ClientProtocolUpdatedEvent) () -> MapClientAdapter.this);
    }

    @Override
    public void setAttribute(String name, String value) {
        // Null or blank values are treated as "unset": remove the attribute instead of
        // storing an empty string.
        boolean valueUndefined = value == null || "".equals(value.trim());
        if (valueUndefined) {
            removeAttribute(name);
            return;
        }

        entity.setAttribute(name, value);
    }

    @Override
    public void removeAttribute(String name) {
        entity.removeAttribute(name);
    }

    @Override
    public String getAttribute(String name) {
        return entity.getAttribute(name);
    }

    @Override
    public Map<String, String> getAttributes() {
        return entity.getAttributes();
    }

    @Override
    public String getAuthenticationFlowBindingOverride(String binding) {
        return entity.getAuthenticationFlowBindingOverride(binding);
    }

    @Override
    public Map<String, String> getAuthenticationFlowBindingOverrides() {
        return entity.getAuthenticationFlowBindingOverrides();
    }

    @Override
    public void removeAuthenticationFlowBindingOverride(String binding) {
        entity.removeAuthenticationFlowBindingOverride(binding);
    }

    @Override
    public void setAuthenticationFlowBindingOverride(String binding, String flowId) {
        entity.setAuthenticationFlowBindingOverride(binding, flowId);
    }

    @Override
    public boolean isFrontchannelLogout() {
        return entity.isFrontchannelLogout();
    }

    @Override
    public void setFrontchannelLogout(boolean flag) {
        entity.setFrontchannelLogout(flag);
    }

    @Override
    public boolean isFullScopeAllowed() {
        return entity.isFullScopeAllowed();
    }

    @Override
    public void setFullScopeAllowed(boolean value) {
        entity.setFullScopeAllowed(value);
    }

    @Override
    public boolean isPublicClient() {
        return entity.isPublicClient();
    }

    @Override
    public void setPublicClient(boolean flag) {
        entity.setPublicClient(flag);
    }

    @Override
    public boolean isConsentRequired() {
        return entity.isConsentRequired();
    }

    @Override
    public void setConsentRequired(boolean consentRequired) {
        entity.setConsentRequired(consentRequired);
    }

    @Override
    public boolean isStandardFlowEnabled() {
        return entity.isStandardFlowEnabled();
    }

    @Override
    public void setStandardFlowEnabled(boolean standardFlowEnabled) {
        entity.setStandardFlowEnabled(standardFlowEnabled);
    }

    @Override
    public boolean isImplicitFlowEnabled() {
        return entity.isImplicitFlowEnabled();
    }

    @Override
    public void setImplicitFlowEnabled(boolean implicitFlowEnabled) {
        entity.setImplicitFlowEnabled(implicitFlowEnabled);
    }

    @Override
    public boolean isDirectAccessGrantsEnabled() {
        return entity.isDirectAccessGrantsEnabled();
    }

    @Override
    public void setDirectAccessGrantsEnabled(boolean directAccessGrantsEnabled) {
        entity.setDirectAccessGrantsEnabled(directAccessGrantsEnabled);
    }

    @Override
    public boolean isServiceAccountsEnabled() {
        return entity.isServiceAccountsEnabled();
    }

    @Override
    public void setServiceAccountsEnabled(boolean serviceAccountsEnabled) {
        entity.setServiceAccountsEnabled(serviceAccountsEnabled);
    }

    @Override
    public RealmModel getRealm() {
        return realm;
    }

    @Override
    public int getNotBefore() {
        return entity.getNotBefore();
    }

    @Override
    public void setNotBefore(int notBefore) {
        entity.setNotBefore(notBefore);
    }

    /*************** Scopes mappings ****************/

    @Override
    public Stream<RoleModel> getScopeMappingsStream() {
        // Stored scope mappings are role ids; resolve them and silently drop ids of
        // roles that no longer exist in the realm.
        return this.entity.getScopeMappings().stream()
                .map(realm::getRoleById)
                .filter(Objects::nonNull);
    }

    @Override
    public void addScopeMapping(RoleModel role) {
        final String id = role == null ? null : role.getId();
        if (id != null) {
            this.entity.addScopeMapping(id);
        }
    }

    @Override
    public void deleteScopeMapping(RoleModel role) {
        final String id = role == null ? null : role.getId();
        if (id != null) {
            this.entity.deleteScopeMapping(id);
        }
    }

    @Override
    public boolean hasScope(RoleModel role) {
        // Full-scope clients see every role.
        if (isFullScopeAllowed()) return true;

        // Direct scope mapping on this client.
        final String id = role == null ? null : role.getId();
        if (id != null && this.entity.getScopeMappings().contains(id)) {
            return true;
        }

        // Role reachable through a composite of a scoped role.
        if (getScopeMappingsStream().anyMatch(r -> r.hasRole(role))) {
            return true;
        }

        // Finally, the client's own roles (and their composites).
        return getRolesStream().anyMatch(r -> (Objects.equals(r, role) || r.hasRole(role)));
    }

    /*************** Default roles ****************/

    @Override
    @Deprecated
    public Stream<String> getDefaultRolesStream() {
        return realm.getDefaultRole().getCompositesStream().filter(this::isClientRole).map(RoleModel::getName);
    }

    // True when the role belongs to this client (as opposed to a realm role or another client's role).
    private boolean isClientRole(RoleModel role) {
        return role.isClientRole() && Objects.equals(role.getContainerId(), this.getId());
    }

    @Override
    @Deprecated
    public void addDefaultRole(String name) {
        realm.getDefaultRole().addCompositeRole(getOrAddRoleId(name));
    }

    // Looks up the named client role, creating it first if it does not exist yet.
    private RoleModel getOrAddRoleId(String name) {
        RoleModel role = getRole(name);
        if (role == null) {
            role = addRole(name);
        }
        return role;
    }

    @Override
    @Deprecated
    public void removeDefaultRoles(String... defaultRoles) {
        for (String defaultRole : defaultRoles) {
            realm.getDefaultRole().removeCompositeRole(getRole(defaultRole));
        }
    }

    /*************** Protocol mappers ****************/

    @Override
    public Stream<ProtocolMapperModel> getProtocolMappersStream() {
        return entity.getProtocolMappers().stream().distinct();
    }

    @Override
    public ProtocolMapperModel addProtocolMapper(ProtocolMapperModel model) {
        if (model == null) {
            return null;
        }

        // Store a defensive copy with a freshly generated id so the caller's instance
        // cannot mutate the persisted mapper afterwards.
        ProtocolMapperModel pm = new ProtocolMapperModel();
        pm.setId(KeycloakModelUtils.generateId());
        pm.setName(model.getName());
        pm.setProtocol(model.getProtocol());
        pm.setProtocolMapper(model.getProtocolMapper());

        if (model.getConfig() != null) {
            pm.setConfig(new HashMap<>(model.getConfig()));
        } else {
            pm.setConfig(new HashMap<>());
        }

        return entity.addProtocolMapper(pm);
    }

    @Override
    public void removeProtocolMapper(ProtocolMapperModel mapping) {
        final String id = mapping == null ? null : mapping.getId();
        if (id != null) {
            entity.removeProtocolMapper(id);
        }
    }

    @Override
    public void updateProtocolMapper(ProtocolMapperModel mapping) {
        final String id = mapping == null ? null : mapping.getId();
        if (id != null) {
            entity.updateProtocolMapper(id, mapping);
        }
    }

    @Override
    public ProtocolMapperModel getProtocolMapperById(String id) {
        return entity.getProtocolMapperById(id);
    }

    @Override
    public ProtocolMapperModel getProtocolMapperByName(String protocol, String name) {
        return entity.getProtocolMappers().stream()
                .filter(pm -> Objects.equals(pm.getProtocol(), protocol) && Objects.equals(pm.getName(), name))
                .findAny()
                .orElse(null);
    }

    @Override
    public String toString() {
        return String.format("%s@%08x", getClientId(), System.identityHashCode(this));
    }
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.workitem.bpmn2;
import java.io.File;
import java.math.BigDecimal;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import org.apache.commons.io.IOUtils;
import org.drools.compiler.compiler.ProcessBuilderFactory;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.drools.core.impl.EnvironmentFactory;
import org.drools.core.impl.KnowledgeBaseFactory;
import org.drools.core.runtime.process.ProcessRuntimeFactory;
import org.jbpm.process.builder.ProcessBuilderFactoryServiceImpl;
import org.jbpm.process.instance.ProcessRuntimeFactoryServiceImpl;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Message;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.Results;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.KieSessionConfiguration;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.process.WorkflowProcessInstance;
import org.kie.internal.builder.InternalKieBuilder;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import org.kie.scanner.KieMavenRepository;
import static org.junit.Assert.*;
public class BusinessRuleTaskTest {
    // GAV coordinates of the throwaway kjar deployed in setup() and resolved by the
    // BusinessRuleTaskHandler instances under test.
    private static final String GROUP_ID = "org.jbpm";
    private static final String ARTIFACT_ID = "test-kjar";
    private static final String VERSION = "1.0";

    // Entry point to the KIE API, used to build release ids and classpath resources.
    private KieServices ks = KieServices.Factory.get();
    /**
     * Builds and deploys a kjar containing the rule (DRL) and decision (DMN) resources,
     * so the handlers under test can resolve them by GAV at run time.
     */
    @Before
    public void setup() throws Exception {
        createAndDeployJar(ks,
                           ks.newReleaseId(GROUP_ID,
                                           ARTIFACT_ID,
                                           VERSION),
                           ks.getResources().newClassPathResource("businessRule.drl"),
                           ks.getResources().newClassPathResource("0020-vacation-days.dmn"));
    }
@Test
public void testBusinessRuleTaskProcess() throws Exception {
KieBase kbase = readKnowledgeBase();
KieSession ksession = createSession(kbase);
BusinessRuleTaskHandler handler = new BusinessRuleTaskHandler(GROUP_ID,
ARTIFACT_ID,
VERSION);
ksession.getWorkItemManager().registerWorkItemHandler("BusinessRuleTask",
handler);
Map<String, Object> params = new HashMap<String, Object>();
params.put("person",
new org.jbpm.process.workitem.bpmn2.objects.Person("john"));
WorkflowProcessInstance processInstance = (WorkflowProcessInstance) ksession.startProcess("evaluation.ruletask",
params);
org.jbpm.process.workitem.bpmn2.objects.Person variable = (org.jbpm.process.workitem.bpmn2.objects.Person) processInstance.getVariable("person");
assertEquals("john",
variable.getName());
assertEquals(35,
variable.getAge().intValue());
assertEquals(ProcessInstance.STATE_COMPLETED,
processInstance.getState());
}
@Test
public void testDecisionTaskProcess() throws Exception {
KieBase kbase = readKnowledgeBase();
KieSession ksession = createSession(kbase);
BusinessRuleTaskHandler handler = new BusinessRuleTaskHandler(GROUP_ID,
ARTIFACT_ID,
VERSION);
ksession.getWorkItemManager().registerWorkItemHandler("DecisionTask",
handler);
Map<String, Object> params = new HashMap<String, Object>();
params.put("age",
16);
params.put("yearsOfService",
1);
WorkflowProcessInstance processInstance = (WorkflowProcessInstance) ksession.startProcess("BPMN2-BusinessRuleTask",
params);
BigDecimal variable = (BigDecimal) processInstance.getVariable("vacationDays");
assertEquals(27,
variable.intValue());
assertEquals(ProcessInstance.STATE_COMPLETED,
processInstance.getState());
}
@Test
public void testDecisionPassingNullToDMN() throws Exception {
//This test make sure that null variables are passed to the DMN execution context, otherwise DMN will throw an exception that an input requirement is missing
KieBase kbase = readKnowledgeBase();
KieSession ksession = createSession(kbase);
Map<String, Object> params = new HashMap<String, Object>();
params.put("Input",
null);
WorkflowProcessInstance processInstance = (WorkflowProcessInstance) ksession.startProcess("passthru",
params);
Object output = processInstance.getVariable("Output");
assertEquals(null,
output);
}
@Test
public void testDecisionPassingNonNullToDMN() throws Exception {
//This test make sure that null variables are passed to the DMN execution context, otherwise DMN will throw an exception that an input requirement is missing
KieBase kbase = readKnowledgeBase();
KieSession ksession = createSession(kbase);
Map<String, Object> params = new HashMap<String, Object>();
params.put("Input",
"Hello World");
WorkflowProcessInstance processInstance = (WorkflowProcessInstance) ksession.startProcess("passthru",
params);
Object output = processInstance.getVariable("Output");
assertEquals("Hello World",
output);
}
@Test
public void testCallingDecisionService() throws Exception {
//This test make sure that null variables are passed to the DMN execution context, otherwise DMN will throw an exception that an input requirement is missing
KieBase kbase = readKnowledgeBase();
KieSession ksession = createSession(kbase);
Map<String, Object> params = new HashMap<String, Object>();
params.put("Input",
"Hello World");
WorkflowProcessInstance processInstance = (WorkflowProcessInstance) ksession.startProcess("CallDecision",
params);
Object output = processInstance.getVariable("Output Decision");
//This is mapped with data associations to Output Decision in the DMN model
assertEquals("Hello World",
output);
Object encapsulatedOutput = processInstance.getVariable("Encapsulated Output");
//This is mapped with data associations to Encapsulated Output in the DMN model and should not be part of the returned model because encapsulated decisions are not returned
assertEquals(null,
encapsulatedOutput);
}
private static KieBase readKnowledgeBase() throws Exception {
ProcessBuilderFactory.setProcessBuilderFactoryService(new ProcessBuilderFactoryServiceImpl());
ProcessRuntimeFactory.setProcessRuntimeFactoryService(new ProcessRuntimeFactoryServiceImpl());
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add(ResourceFactory.newClassPathResource("businessRuleTaskProcess.bpmn2"),
ResourceType.BPMN2);
kbuilder.add(ResourceFactory.newClassPathResource("businessRuleTaskDMN.bpmn2"),
ResourceType.BPMN2);
kbuilder.add(ResourceFactory.newClassPathResource("string-passthru.dmn"),
ResourceType.DMN);
kbuilder.add(ResourceFactory.newClassPathResource("calling-dmn-passthru.bpmn2"),
ResourceType.BPMN2);
kbuilder.add(ResourceFactory.newClassPathResource("decision-service.dmn"),
ResourceType.DMN);
kbuilder.add(ResourceFactory.newClassPathResource("calling-dmn-decision-service.bpmn2"),
ResourceType.BPMN2);
return kbuilder.newKieBase();
}
private static KieSession createSession(KieBase kbase) {
Properties properties = new Properties();
properties.put("drools.processInstanceManagerFactory",
"org.jbpm.process.instance.impl.DefaultProcessInstanceManagerFactory");
properties.put("drools.processSignalManagerFactory",
"org.jbpm.process.instance.event.DefaultSignalManagerFactory");
KieSessionConfiguration config = KnowledgeBaseFactory.newKnowledgeSessionConfiguration(properties);
return kbase.newKieSession(config,
EnvironmentFactory.newEnvironment());
}
private byte[] createAndDeployJar(KieServices ks,
ReleaseId releaseId,
Resource... resources) throws Exception {
KieFileSystem kfs = ks.newKieFileSystem().generateAndWritePomXML(releaseId);
for (int i = 0; i < resources.length; i++) {
if (resources[i] != null) {
kfs.write(resources[i]);
}
}
KieBuilder kieBuilder = ks.newKieBuilder(kfs);
((InternalKieBuilder) kieBuilder).buildAll(o -> true);
Results results = kieBuilder.getResults();
if (results.hasMessages(Message.Level.ERROR)) {
throw new IllegalStateException(results.getMessages(Message.Level.ERROR).toString());
}
InternalKieModule kieModule = (InternalKieModule) ks.getRepository().getKieModule(releaseId);
byte[] pomXmlContent = IOUtils.toByteArray(kieModule.getPomAsStream());
File pom = new File("target",
UUID.randomUUID().toString());
Files.write(pom.toPath(),
pomXmlContent);
KieMavenRepository.getKieMavenRepository().installArtifact(releaseId,
kieModule,
pom);
byte[] jar = kieModule.getBytes();
return jar;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.container;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.refEq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.atLeastOnce;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerRetryContext;
import org.apache.hadoop.yarn.api.records.ContainerRetryPolicy;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode;
import org.apache.hadoop.yarn.server.nodemanager.ContainerStateTransitionListener;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncher;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainersLauncherEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationCleanupEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ContainerLocalizationRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerScheduler;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.ContainerSchedulerEventType;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ControlledClock;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
public class TestContainer {
// Shared NM metrics instance; tests capture counts before an action and
// assert relative deltas afterwards.
final NodeManagerMetrics metrics = NodeManagerMetrics.create();
// Default YARN configuration used by the wrapped test containers.
final Configuration conf = new YarnConfiguration();
// Diagnostics text for simulated localization failures; presumably used by
// tests further down in this class — not referenced in the visible methods.
final String FAKE_LOCALIZATION_ERROR = "Fake localization error";
/**
 * A freshly initialized container must emit a localization request covering
 * its public, private and application-visibility resources, and transition
 * from NEW to LOCALIZING.
 */
@Test
public void testLocalizationRequest() throws Exception {
  WrappedContainer container = null;
  try {
    container = new WrappedContainer(7, 314159265358979L, 4344, "yak");
    assertEquals(ContainerState.NEW, container.c.getContainerState());
    container.initContainer();
    // The localizer bus must have received a request spanning every visibility.
    EnumSet<LocalResourceVisibility> visibilities =
        EnumSet.of(LocalResourceVisibility.PUBLIC,
            LocalResourceVisibility.PRIVATE,
            LocalResourceVisibility.APPLICATION);
    ResourcesRequestedMatcher expectedRequest =
        new ResourcesRequestedMatcher(container.localResources, visibilities);
    verify(container.localizerBus).handle(argThat(expectedRequest));
    assertEquals(ContainerState.LOCALIZING, container.c.getContainerState());
  } finally {
    if (container != null) {
      container.finished();
    }
  }
}
/**
 * Once all resources are localized the container moves to SCHEDULED, its
 * localized-resource map matches exactly what localization reported, and a
 * launch event for this container is sent to the containers launcher.
 */
@Test
public void testLocalizationLaunch() throws Exception {
  WrappedContainer container = null;
  try {
    container = new WrappedContainer(8, 314159265358979L, 4344, "yak");
    assertEquals(ContainerState.NEW, container.c.getContainerState());
    container.initContainer();
    Map<Path, List<String>> expectedPaths = container.localizeResources();
    // All resources should now be localized.
    assertEquals(ContainerState.SCHEDULED, container.c.getContainerState());
    assertNotNull(container.c.getLocalizedResources());
    // Every reported path must be present — and nothing beyond them.
    for (Entry<Path, List<String>> entry
        : container.c.getLocalizedResources().entrySet()) {
      assertEquals(expectedPaths.remove(entry.getKey()), entry.getValue());
    }
    assertTrue(expectedPaths.isEmpty());
    // Effectively-final alias for capture by the lambda below.
    final WrappedContainer captured = container;
    ArgumentMatcher<ContainersLauncherEvent> launchForThisContainer =
        event -> captured.c == event.getContainer();
    verify(container.launcherBus).handle(argThat(launchForThisContainer));
  } finally {
    if (container != null) {
      container.finished();
    }
  }
}
/**
 * External kill of a RUNNING container: it must end in EXITED_WITH_FAILURE,
 * drop its localized resources, request resource cleanup, and finish DONE
 * with the failed-container metric incremented and the running-container
 * count restored to its pre-launch value.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testExternalKill() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(13, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    int running = metrics.getRunningContainers();
    wc.launchContainer();
    assertEquals(running + 1, metrics.getRunningContainers());
    // Discard localization-time interactions so cleanup can be verified cleanly.
    reset(wc.localizerBus);
    wc.containerKilledOnRequest();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int failed = metrics.getFailedContainers();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(failed + 1, metrics.getFailedContainers());
    assertEquals(running, metrics.getRunningContainers());
  }
  finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Docker variant of {@link #testExternalKill()}: same external-kill flow but
 * with the Docker container environment set up and the Docker-specific
 * resource cleanup path exercised.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testDockerContainerExternalKill() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(13, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    int running = metrics.getRunningContainers();
    wc.launchContainer();
    assertEquals(running + 1, metrics.getRunningContainers());
    reset(wc.localizerBus);
    wc.containerKilledOnRequest();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int failed = metrics.getFailedContainers();
    // Docker-specific cleanup path (vs. containerResourcesCleanup()).
    wc.dockerContainerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(failed + 1, metrics.getFailedContainers());
    assertEquals(running, metrics.getRunningContainers());
  }
  finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Pause/resume round trip: RUNNING -> PAUSED (paused metric +1) ->
 * RUNNING (paused metric restored), then an external kill drives the
 * container through EXITED_WITH_FAILURE to DONE with the failed metric
 * incremented and the running count restored.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testContainerPauseAndResume() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(13, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    int running = metrics.getRunningContainers();
    int paused = metrics.getPausedContainers();
    wc.launchContainer();
    assertEquals(running + 1, metrics.getRunningContainers());
    reset(wc.localizerBus);
    wc.pauseContainer();
    assertEquals(ContainerState.PAUSED,
        wc.c.getContainerState());
    assertEquals(paused + 1, metrics.getPausedContainers());
    wc.resumeContainer();
    assertEquals(paused, metrics.getPausedContainers());
    assertEquals(ContainerState.RUNNING,
        wc.c.getContainerState());
    wc.containerKilledOnRequest();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int failed = metrics.getFailedContainers();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(failed + 1, metrics.getFailedContainers());
    assertEquals(running, metrics.getRunningContainers());
  }
  finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * A container that fails while running must end in EXITED_WITH_FAILURE,
 * drop its localized resources and ask the localizer to clean them up.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testCleanupOnFailure() throws Exception {
  WrappedContainer container = null;
  try {
    container = new WrappedContainer(10, 314159265358979L, 4344, "yak");
    container.initContainer();
    container.localizeResources();
    container.launchContainer();
    // Forget localization-time interactions before asserting cleanup.
    reset(container.localizerBus);
    container.containerFailed(ExitCode.FORCE_KILLED.getExitCode());
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        container.c.getContainerState());
    assertNull(container.c.getLocalizedResources());
    verifyCleanupCall(container);
  } finally {
    if (container != null) {
      container.finished();
    }
  }
}
/**
 * Docker variant of {@link #testCleanupOnFailure()}: container failure leads
 * to EXITED_WITH_FAILURE and a cleanup request; the Docker-specific cleanup
 * path is then driven explicitly.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testDockerContainerCleanupOnFailure() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(10, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    wc.launchContainer();
    reset(wc.localizerBus);
    wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode());
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    wc.dockerContainerResourcesCleanup();
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Successful container completion: EXITED_WITH_SUCCESS, cleanup requested,
 * DONE with completed metric incremented and running count restored. Also
 * replays the recorded state/event maps to check the exact transition chain
 * NEW -> LOCALIZING -> SCHEDULED -> RUNNING -> EXITED_WITH_SUCCESS -> DONE.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testCleanupOnSuccess() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(11, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    int running = metrics.getRunningContainers();
    wc.launchContainer();
    assertEquals(running + 1, metrics.getRunningContainers());
    reset(wc.localizerBus);
    wc.containerSuccessful();
    assertEquals(ContainerState.EXITED_WITH_SUCCESS,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int completed = metrics.getCompletedContainers();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(completed + 1, metrics.getCompletedContainers());
    assertEquals(running, metrics.getRunningContainers());
    // Walk the recorded transition chain: initStateToEvent gives the event
    // observed in a state; eventToFinalState gives the state it produced.
    ContainerEventType e1 = wc.initStateToEvent.get(ContainerState.NEW);
    ContainerState s2 = wc.eventToFinalState.get(e1);
    ContainerEventType e2 = wc.initStateToEvent.get(s2);
    ContainerState s3 = wc.eventToFinalState.get(e2);
    ContainerEventType e3 = wc.initStateToEvent.get(s3);
    ContainerState s4 = wc.eventToFinalState.get(e3);
    ContainerEventType e4 = wc.initStateToEvent.get(s4);
    ContainerState s5 = wc.eventToFinalState.get(e4);
    ContainerEventType e5 = wc.initStateToEvent.get(s5);
    ContainerState s6 = wc.eventToFinalState.get(e5);
    Assert.assertEquals(ContainerState.LOCALIZING, s2);
    Assert.assertEquals(ContainerState.SCHEDULED, s3);
    Assert.assertEquals(ContainerState.RUNNING, s4);
    Assert.assertEquals(ContainerState.EXITED_WITH_SUCCESS, s5);
    Assert.assertEquals(ContainerState.DONE, s6);
    Assert.assertEquals(ContainerEventType.INIT_CONTAINER, e1);
    Assert.assertEquals(ContainerEventType.RESOURCE_LOCALIZED, e2);
    Assert.assertEquals(ContainerEventType.CONTAINER_LAUNCHED, e3);
    Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS, e4);
    Assert.assertEquals(ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP, e5);
  }
  finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Docker variant of {@link #testCleanupOnSuccess()}: successful completion
 * followed by the Docker-specific cleanup path, ending in DONE with the
 * completed metric incremented and the running count restored.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testDockerContainerCleanupOnSuccess() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(11, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    int running = metrics.getRunningContainers();
    wc.launchContainer();
    assertEquals(running + 1, metrics.getRunningContainers());
    reset(wc.localizerBus);
    wc.containerSuccessful();
    assertEquals(ContainerState.EXITED_WITH_SUCCESS,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int completed = metrics.getCompletedContainers();
    wc.dockerContainerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(completed + 1, metrics.getCompletedContainers());
    assertEquals(running, metrics.getRunningContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * An INIT event arriving after the container is already DONE must be
 * ignored: the container stays in DONE with no localized resources.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testInitWhileDone() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(6, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    wc.launchContainer();
    reset(wc.localizerBus);
    wc.containerSuccessful();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    verifyOutofBandHeartBeat(wc);
    assertNull(wc.c.getLocalizedResources());
    // Now in DONE, issue INIT
    wc.initContainer();
    // Verify still in DONE
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Docker variant of {@link #testInitWhileDone()}: after Docker cleanup has
 * put the container in DONE, a late INIT event must leave it in DONE.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testDockerContainerInitWhileDone() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(6, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    wc.launchContainer();
    reset(wc.localizerBus);
    wc.containerSuccessful();
    wc.dockerContainerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    verifyOutofBandHeartBeat(wc);
    assertNull(wc.c.getLocalizedResources());
    // Now in DONE, issue INIT
    wc.initContainer();
    // Verify still in DONE
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * A RESOURCE_FAILED event (as sent by LocalizeRunner) arriving after the
 * container is already DONE must be ignored: the container stays in DONE.
 */
@Test
@SuppressWarnings("unchecked")
// mocked generic
public void testLocalizationFailureAtDone() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(6, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    wc.launchContainer();
    reset(wc.localizerBus);
    wc.containerSuccessful();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    verifyOutofBandHeartBeat(wc);
    assertNull(wc.c.getLocalizedResources());
    // Now in DONE, issue RESOURCE_FAILED as done by LocalizeRunner
    wc.resourceFailedContainer();
    // Verify still in DONE
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Docker variant of {@link #testLocalizationFailureAtDone()}: a late
 * RESOURCE_FAILED event after Docker cleanup must leave the container DONE.
 */
@Test
@SuppressWarnings("unchecked")
// mocked generic
public void testDockerContainerLocalizationFailureAtDone() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(6, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    wc.launchContainer();
    reset(wc.localizerBus);
    wc.containerSuccessful();
    wc.dockerContainerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    verifyOutofBandHeartBeat(wc);
    assertNull(wc.c.getLocalizedResources());
    // Now in DONE, issue RESOURCE_FAILED as done by LocalizeRunner
    wc.resourceFailedContainer();
    // Verify still in DONE
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Regression test: a ContainerResourceFailedEvent delivered while the
 * container is already RUNNING must be handled without throwing (this used
 * to cause a NullPointerException).
 */
@Test
@SuppressWarnings("unchecked")
public void testLocalizationFailureWhileRunning()
    throws Exception {
  WrappedContainer container = null;
  try {
    container = new WrappedContainer(6, 314159265358979L, 4344, "yak");
    container.initContainer();
    container.localizeResources();
    container.launchContainer();
    reset(container.localizerBus);
    assertEquals(ContainerState.RUNNING, container.c.getContainerState());
    // Previously triggered an NPE; the test passes if no exception escapes.
    container.handleContainerResourceFailedEvent();
  } finally {
    if (container != null) {
      container.finished();
    }
  }
}
/**
 * A kill request against a running container moves it to KILLING and clears
 * the localized resources; once the kill completes, the localizer must be
 * asked to clean up.
 */
@Test
@SuppressWarnings("unchecked") // mocked generic
public void testCleanupOnKillRequest() throws Exception {
  WrappedContainer container = null;
  try {
    container = new WrappedContainer(12, 314159265358979L, 4344, "yak");
    container.initContainer();
    container.localizeResources();
    container.launchContainer();
    reset(container.localizerBus);
    container.killContainer();
    assertEquals(ContainerState.KILLING, container.c.getContainerState());
    assertNull(container.c.getLocalizedResources());
    container.containerKilledOnRequest();
    verifyCleanupCall(container);
  } finally {
    if (container != null) {
      container.finished();
    }
  }
}
/**
 * Kill while still NEW: the container goes straight to DONE with exit status
 * KILLED_BY_RESOURCEMANAGER, "KillRequest" diagnostics, the killed metric
 * incremented, and per-container metrics (exit code, start/finish times)
 * recorded. The NEW->DONE transition must be captured by the listener maps.
 */
@Test
public void testKillOnNew() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(13, 314159265358979L, 4344, "yak");
    assertEquals(ContainerState.NEW, wc.c.getContainerState());
    int killed = metrics.getKilledContainers();
    wc.killContainer();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    verifyOutofBandHeartBeat(wc);
    assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
        wc.c.cloneAndGetContainerStatus().getExitStatus());
    assertTrue(wc.c.cloneAndGetContainerStatus().getDiagnostics()
        .contains("KillRequest"));
    assertEquals(killed + 1, metrics.getKilledContainers());
    // check container metrics is generated.
    ContainerMetrics containerMetrics =
        ContainerMetrics.forContainer(wc.cId, 1, 5000);
    Assert.assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
        containerMetrics.exitCode.value());
    Assert.assertTrue(containerMetrics.startTime.value() > 0);
    Assert.assertTrue(containerMetrics.finishTime.value() >=
        containerMetrics.startTime.value());
    Assert.assertEquals(ContainerEventType.KILL_CONTAINER,
        wc.initStateToEvent.get(ContainerState.NEW));
    Assert.assertEquals(ContainerState.DONE,
        wc.eventToFinalState.get(ContainerEventType.KILL_CONTAINER));
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Kill while LOCALIZING: the container moves to KILLING with exit status
 * KILLED_BY_RESOURCEMANAGER and "KillRequest" diagnostics; after resource
 * cleanup it ends in DONE and the killed metric is incremented.
 */
@Test
public void testKillOnLocalizing() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(14, 314159265358979L, 4344, "yak");
    wc.initContainer();
    assertEquals(ContainerState.LOCALIZING, wc.c.getContainerState());
    wc.killContainer();
    assertEquals(ContainerState.KILLING, wc.c.getContainerState());
    assertEquals(ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
        wc.c.cloneAndGetContainerStatus().getExitStatus());
    assertTrue(wc.c.cloneAndGetContainerStatus().getDiagnostics()
        .contains("KillRequest"));
    int killed = metrics.getKilledContainers();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(killed + 1, metrics.getKilledContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Kill after all resources failed to localize: the container stays in
 * LOCALIZATION_FAILED through the kill, requests cleanup, and after resource
 * cleanup ends in DONE with the failed (not killed) metric incremented.
 */
@Test
public void testKillOnLocalizationFailed() throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(15, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.failLocalizeResources(wc.getLocalResourceCount());
    assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    wc.killContainer();
    // Kill does not change the state once localization has already failed.
    assertEquals(ContainerState.LOCALIZATION_FAILED, wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int failed = metrics.getFailedContainers();
    wc.containerResourcesCleanup();
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(failed + 1, metrics.getFailedContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Kill while SCHEDULED (localized but not yet launched): the container moves
 * to KILLING, resource cleanup must NOT start yet, and a CLEANUP_CONTAINER
 * event must go to the launcher. Once the launcher runs and the kill takes
 * effect, the container passes through CONTAINER_CLEANEDUP_AFTER_KILL to
 * DONE with the killed metric incremented and no container left running.
 */
@Test
public void testKillOnLocalizedWhenContainerNotLaunchedContainerKilled()
    throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState());
    ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId());
    wc.killContainer();
    assertEquals(ContainerState.KILLING, wc.c.getContainerState());
    // check that container cleanup hasn't started at this point.
    LocalizationCleanupMatcher cleanupResources =
        new LocalizationCleanupMatcher(wc.c);
    verify(wc.localizerBus, times(0)).handle(argThat(cleanupResources));
    // check if containerlauncher cleans up the container launch.
    verify(wc.launcherBus)
        .handle(refEq(new ContainersLauncherEvent(wc.c,
            ContainersLauncherEventType.CLEANUP_CONTAINER), "timestamp"));
    launcher.call();
    wc.drainDispatcherEvents();
    assertEquals(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    int killed = metrics.getKilledContainers();
    wc.c.handle(new ContainerEvent(wc.c.getContainerId(),
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(killed + 1, metrics.getKilledContainers());
    assertEquals(0, metrics.getRunningContainers());
    assertEquals(0, wc.launcher.running.size());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Docker variant of the kill-while-SCHEDULED case: after the launcher runs,
 * the container passes through CONTAINER_CLEANEDUP_AFTER_KILL (with the
 * Docker cleanup verified) to DONE, incrementing the killed metric.
 */
@Test
public void testDockerKillOnLocalizedWhenContainerNotLaunchedContainerKilled()
    throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    wc.setupDockerContainerEnv();
    wc.initContainer();
    wc.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState());
    ContainerLaunch launcher = wc.launcher.running.get(wc.c.getContainerId());
    wc.killContainer();
    assertEquals(ContainerState.KILLING, wc.c.getContainerState());
    launcher.call();
    wc.drainDispatcherEvents();
    assertEquals(ContainerState.CONTAINER_CLEANEDUP_AFTER_KILL,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyDockerContainerCleanupCall(wc);
    int killed = metrics.getKilledContainers();
    wc.c.handle(new ContainerEvent(wc.c.getContainerId(),
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(killed + 1, metrics.getKilledContainers());
    assertEquals(0, metrics.getRunningContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Kill while SCHEDULED but the container subsequently reports success: the
 * success outcome wins — the container ends EXITED_WITH_SUCCESS, requests
 * cleanup, and reaches DONE once resources are cleaned up.
 */
@Test
public void testKillOnLocalizedWhenContainerNotLaunchedContainerSuccess()
    throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState());
    wc.killContainer();
    assertEquals(ContainerState.KILLING, wc.c.getContainerState());
    wc.containerSuccessful();
    wc.drainDispatcherEvents();
    assertEquals(ContainerState.EXITED_WITH_SUCCESS,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    wc.c.handle(new ContainerEvent(wc.c.getContainerId(),
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(0, metrics.getRunningContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
/**
 * Kill while SCHEDULED followed by a container failure: the container ends
 * EXITED_WITH_FAILURE, requests cleanup, and reaches DONE once resources
 * are cleaned up, with no container left running.
 */
@Test
public void testKillOnLocalizedWhenContainerNotLaunchedContainerFailure()
    throws Exception {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    wc.initContainer();
    wc.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wc.c.getContainerState());
    wc.killContainer();
    assertEquals(ContainerState.KILLING, wc.c.getContainerState());
    wc.containerFailed(ExitCode.FORCE_KILLED.getExitCode());
    wc.drainDispatcherEvents();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wc.c.getContainerState());
    assertNull(wc.c.getLocalizedResources());
    verifyCleanupCall(wc);
    wc.c.handle(new ContainerEvent(wc.c.getContainerId(),
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    assertEquals(ContainerState.DONE, wc.c.getContainerState());
    assertEquals(0, metrics.getRunningContainers());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
@Test
public void testDockerKillOnLocalizedContainerNotLaunchedContainerFailure()
    throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    // Same scenario as the non-docker variant, but a docker container must
    // additionally trigger the launcher's CLEANUP_CONTAINER path.
    wrapped.setupDockerContainerEnv();
    wrapped.initContainer();
    wrapped.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wrapped.c.getContainerState());
    wrapped.killContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    wrapped.containerFailed(ExitCode.FORCE_KILLED.getExitCode());
    wrapped.drainDispatcherEvents();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyDockerContainerCleanupCall(wrapped);
    wrapped.c.handle(new ContainerEvent(wrapped.c.getContainerId(),
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    assertEquals(ContainerState.DONE, wrapped.c.getContainerState());
    assertEquals(0, metrics.getRunningContainers());
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testKillOnLocalizedWhenContainerLaunched() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    wrapped.initContainer();
    wrapped.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wrapped.c.getContainerState());
    // Run the launch synchronously; with the mocked executor it ends in
    // EXITED_WITH_FAILURE before the kill arrives.
    ContainerLaunch launch =
        wrapped.launcher.running.get(wrapped.c.getContainerId());
    launch.call();
    wrapped.drainDispatcherEvents();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
    // A kill after exit must be a no-op on the terminal state.
    wrapped.killContainer();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testDockerKillOnLocalizedWhenContainerLaunched()
    throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(17, 314159265358979L, 4344, "yak");
    // Docker variant: after the failed launch and no-op kill, the docker
    // cleanup event must have been sent to the launcher.
    wrapped.setupDockerContainerEnv();
    wrapped.initContainer();
    wrapped.localizeResources();
    assertEquals(ContainerState.SCHEDULED, wrapped.c.getContainerState());
    ContainerLaunch launch =
        wrapped.launcher.running.get(wrapped.c.getContainerId());
    launch.call();
    wrapped.drainDispatcherEvents();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
    wrapped.killContainer();
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyDockerContainerCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testResourceLocalizedOnLocalizationFailed() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(16, 314159265358979L, 4344, "yak");
    wrapped.initContainer();
    // Fail roughly half the resources (at least one) to push the container
    // into LOCALIZATION_FAILED.
    int failCount = Math.max(1, wrapped.getLocalResourceCount() / 2);
    wrapped.failLocalizeResources(failCount);
    assertEquals(ContainerState.LOCALIZATION_FAILED,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    // Late LOCALIZED events for the remaining resources must not move the
    // container out of LOCALIZATION_FAILED.
    wrapped.localizeResourcesFromInvalidState(failCount);
    assertEquals(ContainerState.LOCALIZATION_FAILED,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyCleanupCall(wrapped);
    Assert.assertTrue(
        wrapped.getDiagnostics().contains(FAKE_LOCALIZATION_ERROR));
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testResourceFailedOnLocalizationFailed() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(16, 314159265358979L, 4344, "yak");
    wrapped.initContainer();
    Iterator<String> rsrcKeys = wrapped.localResources.keySet().iterator();
    String firstKey = rsrcKeys.next();
    String secondKey = rsrcKeys.next();
    // First failure enters LOCALIZATION_FAILED; a second failure while
    // already failed must be absorbed without a state change.
    wrapped.failLocalizeSpecificResource(firstKey);
    assertEquals(ContainerState.LOCALIZATION_FAILED,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    wrapped.failLocalizeSpecificResource(secondKey);
    assertEquals(ContainerState.LOCALIZATION_FAILED,
        wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testResourceFailedOnKilling() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(16, 314159265358979L, 4344, "yak");
    wrapped.initContainer();
    String firstKey = wrapped.localResources.keySet().iterator().next();
    // Kill while localizing, then fail one resource: the failure must not
    // knock the container out of KILLING.
    wrapped.killContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    wrapped.failLocalizeSpecificResource(firstKey);
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    verifyCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
/**
 * Verify that every aux-service data entry is forwarded to the aux service
 * bus and that a container without local resources is launched directly.
 */
@Test
public void testServiceData() throws Exception {
  WrappedContainer wrapped = null;
  try {
    // withLocalRes=false, withServiceData=true.
    wrapped = new WrappedContainer(9, 314159265358979L, 4344, "yak",
        false, true);
    assertEquals(ContainerState.NEW, wrapped.c.getContainerState());
    wrapped.initContainer();
    for (final Map.Entry<String, ByteBuffer> entry
        : wrapped.serviceData.entrySet()) {
      // Each service must receive exactly its own payload.
      ArgumentMatcher<AuxServicesEvent> serviceDataSent =
          evt -> entry.getKey().equals(evt.getServiceID())
              && 0 == entry.getValue().compareTo(evt.getServiceData());
      verify(wrapped.auxBus).handle(argThat(serviceDataSent));
    }
    // Copy into an effectively-final local for capture by the lambda below.
    final WrappedContainer captured = wrapped;
    // No resources to localize, so a launch request goes straight out.
    ArgumentMatcher<ContainersLauncherEvent> launchRequested =
        evt -> evt.getType() == ContainersLauncherEventType.LAUNCH_CONTAINER
            && captured.cId.equals(evt.getContainer().getContainerId());
    verify(wrapped.launcherBus).handle(argThat(launchRequested));
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testLaunchAfterKillRequest() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(14, 314159265358979L, 4344, "yak");
    wrapped.initContainer();
    wrapped.localizeResources();
    wrapped.killContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    // A LAUNCHED event arriving after the kill request must not revive the
    // container; it stays in KILLING until the kill completes.
    wrapped.launchContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    wrapped.containerKilledOnRequest();
    verifyCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testDockerContainerLaunchAfterKillRequest() throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(14, 314159265358979L, 4344, "yak");
    // Docker variant of testLaunchAfterKillRequest: additionally expects
    // the docker CLEANUP_CONTAINER event on the launcher bus.
    wrapped.setupDockerContainerEnv();
    wrapped.initContainer();
    wrapped.localizeResources();
    wrapped.killContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    wrapped.launchContainer();
    assertEquals(ContainerState.KILLING, wrapped.c.getContainerState());
    assertNull(wrapped.c.getLocalizedResources());
    wrapped.containerKilledOnRequest();
    verifyDockerContainerCleanupCall(wrapped);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
/**
 * Exercises every {@link ContainerRetryPolicy} against the retry helper:
 * NEVER_RETRY never relaunches, RETRY_ON_ALL_ERRORS relaunches up to
 * maxRetries for non-zero exits, and RETRY_ON_SPECIFIC_ERROR_CODES only
 * relaunches for configured codes (with 143/SIGTERM always excluded).
 */
@Test
public void testContainerRetry() throws Exception {
  ContainerRetryContext containerRetryContext1 = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.NEVER_RETRY, null, 3, 0);
  testContainerRetry(containerRetryContext1, 2, 0);
  ContainerRetryContext containerRetryContext2 = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0);
  testContainerRetry(containerRetryContext2, 2, 3);
  ContainerRetryContext containerRetryContext3 = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0);
  // If exit code is 0, it will not retry
  testContainerRetry(containerRetryContext3, 0, 0);
  // Specific-codes policy with no codes configured: nothing matches.
  ContainerRetryContext containerRetryContext4 = ContainerRetryContext
      .newInstance(
          ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES, null, 3, 0);
  testContainerRetry(containerRetryContext4, 2, 0);
  HashSet<Integer> errorCodes = new HashSet<>();
  errorCodes.add(2);
  errorCodes.add(6);
  ContainerRetryContext containerRetryContext5 = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES,
          errorCodes, 3, 0);
  testContainerRetry(containerRetryContext5, 2, 3);
  HashSet<Integer> errorCodes2 = new HashSet<>();
  // Fix: 143 was previously added to errorCodes (already unused at this
  // point) instead of errorCodes2, leaving errorCodes2 empty so the
  // SIGTERM exclusion below was never actually exercised.
  errorCodes2.add(143);
  ContainerRetryContext containerRetryContext6 = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_SPECIFIC_ERROR_CODES,
          errorCodes2, 3, 0);
  // If exit code is 143(SIGTERM), it will not retry even it is in errorCodes.
  testContainerRetry(containerRetryContext6, 143, 0);
}
/**
 * Drives a container through repeated failures with the given retry context
 * and asserts how many times the state machine relaunched it.
 *
 * @param containerRetryContext retry policy under test
 * @param exitCode exit code reported on every failure
 * @param expectedRetries number of relaunches the policy should allow
 */
private void testContainerRetry(ContainerRetryContext containerRetryContext,
    int exitCode, int expectedRetries) throws Exception {
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(24, 314159265358979L, 4344, "yak",
        containerRetryContext);
    wrapped.initContainer();
    wrapped.localizeResources();
    wrapped.launchContainer();
    int observedRetries = 0;
    // Each failure either relaunches the container (state back to RUNNING)
    // or terminates it, ending the loop.
    wrapped.containerFailed(exitCode);
    while (wrapped.c.getContainerState() == ContainerState.RUNNING) {
      observedRetries++;
      wrapped.containerFailed(exitCode);
    }
    Assert.assertEquals(expectedRetries, observedRetries);
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
@Test
public void testContainerRestartInterval() throws IOException {
  // NM enforces a 2s minimum retry interval; requested intervals below it
  // are raised to the minimum, larger ones are kept.
  conf.setInt(YarnConfiguration.NM_CONTAINER_RETRY_MINIMUM_INTERVAL_MS, 2000);
  ContainerRetryContext neverRetry = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.NEVER_RETRY, null, 3, 0);
  testContainerRestartInterval(neverRetry, 0);
  ContainerRetryContext belowMinimum = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 0);
  testContainerRestartInterval(belowMinimum, 2000);
  ContainerRetryContext aboveMinimum = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 3, 4000);
  testContainerRestartInterval(aboveMinimum, 4000);
}
/**
 * Builds a container with the given retry context and asserts the retry
 * interval the {@link ContainerImpl} actually adopted.
 *
 * @param containerRetryContext retry context to construct the container with
 * @param expectedRestartInterval interval (ms) the container should report
 */
private void testContainerRestartInterval(
    ContainerRetryContext containerRetryContext,
    int expectedRestartInterval) throws IOException {
  WrappedContainer wc = null;
  try {
    wc = new WrappedContainer(25, 314159265358980L, 4345,
        "yak", containerRetryContext);
    // Fix: arguments were previously (actual, expected), which produces a
    // misleading JUnit failure message; assertEquals expects (expected,
    // actual).
    Assert.assertEquals(
        expectedRestartInterval,
        ((ContainerImpl) wc.c).getContainerRetryContext().getRetryInterval());
  } finally {
    if (wc != null) {
      wc.finished();
    }
  }
}
@Test
public void testContainerRetryFailureValidityInterval() throws Exception {
  // maxRetries=1, failuresValidityInterval=10: failures older than 10 clock
  // units are forgotten, so well-spaced failures keep being retried.
  ContainerRetryContext containerRetryContext = ContainerRetryContext
      .newInstance(ContainerRetryPolicy.RETRY_ON_ALL_ERRORS, null, 1, 0, 10);
  WrappedContainer wrapped = null;
  try {
    wrapped = new WrappedContainer(25, 314159265358980L, 4200, "test",
        containerRetryContext);
    ControlledClock clock = new ControlledClock();
    wrapped.getRetryPolicy().setClock(clock);
    wrapped.initContainer();
    wrapped.localizeResources();
    wrapped.launchContainer();
    // Failures at t=0, 20, 40 are each outside the previous one's validity
    // window, so the container keeps getting relaunched.
    wrapped.containerFailed(12);
    assertEquals(ContainerState.RUNNING, wrapped.c.getContainerState());
    clock.setTime(20);
    wrapped.containerFailed(12);
    assertEquals(ContainerState.RUNNING, wrapped.c.getContainerState());
    clock.setTime(40);
    wrapped.containerFailed(12);
    assertEquals(ContainerState.RUNNING, wrapped.c.getContainerState());
    // t=45 is within 10 units of the t=40 failure, exhausting maxRetries=1.
    clock.setTime(45);
    wrapped.containerFailed(12);
    assertEquals(ContainerState.EXITED_WITH_FAILURE,
        wrapped.c.getContainerState());
  } finally {
    if (wrapped != null) {
      wrapped.finished();
    }
  }
}
/**
 * Asserts that the localizer bus was asked, at least once, to release all of
 * the container's resources across every visibility.
 */
private void verifyCleanupCall(WrappedContainer wc) throws Exception {
  ResourcesReleasedMatcher releasedAll = new ResourcesReleasedMatcher(
      wc.localResources,
      EnumSet.of(LocalResourceVisibility.PUBLIC,
          LocalResourceVisibility.PRIVATE,
          LocalResourceVisibility.APPLICATION),
      wc.c);
  verify(wc.localizerBus, atLeastOnce()).handle(argThat(releasedAll));
}
// Asserts that the container triggered an out-of-band node heartbeat via
// the (mocked) NodeStatusUpdater.
private void verifyOutofBandHeartBeat(WrappedContainer wc) {
  verify(wc.context.getNodeStatusUpdater()).sendOutofBandHeartBeat();
}
// Asserts that a docker container's teardown sent a CLEANUP_CONTAINER event
// to the launcher bus. refEq ignores the "timestamp" field, which differs
// between the expected and the actually-dispatched event.
private void verifyDockerContainerCleanupCall(WrappedContainer wc)
    throws Exception {
  // check if containerlauncher cleans up the container launch.
  verify(wc.launcherBus)
      .handle(refEq(new ContainersLauncherEvent(wc.c,
          ContainersLauncherEventType.CLEANUP_CONTAINER), "timestamp"));
}
/**
 * Mockito matcher accepting localization-cleanup events addressed to one
 * specific container (compared by identity).
 */
private static class LocalizationCleanupMatcher implements
    ArgumentMatcher<LocalizationEvent> {
  Container container;

  LocalizationCleanupMatcher(Container container) {
    this.container = container;
  }

  @Override
  public boolean matches(LocalizationEvent e) {
    // Must be a cleanup event and must target exactly our container.
    return e instanceof ContainerLocalizationCleanupEvent
        && ((ContainerLocalizationCleanupEvent) e).getContainer()
            == container;
  }
}
/**
 * Matches a cleanup event for a given container that releases exactly the
 * resources of the requested visibilities — no more, no fewer.
 */
private static class ResourcesReleasedMatcher extends
    LocalizationCleanupMatcher {
  final HashSet<LocalResourceRequest> released =
      new HashSet<LocalResourceRequest>();

  ResourcesReleasedMatcher(Map<String, LocalResource> allResources,
      EnumSet<LocalResourceVisibility> vis, Container c)
      throws URISyntaxException {
    super(c);
    // Collect only the resources whose visibility we care about.
    for (Entry<String, LocalResource> entry : allResources.entrySet()) {
      if (vis.contains(entry.getValue().getVisibility())) {
        released.add(new LocalResourceRequest(entry.getValue()));
      }
    }
  }

  @Override
  public boolean matches(LocalizationEvent e) {
    // Event type and target container are checked by the superclass.
    if (!super.matches(e)) {
      return false;
    }
    ContainerLocalizationCleanupEvent cleanup =
        (ContainerLocalizationCleanupEvent) e;
    // Tick off each released resource; any unexpected or missing resource
    // fails the match.
    final HashSet<LocalResourceRequest> remaining =
        new HashSet<LocalResourceRequest>(released);
    for (Collection<LocalResourceRequest> group
        : cleanup.getResources().values()) {
      for (LocalResourceRequest req : group) {
        if (!remaining.remove(req)) {
          return false;
        }
      }
    }
    return remaining.isEmpty();
  }
}
/**
 * Accepts a localization request event iff its resource payload is exactly
 * the set of resources (of the selected visibilities) it was built from.
 */
private static class ResourcesRequestedMatcher implements
    ArgumentMatcher<LocalizationEvent> {
  final HashSet<LocalResourceRequest> requested =
      new HashSet<LocalResourceRequest>();

  ResourcesRequestedMatcher(Map<String, LocalResource> allResources,
      EnumSet<LocalResourceVisibility> vis) throws URISyntaxException {
    for (Entry<String, LocalResource> entry : allResources.entrySet()) {
      if (vis.contains(entry.getValue().getVisibility())) {
        requested.add(new LocalResourceRequest(entry.getValue()));
      }
    }
  }

  @Override
  public boolean matches(LocalizationEvent e) {
    ContainerLocalizationRequestEvent request =
        (ContainerLocalizationRequestEvent) e;
    // Exact set equality: every requested resource must be expected, and
    // every expected resource must appear.
    final HashSet<LocalResourceRequest> remaining =
        new HashSet<LocalResourceRequest>(requested);
    for (Collection<LocalResourceRequest> group
        : request.getRequestedResources().values()) {
      for (LocalResourceRequest req : group) {
        if (!remaining.remove(req)) {
          return false;
        }
      }
    }
    return remaining.isEmpty();
  }
}
/**
 * Builds one randomly-named local resource of the given visibility, keyed by
 * a random hex name, with randomized size and timestamp.
 */
private static Entry<String, LocalResource> getMockRsrc(Random r,
    LocalResourceVisibility vis) {
  String key = Long.toHexString(r.nextLong());
  URL location =
      BuilderUtils.newURL("file", null, 0, "/local" + vis + "/" + key);
  LocalResource resource = BuilderUtils.newLocalResource(
      location, LocalResourceType.FILE, vis,
      r.nextInt(1024) + 1024L, r.nextInt(1024) + 2048L, false);
  return new SimpleEntry<String, LocalResource>(key, resource);
}
/**
 * Creates a random mix of PUBLIC, PRIVATE and APPLICATION local resources.
 * The three previously copy-pasted loops are factored into one helper; the
 * sequence of {@link Random} calls is unchanged, so a given seed still
 * produces the same resources.
 */
private static Map<String, LocalResource> createLocalResources(Random r) {
  Map<String, LocalResource> localResources =
      new HashMap<String, LocalResource>();
  addMockRsrcs(r, localResources, 5, LocalResourceVisibility.PUBLIC);
  addMockRsrcs(r, localResources, 5, LocalResourceVisibility.PRIVATE);
  addMockRsrcs(r, localResources, 2, LocalResourceVisibility.APPLICATION);
  return localResources;
}

/**
 * Adds between {@code bound + 1} and {@code 2 * bound + 1} mock resources of
 * the given visibility to {@code target}, matching the original per-loop
 * count expression {@code r.nextInt(bound) + bound}.
 */
private static void addMockRsrcs(Random r, Map<String, LocalResource> target,
    int bound, LocalResourceVisibility vis) {
  for (int i = r.nextInt(bound) + bound; i >= 0; --i) {
    Entry<String, LocalResource> rsrc = getMockRsrc(r, vis);
    target.put(rsrc.getKey(), rsrc.getValue());
  }
}
/**
 * Creates 6–10 random aux-service payloads: each entry maps a random hex
 * service name to 1–2 KiB of random bytes.
 */
private static Map<String, ByteBuffer> createServiceData(Random rand) {
  Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();
  for (int remaining = rand.nextInt(5) + 5; remaining >= 0; --remaining) {
    String serviceName = Long.toHexString(rand.nextLong());
    byte[] payload = new byte[rand.nextInt(1024) + 1024];
    rand.nextBytes(payload);
    serviceData.put(serviceName, ByteBuffer.wrap(payload));
  }
  return serviceData;
}
@SuppressWarnings("unchecked")
// Test harness wrapping a real ContainerImpl with a DrainDispatcher and
// mocked event buses so tests can feed events synchronously and verify
// what the container dispatched in response. finished() MUST be called
// (tests do so in finally blocks) to stop the dispatcher thread.
private class WrappedContainer {
  final DrainDispatcher dispatcher;
  // Mocked buses; tests verify() against these to observe outgoing events.
  final EventHandler<LocalizationEvent> localizerBus;
  final EventHandler<ContainersLauncherEvent> launcherBus;
  final EventHandler<ContainersMonitorEvent> monitorBus;
  final EventHandler<AuxServicesEvent> auxBus;
  final EventHandler<ApplicationEvent> appBus;
  final EventHandler<LogHandlerEvent> LogBus;
  final EventHandler<ContainerSchedulerEvent> schedBus;
  final ContainersLauncher launcher;
  final ContainerLaunchContext ctxt;
  final ContainerId cId;
  // The container under test.
  final Container c;
  final Map<String, LocalResource> localResources;
  final Map<String, ByteBuffer> serviceData;
  final Context context = mock(Context.class);
  private final DeletionService delService;
  // Populated by the state-transition listener below; records observed
  // (state before event -> event) and (event -> state after event) pairs.
  private final Map<ContainerState, ContainerEventType> initStateToEvent =
      new HashMap<>();
  private final Map<ContainerEventType, ContainerState> eventToFinalState =
      new HashMap<>();
  private final ContainerExecutor exec;

  WrappedContainer(int appId, long timestamp, int id, String user)
      throws IOException {
    this(appId, timestamp, id, user, null);
  }

  WrappedContainer(int appId, long timestamp, int id, String user,
      ContainerRetryContext containerRetryContext) throws IOException {
    // Defaults: with local resources, without service data.
    this(appId, timestamp, id, user, true, false, containerRetryContext);
  }

  WrappedContainer(int appId, long timestamp, int id, String user,
      boolean withLocalRes, boolean withServiceData) throws IOException {
    this(appId, timestamp, id, user, withLocalRes, withServiceData, null);
  }

  @SuppressWarnings("rawtypes")
  WrappedContainer(int appId, long timestamp, int id, String user,
      boolean withLocalRes, boolean withServiceData,
      ContainerRetryContext containerRetryContext) throws IOException {
    dispatcher = new DrainDispatcher();
    dispatcher.init(new Configuration());
    localizerBus = mock(EventHandler.class);
    launcherBus = mock(EventHandler.class);
    monitorBus = mock(EventHandler.class);
    auxBus = mock(EventHandler.class);
    appBus = mock(EventHandler.class);
    LogBus = mock(EventHandler.class);
    delService = mock(DeletionService.class);
    // Real scheduler, but short-circuited to launch immediately.
    schedBus = new ContainerScheduler(context, dispatcher, metrics, 0) {
      @Override
      protected void scheduleContainer(Container container) {
        container.sendLaunchEvent();
      }
    };
    exec = mock(ContainerExecutor.class);
    dispatcher.register(LocalizationEventType.class, localizerBus);
    dispatcher.register(ContainersLauncherEventType.class, launcherBus);
    dispatcher.register(ContainersMonitorEventType.class, monitorBus);
    dispatcher.register(ContainerSchedulerEventType.class, schedBus);
    dispatcher.register(AuxServicesEventType.class, auxBus);
    dispatcher.register(ApplicationEventType.class, appBus);
    dispatcher.register(LogHandlerEventType.class, LogBus);
    when(context.getApplications()).thenReturn(
        new ConcurrentHashMap<ApplicationId, Application>());
    NMNullStateStoreService stateStore = new NMNullStateStoreService();
    when(context.getNMStateStore()).thenReturn(stateStore);
    NodeStatusUpdater nodeStatusUpdater = mock(NodeStatusUpdater.class);
    when(context.getNodeStatusUpdater()).thenReturn(nodeStatusUpdater);
    ContainerExecutor executor = mock(ContainerExecutor.class);
    Mockito.doNothing().when(executor).pauseContainer(any(Container.class));
    Mockito.doNothing().when(executor).resumeContainer(any(Container.class));
    launcher =
        new ContainersLauncher(context, dispatcher, executor, null, null);
    // create a mock ExecutorService, which will not really launch
    // ContainerLaunch at all.
    launcher.containerLauncher = mock(ExecutorService.class);
    Future future = mock(Future.class);
    when(launcher.containerLauncher.submit
        (any(Callable.class))).thenReturn(future);
    when(future.isDone()).thenReturn(false);
    when(future.cancel(false)).thenReturn(true);
    launcher.init(new Configuration());
    launcher.start();
    dispatcher.register(ContainersLauncherEventType.class, launcher);
    ctxt = mock(ContainerLaunchContext.class);
    org.apache.hadoop.yarn.api.records.Container mockContainer =
        mock(org.apache.hadoop.yarn.api.records.Container.class);
    cId = BuilderUtils.newContainerId(appId, 1, timestamp, id);
    when(mockContainer.getId()).thenReturn(cId);
    Resource resource = BuilderUtils.newResource(1024, 1);
    when(mockContainer.getResource()).thenReturn(resource);
    String host = "127.0.0.1";
    int port = 1234;
    long currentTime = System.currentTimeMillis();
    // Token identifier carries the container's identity and resource claim.
    ContainerTokenIdentifier identifier =
        new ContainerTokenIdentifier(cId, "127.0.0.1", user, resource,
            currentTime + 10000L, 123, currentTime, Priority.newInstance(0),
            0);
    Token token =
        BuilderUtils.newContainerToken(BuilderUtils.newNodeId(host, port),
            "password".getBytes(), identifier);
    when(mockContainer.getContainerToken()).thenReturn(token);
    if (withLocalRes) {
      // Randomized resources; the seed is printed so a failing run can be
      // reproduced.
      Random r = new Random();
      long seed = r.nextLong();
      r.setSeed(seed);
      System.out.println("WrappedContainerLocalResource seed: " + seed);
      localResources = createLocalResources(r);
    } else {
      localResources = Collections.<String, LocalResource> emptyMap();
    }
    when(ctxt.getLocalResources()).thenReturn(localResources);
    when(exec.getLocalResources(any())).thenReturn(localResources);
    when(context.getContainerExecutor()).thenReturn(exec);
    if (withServiceData) {
      Random r = new Random();
      long seed = r.nextLong();
      r.setSeed(seed);
      System.out.println("ServiceData seed: " + seed);
      serviceData = createServiceData(r);
    } else {
      serviceData = Collections.<String, ByteBuffer> emptyMap();
    }
    when(ctxt.getServiceData()).thenReturn(serviceData);
    when(ctxt.getContainerRetryContext()).thenReturn(containerRetryContext);
    when(context.getDeletionService()).thenReturn(delService);
    // Record every state transition into the maps above for inspection.
    ContainerStateTransitionListener listener =
        new ContainerStateTransitionListener() {
          @Override
          public void init(Context cntxt) {}

          @Override
          public void preTransition(ContainerImpl op,
              ContainerState beforeState,
              ContainerEvent eventToBeProcessed) {
            initStateToEvent.put(beforeState, eventToBeProcessed.getType());
          }

          @Override
          public void postTransition(ContainerImpl op,
              ContainerState beforeState,
              ContainerState afterState, ContainerEvent processedEvent) {
            eventToFinalState.put(processedEvent.getType(), afterState);
          }
        };
    NodeManager.DefaultContainerStateListener multi =
        new NodeManager.DefaultContainerStateListener();
    multi.addListener(listener);
    when(context.getContainerStateTransitionListener()).thenReturn(multi);
    c = new ContainerImpl(conf, dispatcher, ctxt, null, metrics, identifier,
        context);
    // Route container events straight back into the container instance.
    dispatcher.register(ContainerEventType.class,
        new EventHandler<ContainerEvent>() {
          @Override
          public void handle(ContainerEvent event) {
            c.handle(event);
          }
        });
    dispatcher.start();
  }

  // Blocks until the dispatcher queue is empty.
  private void drainDispatcherEvents() {
    dispatcher.await();
  }

  // Must be called once per test to stop the dispatcher thread.
  public void finished() {
    dispatcher.stop();
  }

  public void initContainer() {
    c.handle(new ContainerEvent(cId, ContainerEventType.INIT_CONTAINER));
    drainDispatcherEvents();
  }

  public void resourceFailedContainer() {
    c.handle(new ContainerEvent(cId, ContainerEventType.RESOURCE_FAILED));
    drainDispatcherEvents();
  }

  public void handleContainerResourceFailedEvent() {
    c.handle(new ContainerResourceFailedEvent(cId, null, null));
    drainDispatcherEvents();
  }

  // Localize resources
  // Skip some resources so as to consider them failed
  public Map<Path, List<String>> doLocalizeResources(
      boolean checkLocalizingState, int skipRsrcCount)
      throws URISyntaxException {
    Path cache = new Path("file:///cache");
    Map<Path, List<String>> localPaths =
        new HashMap<Path, List<String>>();
    int counter = 0;
    for (Entry<String, LocalResource> rsrc : localResources.entrySet()) {
      if (counter++ < skipRsrcCount) {
        continue;
      }
      if (checkLocalizingState) {
        assertEquals(ContainerState.LOCALIZING, c.getContainerState());
      }
      LocalResourceRequest req = new LocalResourceRequest(rsrc.getValue());
      Path p = new Path(cache, rsrc.getKey());
      localPaths.put(p, Arrays.asList(rsrc.getKey()));
      // rsrc copied to p
      c.handle(new ContainerResourceLocalizedEvent(c.getContainerId(),
          req, p));
    }
    drainDispatcherEvents();
    return localPaths;
  }

  // Localizes all resources, asserting LOCALIZING before each event.
  public Map<Path, List<String>> localizeResources()
      throws URISyntaxException {
    return doLocalizeResources(true, 0);
  }

  // Sends LOCALIZED events without checking the state first; used to probe
  // behavior after localization has already failed.
  public void localizeResourcesFromInvalidState(int skipRsrcCount)
      throws URISyntaxException {
    doLocalizeResources(false, skipRsrcCount);
  }

  public void failLocalizeSpecificResource(String rsrcKey)
      throws URISyntaxException {
    LocalResource rsrc = localResources.get(rsrcKey);
    LocalResourceRequest req = new LocalResourceRequest(rsrc);
    Exception e = new Exception(FAKE_LOCALIZATION_ERROR);
    c.handle(new ContainerResourceFailedEvent(c.getContainerId(), req, e
        .getMessage()));
    drainDispatcherEvents();
  }

  // fail to localize some resources
  public void failLocalizeResources(int failRsrcCount)
      throws URISyntaxException {
    int counter = 0;
    for (Entry<String, LocalResource> rsrc : localResources.entrySet()) {
      if (counter >= failRsrcCount) {
        break;
      }
      ++counter;
      LocalResourceRequest req = new LocalResourceRequest(rsrc.getValue());
      Exception e = new Exception(FAKE_LOCALIZATION_ERROR);
      c.handle(new ContainerResourceFailedEvent(c.getContainerId(),
          req, e.getMessage()));
    }
    drainDispatcherEvents();
  }

  public void launchContainer() {
    c.handle(new ContainerEvent(cId, ContainerEventType.CONTAINER_LAUNCHED));
    drainDispatcherEvents();
  }

  public void containerSuccessful() {
    c.handle(new ContainerEvent(cId,
        ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS));
    drainDispatcherEvents();
  }

  public void containerResourcesCleanup() {
    c.handle(new ContainerEvent(cId,
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    drainDispatcherEvents();
  }

  public void dockerContainerResourcesCleanup() {
    c.handle(new ContainerEvent(cId,
        ContainerEventType.CONTAINER_RESOURCES_CLEANEDUP));
    // check if containerlauncher cleans up the container launch.
    verify(this.launcherBus)
        .handle(refEq(new ContainersLauncherEvent(this.c,
            ContainersLauncherEventType.CLEANUP_CONTAINER), "timestamp"));
    drainDispatcherEvents();
  }

  // Marks the container's launch context as a docker container.
  public void setupDockerContainerEnv() {
    Map<String, String> env = new HashMap<>();
    env.put(ContainerRuntimeConstants.ENV_CONTAINER_TYPE,
        ContainerRuntimeConstants.CONTAINER_RUNTIME_DOCKER);
    when(this.ctxt.getEnvironment()).thenReturn(env);
  }

  public void containerFailed(int exitCode) {
    String diagnosticMsg = "Container completed with exit code " + exitCode;
    c.handle(new ContainerExitEvent(cId,
        ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, exitCode,
        diagnosticMsg));
    ContainerStatus containerStatus = c.cloneAndGetContainerStatus();
    assert containerStatus.getDiagnostics().contains(diagnosticMsg);
    assert containerStatus.getExitStatus() == exitCode;
    drainDispatcherEvents();
    // If container needs retry, relaunch it
    if (c.getContainerState() == ContainerState.RELAUNCHING) {
      launchContainer();
    }
  }

  public void killContainer() {
    c.handle(new ContainerKillEvent(cId,
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER,
        "KillRequest"));
    drainDispatcherEvents();
  }

  public void pauseContainer() {
    c.handle(new ContainerPauseEvent(cId,
        "PauseRequest"));
    drainDispatcherEvents();
  }

  public void resumeContainer() {
    c.handle(new ContainerResumeEvent(cId,
        "ResumeRequest"));
    drainDispatcherEvents();
  }

  public void containerKilledOnRequest() {
    int exitCode = ContainerExitStatus.KILLED_BY_RESOURCEMANAGER;
    String diagnosticMsg = "Container completed with exit code " + exitCode;
    c.handle(new ContainerExitEvent(cId,
        ContainerEventType.CONTAINER_KILLED_ON_REQUEST, exitCode,
        diagnosticMsg));
    ContainerStatus containerStatus = c.cloneAndGetContainerStatus();
    assert containerStatus.getDiagnostics().contains(diagnosticMsg);
    assert containerStatus.getExitStatus() == exitCode;
    drainDispatcherEvents();
  }

  public int getLocalResourceCount() {
    return localResources.size();
  }

  public String getDiagnostics() {
    return c.cloneAndGetContainerStatus().getDiagnostics();
  }

  public SlidingWindowRetryPolicy getRetryPolicy() {
    return ((ContainerImpl)c).getRetryPolicy();
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.orc.vector;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LocalZonedTimestampType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import java.sql.Timestamp;
/** A {@link Vectorizer} of {@link RowData} type element. */
public class RowDataVectorizer extends Vectorizer<RowData> {
private final LogicalType[] fieldTypes;
/**
 * Creates a vectorizer for rows whose fields have the given logical types.
 *
 * @param schema ORC type-description string passed to the base vectorizer
 * @param fieldTypes logical type of each row field, in field order
 */
public RowDataVectorizer(String schema, LogicalType[] fieldTypes) {
    super(schema);
    this.fieldTypes = fieldTypes;
}
/**
 * Appends one row to the batch: claims the next row slot and writes each
 * field of {@code row} into the matching column vector.
 */
@Override
public void vectorize(RowData row, VectorizedRowBatch batch) {
    int rowIndex = batch.size++;
    int arity = row.getArity();
    for (int fieldId = 0; fieldId < arity; fieldId++) {
        setColumn(rowIndex, batch.cols[fieldId], fieldTypes[fieldId], row, fieldId);
    }
}
/**
 * Writes field {@code columnId} of {@code row} into {@code column} at
 * position {@code rowId}, dispatching on the field's logical type. Null
 * fields are recorded via the vector's null bitmap; complex types (ARRAY,
 * MAP, ROW) recurse into the type-specific overloads below.
 *
 * @throws UnsupportedOperationException for logical types with no mapping
 */
private static void setColumn(
        int rowId, ColumnVector column, LogicalType type, RowData row, int columnId) {
    if (row.isNullAt(columnId)) {
        // Mark the slot null; noNulls must be cleared or isNull is ignored.
        column.noNulls = false;
        column.isNull[rowId] = true;
        return;
    }
    switch (type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            {
                BytesColumnVector vector = (BytesColumnVector) column;
                byte[] bytes = row.getString(columnId).toBytes();
                // setVal copies the bytes into the vector's own buffer.
                vector.setVal(rowId, bytes, 0, bytes.length);
                break;
            }
        case BOOLEAN:
            {
                // Booleans are stored as 0/1 longs in ORC.
                LongColumnVector vector = (LongColumnVector) column;
                vector.vector[rowId] = row.getBoolean(columnId) ? 1 : 0;
                break;
            }
        case BINARY:
        case VARBINARY:
            {
                BytesColumnVector vector = (BytesColumnVector) column;
                byte[] bytes = row.getBinary(columnId);
                vector.setVal(rowId, bytes, 0, bytes.length);
                break;
            }
        case DECIMAL:
            {
                // Precision/scale come from the declared type, not the value.
                DecimalType dt = (DecimalType) type;
                DecimalColumnVector vector = (DecimalColumnVector) column;
                vector.set(
                        rowId,
                        HiveDecimal.create(
                                row.getDecimal(columnId, dt.getPrecision(), dt.getScale())
                                        .toBigDecimal()));
                break;
            }
        case TINYINT:
            {
                LongColumnVector vector = (LongColumnVector) column;
                vector.vector[rowId] = row.getByte(columnId);
                break;
            }
        case SMALLINT:
            {
                LongColumnVector vector = (LongColumnVector) column;
                vector.vector[rowId] = row.getShort(columnId);
                break;
            }
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTEGER:
            {
                // DATE and TIME are int-encoded internally, same as INTEGER.
                LongColumnVector vector = (LongColumnVector) column;
                vector.vector[rowId] = row.getInt(columnId);
                break;
            }
        case BIGINT:
            {
                LongColumnVector vector = (LongColumnVector) column;
                vector.vector[rowId] = row.getLong(columnId);
                break;
            }
        case FLOAT:
            {
                DoubleColumnVector vector = (DoubleColumnVector) column;
                vector.vector[rowId] = row.getFloat(columnId);
                break;
            }
        case DOUBLE:
            {
                DoubleColumnVector vector = (DoubleColumnVector) column;
                vector.vector[rowId] = row.getDouble(columnId);
                break;
            }
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            {
                TimestampType tt = (TimestampType) type;
                Timestamp timestamp =
                        row.getTimestamp(columnId, tt.getPrecision()).toTimestamp();
                TimestampColumnVector vector = (TimestampColumnVector) column;
                vector.set(rowId, timestamp);
                break;
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            {
                LocalZonedTimestampType lt = (LocalZonedTimestampType) type;
                Timestamp timestamp =
                        row.getTimestamp(columnId, lt.getPrecision()).toTimestamp();
                TimestampColumnVector vector = (TimestampColumnVector) column;
                vector.set(rowId, timestamp);
                break;
            }
        case ARRAY:
            {
                // Overload resolution picks the ListColumnVector variant.
                ListColumnVector listColumnVector = (ListColumnVector) column;
                setColumn(rowId, listColumnVector, type, row, columnId);
                break;
            }
        case MAP:
            {
                MapColumnVector mapColumnVector = (MapColumnVector) column;
                setColumn(rowId, mapColumnVector, type, row, columnId);
                break;
            }
        case ROW:
            {
                StructColumnVector structColumnVector = (StructColumnVector) column;
                setColumn(rowId, structColumnVector, type, row, columnId);
                break;
            }
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}
/**
 * Writes the array stored in {@code row} at {@code columnId} into an ORC
 * {@link ListColumnVector}, appending the elements to the shared child vector.
 *
 * @param rowId row position in the list vector to fill
 * @param listColumnVector target ORC list vector
 * @param type logical type of the column; must be an {@link ArrayType}
 * @param row source row holding the array value
 * @param columnId field position of the array within {@code row}
 */
private static void setColumn(
        int rowId,
        ListColumnVector listColumnVector,
        LogicalType type,
        RowData row,
        int columnId) {
    final ArrayType arrayType = (ArrayType) type;
    final ArrayData arrayData = row.getArray(columnId);
    final int elementCount = arrayData.size();

    // Reserve the contiguous region [childOffset, childOffset + elementCount)
    // of the shared child vector for this row's elements.
    final long childOffset = listColumnVector.childCount;
    listColumnVector.lengths[rowId] = elementCount;
    listColumnVector.offsets[rowId] = childOffset;
    listColumnVector.childCount += elementCount;
    listColumnVector.child.ensureSize(listColumnVector.childCount, childOffset != 0);

    // View the elements as a RowData so the scalar setColumn overloads can be
    // reused recursively, one element at a time.
    final RowData elementsAsRow = convert(arrayData, arrayType.getElementType());
    for (int i = 0; i < elementCount; i++) {
        setColumn(
                (int) childOffset + i,
                listColumnVector.child,
                arrayType.getElementType(),
                elementsAsRow,
                i);
    }
}
/**
 * Writes the map stored in {@code row} at {@code columnId} into an ORC
 * {@link MapColumnVector}, appending the entries to the shared key and value
 * child vectors.
 *
 * @param rowId row position in the map vector to fill
 * @param mapColumnVector target ORC map vector
 * @param type logical type of the column; must be a {@link MapType}
 * @param row source row holding the map value
 * @param columnId field position of the map within {@code row}
 */
private static void setColumn(
        int rowId,
        MapColumnVector mapColumnVector,
        LogicalType type,
        RowData row,
        int columnId) {
    final MapType mapType = (MapType) type;
    final MapData mapData = row.getMap(columnId);
    final ArrayData keys = mapData.keyArray();
    final ArrayData values = mapData.valueArray();
    final int entryCount = mapData.size();

    // Reserve a contiguous slice of the shared key/value child vectors for
    // this row's entries.
    final long childOffset = mapColumnVector.childCount;
    mapColumnVector.lengths[rowId] = entryCount;
    mapColumnVector.offsets[rowId] = childOffset;
    mapColumnVector.childCount += entryCount;
    mapColumnVector.keys.ensureSize(mapColumnVector.childCount, childOffset != 0);
    mapColumnVector.values.ensureSize(mapColumnVector.childCount, childOffset != 0);

    // View keys and values as rows so the scalar setColumn overloads can be
    // reused recursively.
    final RowData keyRow = convert(keys, mapType.getKeyType());
    final RowData valueRow = convert(values, mapType.getValueType());
    for (int i = 0; i < keys.size(); i++) {
        final int childIndex = (int) childOffset + i;
        setColumn(childIndex, mapColumnVector.keys, mapType.getKeyType(), keyRow, i);
        setColumn(childIndex, mapColumnVector.values, mapType.getValueType(), valueRow, i);
    }
}
/**
 * Writes the nested row stored in {@code row} at {@code columnId} into an ORC
 * {@link StructColumnVector}: each field of the nested row goes into the
 * matching child vector at the same {@code rowId}.
 *
 * @param rowId row position in the struct vector to fill
 * @param structColumnVector target ORC struct vector
 * @param type logical type of the column; must be a {@link RowType}
 * @param row source row holding the nested row value
 * @param columnId field position of the struct within {@code row}
 */
private static void setColumn(
        int rowId,
        StructColumnVector structColumnVector,
        LogicalType type,
        RowData row,
        int columnId) {
    final RowType rowType = (RowType) type;
    final RowData structRow = row.getRow(columnId, structColumnVector.fields.length);
    final int arity = structRow.getArity();
    for (int fieldId = 0; fieldId < arity; fieldId++) {
        setColumn(
                rowId,
                structColumnVector.fields[fieldId],
                rowType.getTypeAt(fieldId),
                structRow,
                fieldId);
    }
}
/**
 * Wraps an {@link ArrayData} into a {@link RowData} so that array elements can
 * be fed back through {@link RowDataVectorizer#setColumn(int, ColumnVector,
 * LogicalType, RowData, int)} recursively, one element per "field".
 *
 * @param arrayData input ArrayData
 * @param arrayFieldType LogicalType of the elements of {@code arrayData}
 * @return a GenericRowData whose field {@code i} is element {@code i} of the
 *     array (or {@code null} for null elements)
 */
private static RowData convert(ArrayData arrayData, LogicalType arrayFieldType) {
    final int size = arrayData.size();
    final ArrayData.ElementGetter getter = ArrayData.createElementGetter(arrayFieldType);
    final GenericRowData result = new GenericRowData(size);
    for (int pos = 0; pos < size; pos++) {
        result.setField(pos, getter.getElementOrNull(arrayData, pos));
    }
    return result;
}
}
| |
package org.apache.lucene.search;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.ReaderUtil;
import org.apache.lucene.util.DummyConcurrentLock;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.Lock;
/** Implements search over a set of <code>Searchables</code>.
*
* <p>Applications usually need only call the inherited {@link #search(Query,int)}
* or {@link #search(Query,Filter,int)} methods.
*/
public class MultiSearcher extends Searcher {

  /**
   * Document Frequency cache acting as a Dummy-Searcher. This class is no
   * full-fledged Searcher, but only supports the methods necessary to
   * initialize Weights.
   */
  private static class CachedDfSource extends Searcher {
    private final Map<Term,Integer> dfMap; // Map from Terms to corresponding doc freqs
    private final int maxDoc; // document count

    public CachedDfSource(Map<Term,Integer> dfMap, int maxDoc, Similarity similarity) {
      this.dfMap = dfMap;
      this.maxDoc = maxDoc;
      setSimilarity(similarity);
    }

    @Override
    public int docFreq(Term term) {
      // Look the term up explicitly rather than catching the
      // NullPointerException thrown by unboxing a missing entry: using an
      // exception for control flow could also mask NPEs raised for
      // unrelated reasons.
      final Integer df = dfMap.get(term);
      if (df == null) {
        throw new IllegalArgumentException("df for term " + term.text()
            + " not available");
      }
      return df.intValue();
    }

    @Override
    public int[] docFreqs(Term[] terms) {
      final int[] result = new int[terms.length];
      for (int i = 0; i < terms.length; i++) {
        result[i] = docFreq(terms[i]);
      }
      return result;
    }

    @Override
    public int maxDoc() {
      return maxDoc;
    }

    @Override
    public Query rewrite(Query query) {
      // this is a bit of a hack. We know that a query which
      // creates a Weight based on this Dummy-Searcher is
      // always already rewritten (see preparedWeight()).
      // Therefore we just return the unmodified query here
      return query;
    }

    // The remaining Searcher operations are intentionally unsupported: this
    // class exists solely to supply cached document frequencies during
    // Weight creation and is never used for actual searching.

    @Override
    public void close() {
      throw new UnsupportedOperationException();
    }

    @Override
    public Document doc(int i) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Document doc(int i, FieldSelector fieldSelector) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Explanation explain(Weight weight, int doc) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void search(Weight weight, Filter filter, Collector results) {
      throw new UnsupportedOperationException();
    }

    @Override
    public TopDocs search(Weight weight, Filter filter, int n) {
      throw new UnsupportedOperationException();
    }

    @Override
    public TopFieldDocs search(Weight weight, Filter filter, int n, Sort sort) {
      throw new UnsupportedOperationException();
    }
  }

  private Searchable[] searchables;
  // starts[i] is the base doc id of sub-searcher i; the final entry is maxDoc.
  private int[] starts;
  private int maxDoc = 0;

  /** Creates a searcher which searches <i>searchers</i>. */
  public MultiSearcher(Searchable... searchables) throws IOException {
    this.searchables = searchables;

    starts = new int[searchables.length + 1];    // build starts array
    for (int i = 0; i < searchables.length; i++) {
      starts[i] = maxDoc;
      maxDoc += searchables[i].maxDoc();         // compute maxDocs
    }
    starts[searchables.length] = maxDoc;
  }

  /** Return the array of {@link Searchable}s this searches. */
  public Searchable[] getSearchables() {
    return searchables;
  }

  /** Returns the array of base document numbers, one entry per sub-searcher
   *  plus a trailing entry holding the total {@code maxDoc}. */
  protected int[] getStarts() {
    return starts;
  }

  // inherit javadoc
  @Override
  public void close() throws IOException {
    for (int i = 0; i < searchables.length; i++)
      searchables[i].close();
  }

  /** Returns the document frequency of {@code term} summed over all
   *  sub-searchers. */
  @Override
  public int docFreq(Term term) throws IOException {
    int docFreq = 0;
    for (int i = 0; i < searchables.length; i++)
      docFreq += searchables[i].docFreq(term);
    return docFreq;
  }

  // inherit javadoc
  @Override
  public Document doc(int n) throws CorruptIndexException, IOException {
    int i = subSearcher(n);                       // find searcher index
    return searchables[i].doc(n - starts[i]);     // dispatch to searcher
  }

  // inherit javadoc
  @Override
  public Document doc(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
    int i = subSearcher(n);                       // find searcher index
    return searchables[i].doc(n - starts[i], fieldSelector); // dispatch to searcher
  }

  /** Returns index of the searcher for document <code>n</code> in the array
   * used to construct this searcher. */
  public int subSearcher(int n) {                 // find searcher for doc n:
    return ReaderUtil.subIndex(n, starts);
  }

  /** Returns the document number of document <code>n</code> within its
   * sub-index. */
  public int subDoc(int n) {
    return n - starts[subSearcher(n)];
  }

  @Override
  public int maxDoc() throws IOException {
    return maxDoc;
  }

  @Override
  public TopDocs search(Weight weight, Filter filter, int nDocs)
      throws IOException {
    // Search each sub-searcher sequentially, merging the hits into one shared
    // queue. DummyConcurrentLock is a no-op lock: no real synchronization is
    // needed in this single-threaded variant.
    final HitQueue hq = new HitQueue(nDocs, false);
    int totalHits = 0;

    for (int i = 0; i < searchables.length; i++) { // search each searcher
      final TopDocs docs = new MultiSearcherCallableNoSort(DummyConcurrentLock.INSTANCE,
          searchables[i], weight, filter, nDocs, hq, i, starts).call();
      totalHits += docs.totalHits; // update totalHits
    }

    final ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
    for (int i = hq.size() - 1; i >= 0; i--)      // put docs in array
      scoreDocs[i] = hq.pop();

    // The queue pops in ascending score order, so after filling from the back
    // the best hit sits at index 0.
    float maxScore = (totalHits == 0) ? Float.NEGATIVE_INFINITY : scoreDocs[0].score;

    return new TopDocs(totalHits, scoreDocs, maxScore);
  }

  @Override
  public TopFieldDocs search (Weight weight, Filter filter, int n, Sort sort) throws IOException {
    FieldDocSortedHitQueue hq = new FieldDocSortedHitQueue(n);
    int totalHits = 0;

    float maxScore = Float.NEGATIVE_INFINITY;

    for (int i = 0; i < searchables.length; i++) { // search each searcher
      final TopFieldDocs docs = new MultiSearcherCallableWithSort(DummyConcurrentLock.INSTANCE,
          searchables[i], weight, filter, n, hq, sort, i, starts).call();
      totalHits += docs.totalHits; // update totalHits
      maxScore = Math.max(maxScore, docs.getMaxScore());
    }

    final ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
    for (int i = hq.size() - 1; i >= 0; i--)      // put docs in array
      scoreDocs[i] = hq.pop();

    return new TopFieldDocs(totalHits, scoreDocs, hq.getFields(), maxScore);
  }

  // inherit javadoc
  @Override
  public void search(Weight weight, Filter filter, final Collector collector)
      throws IOException {
    for (int i = 0; i < searchables.length; i++) {

      final int start = starts[i];

      // Wrap the caller's collector so that per-sub-searcher doc bases are
      // shifted into the global doc id space before being reported.
      final Collector hc = new Collector() {
        @Override
        public void setScorer(Scorer scorer) throws IOException {
          collector.setScorer(scorer);
        }

        @Override
        public void collect(int doc) throws IOException {
          collector.collect(doc);
        }

        @Override
        public void setNextReader(IndexReader reader, int docBase) throws IOException {
          collector.setNextReader(reader, start + docBase);
        }

        @Override
        public boolean acceptsDocsOutOfOrder() {
          return collector.acceptsDocsOutOfOrder();
        }
      };

      searchables[i].search(weight, filter, hc);
    }
  }

  // inherit javadoc
  @Override
  public Query rewrite(Query original) throws IOException {
    final Query[] queries = new Query[searchables.length];
    for (int i = 0; i < searchables.length; i++) {
      queries[i] = searchables[i].rewrite(original);
    }
    return queries[0].combine(queries);
  }

  // inherit javadoc
  @Override
  public Explanation explain(Weight weight, int doc) throws IOException {
    final int i = subSearcher(doc);               // find searcher index
    return searchables[i].explain(weight, doc - starts[i]); // dispatch to searcher
  }

  /**
   * Create weight in multiple index scenario.
   *
   * Distributed query processing is done in the following steps:
   * 1. rewrite query
   * 2. extract necessary terms
   * 3. collect dfs for these terms from the Searchables
   * 4. create query weight using aggregate dfs.
   * 5. distribute that weight to Searchables
   * 6. merge results
   *
   * Steps 1-4 are done here, 5+6 in the search() methods
   *
   * @return rewritten queries
   */
  @Override
  protected Weight createWeight(Query original) throws IOException {
    // step 1
    final Query rewrittenQuery = rewrite(original);

    // step 2
    final Set<Term> terms = new HashSet<Term>();
    rewrittenQuery.extractTerms(terms);

    // step3
    final Map<Term,Integer> dfMap = createDocFrequencyMap(terms);

    // step4
    final int numDocs = maxDoc();
    final CachedDfSource cacheSim = new CachedDfSource(dfMap, numDocs, getSimilarity());

    return rewrittenQuery.weight(cacheSim);
  }

  /**
   * Collects the document frequency for the given terms from all searchables
   * @param terms term set used to collect the document frequency from all
   *        searchables
   * @return a map with a term as the key and the terms aggregated document
   *         frequency as a value
   * @throws IOException if a searchable throws an {@link IOException}
   */
  Map<Term, Integer> createDocFrequencyMap(final Set<Term> terms) throws IOException {
    final Term[] allTermsArray = terms.toArray(new Term[terms.size()]);
    final int[] aggregatedDfs = new int[allTermsArray.length];
    for (Searchable searchable : searchables) {
      final int[] dfs = searchable.docFreqs(allTermsArray);
      for (int j = 0; j < aggregatedDfs.length; j++) {
        aggregatedDfs[j] += dfs[j];
      }
    }

    final HashMap<Term,Integer> dfMap = new HashMap<Term,Integer>();
    for (int i = 0; i < allTermsArray.length; i++) {
      dfMap.put(allTermsArray[i], Integer.valueOf(aggregatedDfs[i]));
    }
    return dfMap;
  }

  /**
   * A {@link Callable} that searches a single searchable (without sort) and
   * merges its hits into the shared queue, guarded by the given lock.
   */
  static final class MultiSearcherCallableNoSort implements Callable<TopDocs> {

    private final Lock lock;
    private final Searchable searchable;
    private final Weight weight;
    private final Filter filter;
    private final int nDocs;
    private final int i;
    private final HitQueue hq;
    private final int[] starts;

    public MultiSearcherCallableNoSort(Lock lock, Searchable searchable, Weight weight,
        Filter filter, int nDocs, HitQueue hq, int i, int[] starts) {
      this.lock = lock;
      this.searchable = searchable;
      this.weight = weight;
      this.filter = filter;
      this.nDocs = nDocs;
      this.hq = hq;
      this.i = i;
      this.starts = starts;
    }

    public TopDocs call() throws IOException {
      final TopDocs docs = searchable.search (weight, filter, nDocs);
      final ScoreDoc[] scoreDocs = docs.scoreDocs;
      for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
        final ScoreDoc scoreDoc = scoreDocs[j];
        scoreDoc.doc += starts[i]; // convert doc
        //it would be so nice if we had a thread-safe insert
        lock.lock();
        try {
          // docs come back in descending score order; once one fails to make
          // the queue, none of the remaining ones can either.
          if (scoreDoc == hq.insertWithOverflow(scoreDoc))
            break;
        } finally {
          lock.unlock();
        }
      }
      return docs;
    }
  }

  /**
   * A {@link Callable} that searches a single searchable with a sort and
   * merges its hits into the shared field-doc queue, guarded by the given lock.
   */
  static final class MultiSearcherCallableWithSort implements Callable<TopFieldDocs> {

    private final Lock lock;
    private final Searchable searchable;
    private final Weight weight;
    private final Filter filter;
    private final int nDocs;
    private final int i;
    private final FieldDocSortedHitQueue hq;
    private final int[] starts;
    private final Sort sort;

    public MultiSearcherCallableWithSort(Lock lock, Searchable searchable, Weight weight,
        Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i, int[] starts) {
      this.lock = lock;
      this.searchable = searchable;
      this.weight = weight;
      this.filter = filter;
      this.nDocs = nDocs;
      this.hq = hq;
      this.i = i;
      this.starts = starts;
      this.sort = sort;
    }

    public TopFieldDocs call() throws IOException {
      final TopFieldDocs docs = searchable.search (weight, filter, nDocs, sort);
      // If one of the Sort fields is FIELD_DOC, need to fix its values, so that
      // it will break ties by doc Id properly. Otherwise, it will compare to
      // 'relative' doc Ids, that belong to two different searchables.
      for (int j = 0; j < docs.fields.length; j++) {
        if (docs.fields[j].getType() == SortField.DOC) {
          // iterate over the score docs and change their fields value
          for (int j2 = 0; j2 < docs.scoreDocs.length; j2++) {
            FieldDoc fd = (FieldDoc) docs.scoreDocs[j2];
            fd.fields[j] = Integer.valueOf(((Integer) fd.fields[j]).intValue() + starts[i]);
          }
          break;
        }
      }

      lock.lock();
      try {
        hq.setFields(docs.fields);
      } finally {
        lock.unlock();
      }

      final ScoreDoc[] scoreDocs = docs.scoreDocs;
      for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
        final FieldDoc fieldDoc = (FieldDoc) scoreDocs[j];
        fieldDoc.doc += starts[i]; // convert doc
        //it would be so nice if we had a thread-safe insert
        lock.lock();
        try {
          if (fieldDoc == hq.insertWithOverflow(fieldDoc))
            break;
        } finally {
          lock.unlock();
        }
      }
      return docs;
    }
  }
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.container.impl.deployment;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.camunda.bpm.container.impl.ContainerIntegrationLogger;
import org.camunda.bpm.container.impl.metadata.BpmPlatformXmlParser;
import org.camunda.bpm.container.impl.metadata.spi.BpmPlatformXml;
import org.camunda.bpm.container.impl.spi.DeploymentOperation;
import org.camunda.bpm.container.impl.spi.DeploymentOperationStep;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.util.ClassLoaderUtil;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
/**
* <p>Deployment operation step responsible for parsing and attaching the bpm-platform.xml file.</p>
*
* @author Daniel Meyer
* @author Christian Lipphardt
*
*/
public abstract class AbstractParseBpmPlatformXmlStep extends DeploymentOperationStep {

  private static final ContainerIntegrationLogger LOG = ProcessEngineLogger.CONTAINER_INTEGRATION_LOGGER;

  public static final String BPM_PLATFORM_XML_FILE = "bpm-platform.xml";

  public static final String BPM_PLATFORM_XML_LOCATION = "bpm-platform-xml";
  public static final String BPM_PLATFORM_XML_ENVIRONMENT_VARIABLE = "BPM_PLATFORM_XML";
  public static final String BPM_PLATFORM_XML_SYSTEM_PROPERTY = "bpm.platform.xml";
  public static final String BPM_PLATFORM_XML_RESOURCE_LOCATION = "META-INF/" + BPM_PLATFORM_XML_FILE;

  // Compiled once: matches http(s) URLs pointing directly at a bpm-platform.xml file.
  private static final Pattern HTTP_URL_PATTERN =
      Pattern.compile("^(https?://).*/bpm-platform\\.xml$", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);

  // Compiled once: matches absolute unix/windows paths ending in bpm-platform.xml.
  // Note: uses [A-Za-z] for the drive letter; the previous [A-z] also matched
  // the characters '[', '\', ']', '^', '_' and '`' lying between 'Z' and 'a'.
  private static final Pattern FILE_LOCATION_PATTERN =
      Pattern.compile("^(/|[A-Za-z]://?|[A-Za-z]:\\\\).*[/|\\\\]bpm-platform\\.xml$", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);

  public String getName() {
    return "Parsing bpm-platform.xml file";
  }

  /**
   * Resolves the bpm-platform.xml location, parses it and attaches the parsed
   * {@link BpmPlatformXml} to the operation context for later deployment steps.
   */
  public void performOperationStep(DeploymentOperation operationContext) {

    URL bpmPlatformXmlSource = getBpmPlatformXmlStream(operationContext);
    ensureNotNull("Unable to find bpm-platform.xml. This file is necessary for deploying the Camunda Platform", "bpmPlatformXmlSource", bpmPlatformXmlSource);

    // parse the bpm platform xml
    BpmPlatformXml bpmPlatformXml = new BpmPlatformXmlParser().createParse()
      .sourceUrl(bpmPlatformXmlSource)
      .execute()
      .getBpmPlatformXml();

    // attach to operation context
    operationContext.addAttachment(Attachments.BPM_PLATFORM_XML, bpmPlatformXml);
  }

  /**
   * Auto-completes the given location and converts it into a {@link URL},
   * accepting either an http(s) URL or an absolute file path.
   *
   * @param url the configured resource location; may be {@code null}
   * @return the resolved URL, or {@code null} if the location matches neither form
   * @throws ProcessEngineException if the location cannot be turned into a valid URL
   */
  public URL checkValidBpmPlatformXmlResourceLocation(String url) {
    url = autoCompleteUrl(url);

    URL fileLocation = null;

    try {
      fileLocation = checkValidUrlLocation(url);
      if (fileLocation == null) {
        fileLocation = checkValidFileLocation(url);
      }
    }
    catch (MalformedURLException e) {
      throw new ProcessEngineException("'" + url + "' is not a valid Camunda Platform configuration resource location.", e);
    }

    return fileLocation;
  }

  /**
   * Appends "bpm-platform.xml" (plus a path separator if needed) to a location
   * that only points to a directory.
   */
  public String autoCompleteUrl(String url) {
    if (url != null) {
      LOG.debugAutoCompleteUrl(url);

      if (!url.endsWith(BPM_PLATFORM_XML_FILE)) {
        String appender;

        // reuse the separator style already present in the location
        if (url.contains("/")) {
          appender = "/";
        } else {
          appender = "\\";
        }

        // only append a separator if the location does not already end with
        // one. Fix: the previous check compared against a double backslash
        // ("\\\\" in source) and therefore missed a single trailing backslash.
        if (!(url.endsWith("/") || url.endsWith("\\"))) {
          url += appender;
        }

        url += BPM_PLATFORM_XML_FILE;
      }

      LOG.debugAutoCompletedUrl(url);
    }

    return url;
  }

  /**
   * Returns the location as a {@link URL} if it is an http(s) URL pointing at
   * a bpm-platform.xml file, otherwise {@code null}.
   */
  public URL checkValidUrlLocation(String url) throws MalformedURLException {
    if (url == null || url.isEmpty()) {
      return null;
    }

    Matcher urlMatcher = HTTP_URL_PATTERN.matcher(url);
    if (urlMatcher.matches()) {
      return new URL(url);
    }

    return null;
  }

  /**
   * Returns the location as a file {@link URL} if it is an absolute path to an
   * existing bpm-platform.xml file, otherwise {@code null}.
   */
  public URL checkValidFileLocation(String url) throws MalformedURLException {
    if (url == null || url.isEmpty()) {
      return null;
    }

    Matcher fileMatcher = FILE_LOCATION_PATTERN.matcher(url);
    if (fileMatcher.matches()) {
      File configurationLocation = new File(url);
      if (configurationLocation.isAbsolute() && configurationLocation.exists()) {
        return configurationLocation.toURI().toURL();
      }
    }

    return null;
  }

  /**
   * Looks up the configuration location bound under
   * {@code java:comp/env/bpm-platform-xml} in JNDI.
   *
   * @return the resolved URL or {@code null} if the lookup fails or yields no valid location
   */
  public URL lookupBpmPlatformXmlLocationFromJndi() {
    String jndi = "java:comp/env/" + BPM_PLATFORM_XML_LOCATION;

    try {
      String bpmPlatformXmlLocation = InitialContext.doLookup(jndi);

      URL fileLocation = checkValidBpmPlatformXmlResourceLocation(bpmPlatformXmlLocation);

      if (fileLocation != null) {
        LOG.foundConfigJndi(jndi, fileLocation.toString());
      }

      return fileLocation;
    }
    catch (NamingException e) {
      LOG.debugExceptionWhileGettingConfigFromJndi(jndi, e);
      return null;
    }
  }

  /**
   * Looks up the configuration location from the BPM_PLATFORM_XML environment
   * variable, falling back to the bpm.platform.xml system property.
   *
   * @return the resolved URL or {@code null} if neither yields a valid location
   */
  public URL lookupBpmPlatformXmlLocationFromEnvironmentVariable() {
    String bpmPlatformXmlLocation = System.getenv(BPM_PLATFORM_XML_ENVIRONMENT_VARIABLE);

    String logStatement = "environment variable [" + BPM_PLATFORM_XML_ENVIRONMENT_VARIABLE + "]";

    if (bpmPlatformXmlLocation == null) {
      bpmPlatformXmlLocation = System.getProperty(BPM_PLATFORM_XML_SYSTEM_PROPERTY);
      logStatement = "system property [" + BPM_PLATFORM_XML_SYSTEM_PROPERTY + "]";
    }

    URL fileLocation = checkValidBpmPlatformXmlResourceLocation(bpmPlatformXmlLocation);

    if (fileLocation != null) {
      LOG.foundConfigAtLocation(logStatement, fileLocation.toString());
    }

    return fileLocation;
  }

  /**
   * Looks up the given resource on the classpath of this class' classloader.
   *
   * @return the resource URL or {@code null} if the resource is not present
   */
  public URL lookupBpmPlatformXmlFromClassPath(String resourceLocation) {
    URL fileLocation = ClassLoaderUtil.getClassloader(getClass()).getResource(resourceLocation);

    if (fileLocation != null) {
      LOG.foundConfigAtLocation(resourceLocation, fileLocation.toString());
    }

    return fileLocation;
  }

  /** Looks up META-INF/bpm-platform.xml on the classpath. */
  public URL lookupBpmPlatformXmlFromClassPath() {
    return lookupBpmPlatformXmlFromClassPath(BPM_PLATFORM_XML_RESOURCE_LOCATION);
  }

  /**
   * Resolves the bpm-platform.xml location, trying JNDI first, then the
   * environment variable / system property, then the classpath.
   */
  public URL lookupBpmPlatformXml() {
    URL fileLocation = lookupBpmPlatformXmlLocationFromJndi();

    if (fileLocation == null) {
      fileLocation = lookupBpmPlatformXmlLocationFromEnvironmentVariable();
    }

    if (fileLocation == null) {
      fileLocation = lookupBpmPlatformXmlFromClassPath();
    }

    return fileLocation;
  }

  /** Template method: subclasses decide where the bpm-platform.xml comes from. */
  public abstract URL getBpmPlatformXmlStream(DeploymentOperation operationContext);

}
| |
package de.dbis.acis.cloud.Tethys.client;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import javax.servlet.ServletOutputStream;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.StreamingOutput;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import de.dbis.acis.cloud.Tethys.message.client.MessageAuth;
import de.dbis.acis.cloud.Tethys.message.server.SMessageAuth;
import de.dbis.acis.cloud.Tethys.util.GsonMessageBodyHandler;
/**
* Contains all methods to communicate with Openstack.
*
* @author Gordon Lawrenz <lawrenz@dbis.rwth-aachen.de>
*/
public class OpenstackClient {
// TODO hardcodet!?
private static ClientConfig cfg = null;
private static String protocol = "http://";
private static String externalOpenstackIP ="137.226.58.2";
private static String internalOpenstackIP ="10.255.255.3";
private static String openstackIPForPublishing = externalOpenstackIP;
private static String portKeystoneAdmin = ":35357";
private static String portKeystoneMember = ":5000";
private static String portNovaMember = ":8774";
private static String portSwiftMember = ":8888";
private static String portGlanceMember = ":9292";
/**
 * Lazily builds and caches the Jersey {@link ClientConfig} used for all
 * Openstack requests: registers the Gson message body handler and enables
 * POJO JSON mapping.
 *
 * @return the shared ClientConfig instance
 */
private static ClientConfig returnClientConfig() {
    if (cfg != null) {
        return cfg;
    }
    cfg = new DefaultClientConfig();
    cfg.getClasses().add(GsonMessageBodyHandler.class);
    cfg.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, true);
    return cfg;
}
/**
 * Authenticates against the Openstack Keystone service.
 * <p>
 * input: {"service":""}, {"username":""}, {"password":""}
 * <p>
 * output: the token response exactly as Openstack returns it.
 *
 * @param smessage the credentials message (tenant/service, username, password)
 * @param admin if {@code true}, use the Keystone admin port; otherwise the member port
 * @return the Openstack response body, or {@code null} if the status was not 200 OK
 */
public static JsonObject authOpenstack(SMessageAuth smessage, boolean admin) {
    final String keystonePort = admin ? portKeystoneAdmin : portKeystoneMember;
    final Client client = Client.create(returnClientConfig());
    final WebResource tokens =
            client.resource(protocol + internalOpenstackIP + keystonePort + "/v2.0/tokens");

    final ClientResponse response = tokens
            .accept(MediaType.APPLICATION_JSON)
            .type(MediaType.APPLICATION_JSON)
            .post(ClientResponse.class, new MessageAuth(smessage));

    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
// TODO refactor
/**
 * Authenticates as a regular member and reshapes the Openstack auth response.
 * <p>
 * input: {"service":""}, {"username":""}, {"password":""}
 * <p>
 * output {"X-Auth-Token":"","expires":"","swift-url":""}
 *
 * @param smessage the credentials message (tenant/service, username, password)
 * @return the condensed token object, or {@code null} if authentication failed
 */
public static JsonObject manipulateAuthAndReturnToken(SMessageAuth smessage) {
    final JsonObject auth = authOpenstack(smessage, false);
    if (auth == null) {
        return null;
    }

    final JsonObject token = auth.getAsJsonObject("access").getAsJsonObject("token");
    final JsonObject output = new JsonObject();
    output.add("X-Auth-Token", token.get("id"));
    output.add("expires", token.get("expires"));
    // publish the externally reachable Swift endpoint for this tenant
    output.addProperty("swift-url", protocol + openstackIPForPublishing + portSwiftMember
            + "/v1/AUTH_" + token.getAsJsonObject("tenant").get("id").getAsString());
    return output;
}
/**
 * Authenticates against Keystone's admin endpoint and reshapes the response.
 * <p>
 * input: {"service":""}, {"username":""}, {"password":""}
 * <p>
 * output {"X-Auth-Token":"","expires":"","tenant-id":""}
 *
 * @param smessage the credentials message (tenant/service, username, password)
 * @return the condensed token object, or {@code null} if authentication failed
 */
public static JsonObject adminAuth(SMessageAuth smessage) {
    final JsonObject auth = authOpenstack(smessage, true);
    if (auth == null) {
        return null;
    }

    final JsonObject token = auth.getAsJsonObject("access").getAsJsonObject("token");
    final JsonObject output = new JsonObject();
    output.add("X-Auth-Token", token.get("id"));
    output.add("expires", token.get("expires"));
    output.add("tenant-id", token.getAsJsonObject("tenant").get("id"));
    return output;
}
/**
 * Gets the Nova limits of a service/tenant from Openstack.
 * <p>
 * output: the limits document exactly as Openstack returns it.
 *
 * @param xAuthToken the authentication token to send as X-Auth-Token
 * @param tenantId the tenant whose limits are queried
 * @return the limits JSON, or {@code null} if the status was not 200 OK
 */
public static JsonObject serviceLimits(String xAuthToken, String tenantId) {
    final Client client = Client.create(returnClientConfig());
    final WebResource limits =
            client.resource(protocol + internalOpenstackIP + portNovaMember + "/v2/" + tenantId + "/limits");

    final ClientResponse response = limits
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);

    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
/**
 * Creates a Nova server instance for a service/tenant.
 * <p>
 * output: the creation response exactly as Openstack returns it.
 *
 * @param xAuthToken the authentication token to send as X-Auth-Token
 * @param tenantId the tenant the instance belongs to
 * @param name instance name
 * @param script cloud-init user data passed to the instance
 * @param imageRef reference of the image to boot
 * @param flavorRef reference of the flavor to use
 * @return the server JSON, or {@code null} if the status was not 202 ACCEPTED
 */
public static JsonObject createInstance(String xAuthToken, String tenantId, JsonElement name, JsonElement script, JsonElement imageRef ,JsonElement flavorRef) {
    final Client client = Client.create(returnClientConfig());
    final WebResource servers =
            client.resource(protocol + internalOpenstackIP + portNovaMember + "/v2/" + tenantId + "/servers");

    // assemble the {"server": {...}} request body expected by Nova
    final JsonObject serverdata = new JsonObject();
    serverdata.add("name", name);
    serverdata.add("user_data", script);
    serverdata.add("imageRef", imageRef);
    serverdata.add("flavorRef", flavorRef);
    serverdata.addProperty("max_count", "1");
    serverdata.addProperty("min_count", "1");

    final JsonObject request = new JsonObject();
    request.add("server", serverdata);

    final ClientResponse response = servers
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .type(MediaType.APPLICATION_JSON)
            .post(ClientResponse.class, request);

    if (response.getClientResponseStatus() == Status.ACCEPTED) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
/**
 * Uploads a file to the given container of a service/tenant to Swift via a
 * chunked HTTP PUT.
 *
 * @param bis the stream to upload; always closed by this method
 * @param xAuthToken the authentication token to send as X-Auth-Token
 * @param tenantid the tenant owning the target container
 * @param path container (and object) path inside the tenant's Swift account
 * @return a ResponseBuilder carrying Swift's status code; on 201 Created the
 *         body contains the public "swift-url" of the uploaded object
 * @throws MalformedURLException if the Swift URL cannot be built
 * @throws IOException if reading the input or writing to Swift fails
 */
public static ResponseBuilder uploadFile(InputStream bis, String xAuthToken, String tenantid, String path ) throws MalformedURLException, IOException {
    HttpURLConnection connection = (HttpURLConnection) new URL(
            protocol + internalOpenstackIP + portSwiftMember + "/v1/AUTH_" + tenantid + "/" + path).openConnection();
    connection.setDoOutput(true);
    connection.setDoInput(true);
    connection.setRequestMethod("PUT");
    // stream in chunks so arbitrarily large files never need full buffering
    connection.setChunkedStreamingMode(16384);
    connection.setRequestProperty("X-Auth-Token", xAuthToken);
    connection.connect();

    // Copy with a buffer (the previous byte-at-a-time loop was very slow) and
    // close both streams in finally so they are not leaked on I/O errors.
    OutputStream bos = connection.getOutputStream();
    try {
        byte[] buffer = new byte[16384];
        int read;
        while ((read = bis.read(buffer)) != -1) {
            bos.write(buffer, 0, read);
        }
        bos.flush();
    } finally {
        try {
            bis.close();
        } catch (IOException ignored) {
            // best effort: the upload result is determined by the response code
        }
        bos.close();
    }

    int responseCode = connection.getResponseCode();
    connection.disconnect();

    JsonObject responseObject = null;
    if (responseCode == 201) {
        // 201 Created: hand the externally reachable object URL back to the caller
        responseObject = new JsonObject();
        responseObject.addProperty("swift-url", protocol + openstackIPForPublishing + portSwiftMember + "/v1/AUTH_" + tenantid + "/" + path);
    }

    return Response.ok(responseObject).status(responseCode);
}
/**
 * Gets all uploaded files in a given container of a service/tenant in Swift.
 *
 * @param xAuthToken the authentication token to send as X-Auth-Token
 * @param tenantid the tenant owning the container
 * @param path the container path inside the tenant's Swift account
 * @return the object listing as a JsonArray, or {@code null} if the status was not 200 OK
 */
public static JsonArray getUploadedFiles(String xAuthToken, String tenantid, String path ) {
    final Client client = Client.create(returnClientConfig());
    final WebResource container =
            client.resource(protocol + internalOpenstackIP + portSwiftMember + "/v1/AUTH_" + tenantid + "/" + path);

    final ClientResponse response = container
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);

    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonArray.class);
    }
    return null;
}
/**
 * Creates (or re-creates) a Swift container for the given tenant.
 *
 * @param xAuthToken    auth token sent as the X-Auth-Token header
 * @param tenantid      tenant owning the Swift account (AUTH_&lt;tenantid&gt;)
 * @param containerName name of the container to create
 * @return the HTTP status Swift responded with
 */
public static Status createContainer(String xAuthToken, String tenantid, String containerName){
    String containerUrl = protocol + internalOpenstackIP + portSwiftMember
            + "/v1/AUTH_" + tenantid + "/" + containerName;
    return Client.create(returnClientConfig())
            .resource(containerUrl)
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .put(ClientResponse.class)
            .getClientResponseStatus();
}
// /**
// * Gets a file from a given container of a service/tenant in Swift.
// *
// * @param xAuthToken
// * @param tenantid
// * @param path
// * @return the file
// * @throws IOException
// * @throws ClassNotFoundException
// */
// public static ResponseBuilder getFile(String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
// Client client = Client.create(returnClientConfig());
// client.setChunkedEncodingSize(16384);
// WebResource tokens = client.resource(protocol+openstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
// ClientResponse response = tokens.header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
//
// ByteArrayOutputStream bos = new ByteArrayOutputStream();
// IOUtils.copy(response.getEntityInputStream(), bos);
//
// ServletOutputStream
//
// return Response.ok(bos.toByteArray()).type(response.getType());
//
// }
/**
 * Streams an object from a Swift container of a service/tenant directly into
 * the given servlet output stream. The content is not returned; it is written
 * to {@code bos}, which is flushed and closed here.
 *
 * @param bos        servlet stream that receives the object content; closed on return
 * @param xAuthToken auth token sent as the X-Auth-Token header
 * @param tenantid   tenant whose Swift account (AUTH_&lt;tenantid&gt;) is read
 * @param path       container/object path of the file
 * @throws IOException            if reading from Swift or writing to the client fails
 * @throws ClassNotFoundException declared for API compatibility; not thrown here
 */
public static void getFile(ServletOutputStream bos, String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
    Client client = Client.create(returnClientConfig());
    client.setChunkedEncodingSize(16384);
    WebResource tokens = client.resource(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path);
    ClientResponse response = tokens.header("X-Auth-Token", xAuthToken).get(ClientResponse.class);
    InputStream bis = response.getEntityInputStream();
    try {
        // Copy in chunks instead of one byte at a time (the original looped
        // over single-byte read()/write() calls).
        byte[] buffer = new byte[16384];
        int read;
        while ((read = bis.read(buffer)) != -1) {
            bos.write(buffer, 0, read);
        }
        bos.flush();
    } finally {
        // Close both streams even when the copy fails part-way; the original
        // leaked them on any IOException.
        bis.close();
        bos.close();
    }
}
/**
 * Fetches an object from Swift and returns it as a lazily streamed JAX-RS
 * response: the Swift body is piped through a {@link StreamingOutput} so large
 * objects are never buffered in memory. On a non-2xx status the entity is null
 * and only the status code and content type are propagated.
 *
 * @param xAuthToken auth token sent as the X-Auth-Token header
 * @param tenantid   tenant whose Swift account (AUTH_&lt;tenantid&gt;) is read
 * @param path       container/object path of the file
 * @return a ResponseBuilder carrying Swift's status, content type and (on success) body
 * @throws IOException            if the connection to Swift fails
 * @throws ClassNotFoundException declared for API compatibility; not thrown here
 */
public static Response getFile2(String xAuthToken, String tenantid, String path ) throws IOException, ClassNotFoundException {
    StreamingOutput clientOS = null;
    HttpURLConnection urlconnection = (HttpURLConnection) new URL(protocol+internalOpenstackIP+portSwiftMember+"/v1/AUTH_"+tenantid+"/"+path).openConnection();
    urlconnection.addRequestProperty("X-Auth-Token", xAuthToken);
    // Fix: the former setDoOutput(true) was wrong for a GET — enabling output
    // declares a request body and makes HttpURLConnection behave like POST.
    urlconnection.setRequestMethod("GET");
    int responseCode = urlconnection.getResponseCode();
    if((responseCode >= 200 && responseCode <= 208 )|| responseCode == 226){
        // Any success code: hand the body stream to JAX-RS; it is consumed
        // only when the container serializes the response.
        final InputStream serviceIS = urlconnection.getInputStream();
        clientOS = new StreamingOutput() {
            @Override
            public void write(OutputStream clientOS) throws IOException, WebApplicationException{
                int i;
                while ((i = serviceIS.read()) != -1) {
                    clientOS.write(i);
                }
                serviceIS.close();
                clientOS.flush();
                clientOS.close();
            }
        };
    }
    return Response.status(responseCode).entity(clientOS).type(urlconnection.getContentType()).build();
}
// REQ: curl -i http://137.226.58.142:35357/v2.0/users -X POST -H "User-Agent: python-keystoneclient" -H "Content-Type: application/json" -H "X-Auth-Token: f1895418260b4c549969d8f4c58e14e9"
// REQ BODY: {"user": {"email": null, "password": "TTest", "enabled": true, "name": "TestUser", "tenantId": null}}
//
// RESP: [200] {'date': 'Wed, 11 Dec 2013 16:19:11 GMT', 'content-type': 'application/json', 'content-length': '122', 'vary': 'X-Auth-Token'}
// RESP BODY: {"user": {"name": "TestUser", "id": "9da4e5a0ef6f411287290da982373838", "tenantId": null, "enabled": true, "email": null}}
/**
 * Creates a new Keystone user via the admin API (see curl example above).
 *
 * @param xAuthToken admin token sent as the X-Auth-Token header
 * @param name       user name
 * @param password   initial password
 * @param email      e-mail address, may be null
 * @param tenantId   tenant to associate the user with, may be null
 * @param enabled    whether the account is active
 * @return Keystone's user document, or null unless the response status is 200 OK
 */
public static JsonObject createNewUser(String xAuthToken, String name, String password, String email, String tenantId, Boolean enabled ) {
    Client client = Client.create(returnClientConfig());
    WebResource tokens = client.resource(protocol+internalOpenstackIP+portKeystoneAdmin+"/v2.0/users");
    JsonObject jsonUserData = new JsonObject();
    JsonObject jsonUser = new JsonObject();
    jsonUserData.addProperty("name", name);
    jsonUserData.addProperty("password", password);
    jsonUserData.addProperty("email", email);
    jsonUserData.addProperty("tenantId", tenantId);
    jsonUserData.addProperty("enabled", enabled);
    jsonUser.add("user", jsonUserData);
    // The body is passed once via post(); the original additionally called
    // entity(jsonUser), setting the same entity twice. An unused
    // ResponseBuilder local was also removed.
    ClientResponse response = tokens.accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).post(ClientResponse.class, jsonUser);
    JsonObject output = null;
    if(response.getClientResponseStatus()==Status.OK) {
        output = response.getEntity(JsonObject.class);
    }
    return output;
}
// REQ: curl -i http://137.226.58.142:35357/v2.0/tenants -X POST -H "User-Agent: python-keystoneclient" -H "Content-Type: application/json" -H "X-Auth-Token: 01a935ea36884711a4f30e06b9bcca30"
// REQ BODY: {"tenant": {"enabled": true, "name": "TestUser", "description": null}}
/**
 * Creates a new Keystone tenant ("service") with the given name and description.
 *
 * @param service     name of the tenant to create
 * @param description human-readable description of the tenant
 * @param xAuthToken  admin token sent as the X-Auth-Token header
 * @return Keystone's tenant document, or null unless the response status is 200 OK
 */
public static JsonObject createNewService(String service, String description,String xAuthToken) {
    JsonObject tenantData = new JsonObject();
    tenantData.addProperty("name", service);
    tenantData.addProperty("description", description);
    tenantData.addProperty("enabled", true);
    JsonObject payload = new JsonObject();
    payload.add("tenant", tenantData);
    WebResource resource = Client.create(returnClientConfig())
            .resource(protocol + internalOpenstackIP + portKeystoneAdmin + "/v2.0/tenants");
    ClientResponse response = resource
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .post(ClientResponse.class, payload);
    return response.getClientResponseStatus() == Status.OK
            ? response.getEntity(JsonObject.class)
            : null;
}
// curl -X PUT -H 'X-Auth-Token:<token>' https://localhost:35357/v2.0/tenants/<tenantid>/users/<userid>/roles/OS-KSADM/<role-id>
/**
 * Grants a role to a user within a tenant via the Keystone admin API.
 *
 * @param tenantid   tenant in which the role is granted
 * @param userid     user receiving the role
 * @param roleid     role to grant
 * @param xAuthToken admin token sent as the X-Auth-Token header
 * @return Keystone's response body, or null unless the response status is 200 OK
 */
public static JsonObject addUserRole(String tenantid, String userid, String roleid, String xAuthToken) {
    String roleUrl = protocol + internalOpenstackIP + portKeystoneAdmin
            + "/v2.0/tenants/" + tenantid + "/users/" + userid + "/roles/OS-KSADM/" + roleid;
    ClientResponse response = Client.create(returnClientConfig())
            .resource(roleUrl)
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .put(ClientResponse.class);
    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
// curl -i http://137.226.58.2:35357/v2.0/OS-KSADM/roles -X GET -H "X-Auth-Token: "
/**
 * Retrieves all roles known to Keystone.
 *
 * @param xAuthToken admin token sent as the X-Auth-Token header
 * @return the role listing, or null unless the response status is 200 OK
 */
public static JsonObject getRoles(String xAuthToken) {
    WebResource resource = Client.create(returnClientConfig())
            .resource(protocol + internalOpenstackIP + portKeystoneAdmin + "/v2.0/OS-KSADM/roles");
    ClientResponse response = resource
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);
    return response.getClientResponseStatus() == Status.OK
            ? response.getEntity(JsonObject.class)
            : null;
}
// curl -i http://137.226.58.142:35357/v2.0/users -X GET -H "X-Auth-Token: "
/**
 * Retrieves all users known to Keystone.
 *
 * @param xAuthToken admin token sent as the X-Auth-Token header
 * @return the user listing, or null unless the response status is 200 OK
 */
public static JsonObject getUsers(String xAuthToken) {
    String usersUrl = protocol + internalOpenstackIP + portKeystoneAdmin + "/v2.0/users";
    ClientResponse response = Client.create(returnClientConfig())
            .resource(usersUrl)
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);
    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
//curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/images/detail -X GET -H "X-Auth-Token: 4ffb1aa188804dd4bce98e4ce11d8839"
/**
 * Retrieves the detailed image list of a tenant from Nova.
 *
 * @param xAuthToken auth token sent as the X-Auth-Token header
 * @param tenantId   tenant whose images are listed
 * @return the image listing, or null unless the response status is 200 OK
 */
public static JsonObject getImages(String xAuthToken, String tenantId) {
    String imagesUrl = protocol + internalOpenstackIP + portNovaMember
            + "/v2/" + tenantId + "/images/detail";
    ClientResponse response = Client.create(returnClientConfig())
            .resource(imagesUrl)
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);
    return response.getClientResponseStatus() == Status.OK
            ? response.getEntity(JsonObject.class)
            : null;
}
//curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/detail -X GET -H "X-Auth-Token: e8e4949e56ab4072be08287d3fd52d3d"
/**
 * Retrieves the detailed server (instance) list of a tenant from Nova.
 *
 * @param xAuthToken auth token sent as the X-Auth-Token header
 * @param tenantId   tenant whose instances are listed
 * @return the server listing, or null unless the response status is 200 OK
 */
public static JsonObject getInstances(String xAuthToken, String tenantId) {
    String serversUrl = protocol + internalOpenstackIP + portNovaMember
            + "/v2/" + tenantId + "/servers/detail";
    ClientResponse response = Client.create(returnClientConfig())
            .resource(serversUrl)
            .accept(MediaType.APPLICATION_JSON)
            .header("X-Auth-Token", xAuthToken)
            .get(ClientResponse.class);
    if (response.getClientResponseStatus() == Status.OK) {
        return response.getEntity(JsonObject.class);
    }
    return null;
}
//curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/detail -X GET -H "X-Auth-Token: e8e4949e56ab4072be08287d3fd52d3d"
//curl -i http://137.226.58.142:8774/v2/d34a0c1691fd4bf6b89214e2731c0b33/servers/44268c11-64d9-4b7b-95ea-d63f28c6db5f/action -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "X-Auth-Token: 67bdf0dc06f04d8fb75dfe27ba946ca6" -d '{"os-start": null}'
/**
 * Posts a server action (e.g. {"os-start": null}) to a Nova instance.
 *
 * @param xAuthToken auth token sent as the X-Auth-Token header
 * @param tenantId   tenant owning the instance
 * @param instanceId id of the instance the action is applied to
 * @param action     JSON action document posted as the request body
 * @return Nova's response body, or null unless the response status is 200 OK
 */
public static JsonObject doActionOnInstance(String xAuthToken, String tenantId, String instanceId, JsonObject action) {
Client client = Client.create(returnClientConfig());
WebResource tokens = client.resource(protocol+internalOpenstackIP+portNovaMember+"/v2/"+tenantId+"/servers/"+instanceId+"/action");
ClientResponse response = tokens.entity(action).type(MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON).header("X-Auth-Token", xAuthToken).post(ClientResponse.class);
JsonObject output = null;
// NOTE(review): server actions frequently answer 202 Accepted with an empty
// body, in which case this OK-only check returns null — confirm that null is
// the intended success signal for such actions.
if(response.getClientResponseStatus()==Status.OK) {
output = response.getEntity(JsonObject.class);
}
return output;
}
}
| |
package com.crawljax.plugins.testcasegenerator.util;
import com.crawljax.plugins.testcasegenerator.report.TestRecord;
import com.crawljax.util.DomUtils;
import com.crawljax.util.FSUtils;
import com.google.gson.Gson;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.imageio.ImageIO;
import javax.inject.Singleton;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
@Singleton
public class WorkDirManager {

    private static final Logger LOG = LoggerFactory.getLogger(WorkDirManager.class);

    public WorkDirManager() {
    }

    /**
     * Saves all records of a test run, keyed by test method name, to
     * {@code testRun.json} in the run folder and regenerates the HTML report.
     *
     * @param records       outcomes of the individual tests of this run
     * @param testRunIndex  sequence number of this test execution
     * @param url           URL of the application under test (shown in the report)
     * @param testRunFolder folder the run artifacts are written into
     */
    public void saveTestRecordMap(List<TestRecord> records, int testRunIndex, String url, String testRunFolder) {
        File runFile = new File(testRunFolder, "testRun.json");
        HashMap<String, TestRecord> recordMap = new HashMap<String, TestRecord>();
        for (TestRecord record : records) {
            recordMap.put(record.getMethodName(), record);
        }
        try {
            if (!runFile.exists()) {
                if (!runFile.getParentFile().exists()) {
                    runFile.getParentFile().mkdirs();
                }
                runFile.createNewFile();
            }
            // try-with-resources closes the stream even when write() throws.
            try (FileOutputStream file = new FileOutputStream(runFile)) {
                String json = new Gson().toJson(recordMap);
                // Explicit charset: getBytes() without one uses the platform default.
                file.write(json.getBytes(StandardCharsets.UTF_8));
            }
            copyHTMLReport(testRunFolder, testRunIndex, url, recordMap);
        } catch (IOException e) {
            // Pass the exception so the stack trace is not lost.
            LOG.error("Could not save test run record {}", recordMap, e);
        }
    }

    /**
     * Renders {@code TestResults.html} from the Velocity template, embedding the
     * run's records as JSON. Non-screenshot (DOM) diffs are pretty-printed first.
     */
    private void copyHTMLReport(String testRunFolder, int testExecutionNumber, String url, HashMap<String, TestRecord> recordMap) throws IOException {
        VelocityEngine engine = new VelocityEngine();
        engine.setProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogChute");
        engine.setProperty("resource.loader", "file");
        engine.setProperty("file.resource.loader.class",
                "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        engine.init();
        VelocityContext context = new VelocityContext();
        recordMap.values().forEach(testRecord -> {
            testRecord.getDiffs().forEach(testStateDiff -> {
                // TODO: Checking the type of differences like this is fragile.
                if (!testStateDiff.getOldState().endsWith(".png")) {
                    try {
                        testStateDiff.setOldState(formatDOM(testStateDiff.getOldState()));
                        testStateDiff.setNewState(formatDOM(testStateDiff.getNewState()));
                    } catch (IOException | TransformerException io) {
                        // Log with the exception instead of printStackTrace().
                        LOG.error("Error while formatting the stripped DOM", io);
                    }
                }
            });
        });
        String json = new Gson().toJson(recordMap);
        // Escape backslashes and backticks: the template embeds this JSON in a
        // JavaScript template literal.
        context.put("diff_json", json.replace("\\", "\\\\").replace("`", "\\`"));
        context.put("url", url);
        context.put("execution", testExecutionNumber);
        Template template = engine.getTemplate("TestResults.html.vm");
        FSUtils.directoryCheck(testRunFolder);
        File f = new File(testRunFolder + File.separator + "TestResults.html");
        try (FileWriter writer = new FileWriter(f)) {
            template.merge(context, writer);
            writer.flush();
        }
    }

    /**
     * Pretty-prints the given DOM string as indented HTML (UTF-8, 2-space indent).
     *
     * @param dom the DOM serialization to format
     * @return the formatted HTML
     */
    private String formatDOM(String dom) throws TransformerException, IOException {
        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer transformer = tf.newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        transformer.setOutputProperty(OutputKeys.METHOD, "html");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        transformer.transform(new DOMSource(DomUtils.asDocument(dom)),
                new StreamResult(new OutputStreamWriter(baos, StandardCharsets.UTF_8)));
        baos.flush();
        return baos.toString("UTF-8");
    }

    /**
     * Saves a single test record as {@code test.json} inside the test folder,
     * creating the folder and file on first use.
     *
     * @param record     the test outcome to persist
     * @param testFolder folder the record file is written into
     */
    public void saveTestRecord(TestRecord record, String testFolder) {
        File recordFile = new File(testFolder, "test.json");
        try {
            if (!recordFile.exists()) {
                recordFile.getParentFile().mkdirs();
                recordFile.createNewFile();
                record.setOutputFolder(recordFile.getParent());
            }
            // try-with-resources + explicit UTF-8, as in saveTestRecordMap.
            try (FileOutputStream file = new FileOutputStream(recordFile)) {
                file.write(new Gson().toJson(record).getBytes(StandardCharsets.UTF_8));
            }
        } catch (IOException e) {
            LOG.error("Could not save crawl record {}", record, e);
        }
    }

    /**
     * Writes a 100x100 JPEG thumbnail of the given screenshot to {@code target}.
     */
    private static void writeThumbNail(File target, BufferedImage screenshot) throws IOException {
        int THUMBNAIL_WIDTH = 100;
        int THUMBNAIL_HEIGHT = 100;
        BufferedImage resizedImage =
                new BufferedImage(THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT, BufferedImage.TYPE_INT_RGB);
        Graphics2D g = resizedImage.createGraphics();
        g.drawImage(screenshot, 0, 0, THUMBNAIL_WIDTH, THUMBNAIL_HEIGHT, Color.WHITE, null);
        g.dispose();
        ImageIO.write(resizedImage, "JPEG", target);
    }

    /**
     * Returns the next free test-run id: one more than the highest numeric
     * sub-directory name found, or 0 when none exist. The folder is created
     * when missing.
     *
     * @param testRecordsFolder folder holding one numbered directory per run
     * @return the next run id (0-based)
     */
    public int getNumTestRecords(File testRecordsFolder) {
        int maxID = -1;
        if (testRecordsFolder.exists()) {
            /* Get the directories. */
            String[] dirs = testRecordsFolder.list(
                    (current, name) -> new File(current, name).isDirectory());
            if (null != dirs && dirs.length > 0) {
                /* Find the directory with the highest number. */
                for (String dir : dirs) {
                    try {
                        int id = Integer.parseInt(dir);
                        maxID = Math.max(maxID, id);
                    } catch (NumberFormatException e) {
                        // Non-numeric directory names are simply skipped.
                    }
                }
            }
        } else {
            testRecordsFolder.mkdir();
        }
        return maxID + 1;
    }

    /**
     * @param testRunFolderPath
     *            the folder in which tests are
     * @param testRecord
     *            the actual test outcome
     * @return the current output folder for the diff.
     */
    public File getDiffsFolder(String testRunFolderPath, TestRecord testRecord) {
        File methodFolder =
                new File(testRunFolderPath + File.separator + testRecord.getMethodName());
        if (!methodFolder.exists()) {
            methodFolder.mkdir();
        }
        File diffsFolder = new File(methodFolder, "diffs");
        if (!diffsFolder.exists()) {
            boolean created = diffsFolder.mkdirs();
            checkArgument(created, "Could not create diffs dir");
        }
        return diffsFolder;
    }
}
| |
package org.usergrid.mq;
import static java.lang.Integer.parseInt;
import static org.apache.commons.codec.binary.Base64.decodeBase64;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.apache.commons.lang.StringUtils.isNotBlank;
import static org.apache.commons.lang.StringUtils.removeEnd;
import static org.apache.commons.lang.StringUtils.split;
import static org.usergrid.mq.Query.SortDirection.DESCENDING;
import static org.usergrid.persistence.cassandra.IndexUpdate.indexValueCode;
import static org.usergrid.persistence.cassandra.IndexUpdate.toIndexableValue;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import org.apache.commons.collections.comparators.ComparatorChain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.usergrid.mq.Query.FilterOperator;
import org.usergrid.mq.Query.FilterPredicate;
import org.usergrid.mq.Query.SortPredicate;
import org.usergrid.persistence.Entity;
import org.usergrid.persistence.EntityPropertyComparator;
import org.usergrid.utils.ListUtils;
import org.usergrid.utils.NumberUtils;
import org.usergrid.utils.StringUtils;
public class QueryProcessor {
private static final Logger logger = LoggerFactory
.getLogger(QueryProcessor.class);
Query query;
String cursor;
List<QuerySlice> slices;
List<FilterPredicate> filters;
List<SortPredicate> sorts;
/**
 * Builds a processor for the given query and immediately compiles its
 * filters, sorts and cursor into index slices via process().
 */
public QueryProcessor(Query query) {
    this.query = query;
    cursor = query.getCursor();
    filters = query.getFilterPredicates();
    sorts = query.getSortPredicates();
    process();
}

/** @return the query this processor was built from */
public Query getQuery() {
    return query;
}

/** @return the raw cursor string taken from the query; may be null */
public String getCursor() {
    return cursor;
}

/** @return the index slices computed from the query's filters and sorts */
public List<QuerySlice> getSlices() {
    return slices;
}

/** @return the query's filter predicates */
public List<FilterPredicate> getFilters() {
    return filters;
}

/** @return the remaining sort predicates (process() removes those folded into slices) */
public List<SortPredicate> getSorts() {
    return sorts;
}
/**
 * Compiles the query into index slices in three steps: (1) merge all filter
 * predicates per property into one widened range slice, (2) fold sort
 * predicates into matching slices (or make the first sort its own slice when
 * there are no filters), (3) re-attach cursor positions to the slices whose
 * hash codes appear in the cursor string.
 */
private void process() {
    slices = new ArrayList<QuerySlice>();
    // consolidate all the filters into a set of ranges
    Set<String> names = getFilterPropertyNames();
    for (String name : names) {
        FilterOperator operator = null;
        Object value = null;
        RangeValue start = null;
        RangeValue finish = null;
        for (FilterPredicate f : filters) {
            if (f.getPropertyName().equals(name)) {
                operator = f.getOperator();
                value = f.getValue();
                RangePair r = getRangeForFilter(f);
                // Widen the slice: keep the smallest start seen ...
                if (r.start != null) {
                    if ((start == null)
                            || (r.start.compareTo(start, false) < 0)) {
                        start = r.start;
                    }
                }
                // ... and the largest finish seen for this property.
                if (r.finish != null) {
                    if ((finish == null)
                            || (r.finish.compareTo(finish, true) > 0)) {
                        finish = r.finish;
                    }
                }
            }
        }
        slices.add(new QuerySlice(name, operator, value, start, finish,
                null, false));
    }
    // process sorts
    if ((slices.size() == 0) && (sorts.size() > 0)) {
        // if there are no filters, turn the first sort into a slice of its own
        SortPredicate sort = ListUtils.dequeue(sorts);
        slices.add(new QuerySlice(sort.getPropertyName(), null, null, null,
                null, null, sort.getDirection() == DESCENDING));
    } else if (sorts.size() > 0) {
        // match up sorts with existing filters
        for (ListIterator<SortPredicate> iter = sorts.listIterator(); iter
                .hasNext();) {
            SortPredicate sort = iter.next();
            QuerySlice slice = getSliceForProperty(sort.getPropertyName());
            if (slice != null) {
                // Fold the sort into its slice and drop it from the sort list.
                slice.reversed = sort.getDirection() == DESCENDING;
                iter.remove();
            }
        }
    }
    // attach cursors to slices
    // Cursor format: '|'-separated entries of "<sliceHashCode>:<base64 position>".
    if ((cursor != null) && (cursor.indexOf(':') >= 0)) {
        String[] cursors = split(cursor, '|');
        for (String c : cursors) {
            String[] parts = split(c, ':');
            if (parts.length == 2) {
                int cursorHashCode = parseInt(parts[0]);
                for (QuerySlice slice : slices) {
                    int sliceHashCode = slice.hashCode();
                    logger.info("Comparing cursor hashcode "
                            + cursorHashCode + " to " + sliceHashCode);
                    if (sliceHashCode == cursorHashCode) {
                        if (isNotBlank(parts[1])) {
                            ByteBuffer cursorBytes = ByteBuffer
                                    .wrap(decodeBase64(parts[1]));
                            slice.setCursor(cursorBytes);
                        }
                    }
                }
            }
        }
    }
}
/**
 * Sorts the given entities in memory according to the query's sort
 * predicates, chaining one property comparator per predicate.
 *
 * @param entities the entities to sort in place; may be null
 * @return the same list (sorted when non-null and sorts are present)
 */
@SuppressWarnings("unchecked")
public List<Entity> sort(List<Entity> entities) {
    if ((entities != null) && (sorts.size() > 0)) {
        // Performing in memory sort.
        // Parameterized logging avoids string concatenation when INFO is off.
        logger.info("Performing in-memory sort of {} entities", entities.size());
        ComparatorChain chain = new ComparatorChain();
        for (SortPredicate sort : sorts) {
            // The boolean flag reverses the comparator for descending sorts.
            chain.addComparator(
                    new EntityPropertyComparator(sort.getPropertyName()),
                    sort.getDirection() == DESCENDING);
        }
        Collections.sort(entities, chain);
    }
    return entities;
}
/**
 * Collects the distinct property names referenced by the filter predicates,
 * preserving first-seen order.
 *
 * @return the ordered set of filtered property names
 */
private Set<String> getFilterPropertyNames() {
    // LinkedHashSet keeps the order in which properties first appear.
    Set<String> propertyNames = new LinkedHashSet<String>();
    for (FilterPredicate predicate : filters) {
        propertyNames.add(predicate.getPropertyName());
    }
    return propertyNames;
}
/**
 * Finds the slice built for the given property name.
 *
 * @param name the property name to look up
 * @return the matching slice, or null when no slice covers the property
 */
public QuerySlice getSliceForProperty(String name) {
    for (QuerySlice candidate : slices) {
        if (candidate.propertyName.equals(name)) {
            return candidate;
        }
    }
    return null;
}
/**
 * One endpoint of an index range: an index value type code, the value itself,
 * and whether the endpoint is inclusive.
 */
public static class RangeValue {
    byte code;          // index value type code (from IndexUpdate.indexValueCode)
    Object value;       // the endpoint value
    boolean inclusive;  // whether the endpoint itself is part of the range

    public RangeValue(byte code, Object value, boolean inclusive) {
        this.code = code;
        this.value = value;
        this.inclusive = inclusive;
    }

    public byte getCode() {
        return code;
    }

    public void setCode(byte code) {
        this.code = code;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }

    public boolean isInclusive() {
        return inclusive;
    }

    public void setInclusive(boolean inclusive) {
        this.inclusive = inclusive;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + code;
        result = prime * result + (inclusive ? 1231 : 1237);
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        RangeValue other = (RangeValue) obj;
        if (code != other.code) {
            return false;
        }
        if (inclusive != other.inclusive) {
            return false;
        }
        if (value == null) {
            if (other.value != null) {
                return false;
            }
        } else if (!value.equals(other.value)) {
            return false;
        }
        return true;
    }

    /**
     * Orders endpoints first by type code, then by value, then by
     * inclusiveness. A null other always sorts before this endpoint.
     *
     * @param other  the endpoint to compare against; may be null
     * @param finish true when comparing finish (upper) endpoints, which flips
     *               how the inclusive flag breaks ties
     * @return negative, zero or positive per the Comparable convention
     */
    public int compareTo(RangeValue other, boolean finish) {
        if (other == null) {
            return 1;
        }
        if (code != other.code) {
            return NumberUtils.sign(code - other.code);
        }
        @SuppressWarnings({ "unchecked", "rawtypes" })
        int c = ((Comparable) value).compareTo(other.value);
        if (c != 0) {
            return c;
        }
        if (finish) {
            // for finish values, inclusive means <= which is greater than <
            if (inclusive != other.inclusive) {
                return inclusive ? 1 : -1;
            }
        } else {
            // for start values, inclusive means >= which is less than >
            if (inclusive != other.inclusive) {
                return inclusive ? -1 : 1;
            }
        }
        return 0;
    }

    /**
     * Null-safe variant of {@link #compareTo(RangeValue, boolean)}; null sorts
     * before any non-null endpoint.
     */
    public static int compare(RangeValue v1, RangeValue v2, boolean finish) {
        if (v1 == null) {
            if (v2 == null) {
                return 0;
            }
            return -1;
        }
        return v1.compareTo(v2, finish);
    }
}
/**
 * A start/finish pair of range endpoints produced from one filter predicate.
 * Either endpoint may be null, meaning the range is open on that side.
 */
public static class RangePair {
    RangeValue start;   // lower endpoint; null = open below
    RangeValue finish;  // upper endpoint; null = open above

    public RangePair(RangeValue start, RangeValue finish) {
        this.start = start;
        this.finish = finish;
    }

    public RangeValue getStart() {
        return start;
    }

    public void setStart(RangeValue start) {
        this.start = start;
    }

    public RangeValue getFinish() {
        return finish;
    }

    public void setFinish(RangeValue finish) {
        this.finish = finish;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((finish == null) ? 0 : finish.hashCode());
        result = prime * result + ((start == null) ? 0 : start.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        RangePair other = (RangePair) obj;
        if (finish == null) {
            if (other.finish != null) {
                return false;
            }
        } else if (!finish.equals(other.finish)) {
            return false;
        }
        if (start == null) {
            if (other.start != null) {
                return false;
            }
        } else if (!start.equals(other.start)) {
            return false;
        }
        return true;
    }
}
/**
 * Converts a filter predicate into a start/finish range pair for an index
 * scan, expanding string wildcards: a lone "*" drops the start bound, and a
 * trailing "*" turns the value into a prefix scan.
 *
 * @param f the filter predicate to convert
 * @return the computed range; either endpoint may be null for an open range
 */
public RangePair getRangeForFilter(FilterPredicate f) {
    Object searchStartValue = toIndexableValue(f.getStartValue());
    Object searchFinishValue = toIndexableValue(f.getFinishValue());
    if (StringUtils.isString(searchStartValue)
            && StringUtils.isStringOrNull(searchFinishValue)) {
        // A lone "*" means "match anything": drop the start bound.
        if ("*".equals(searchStartValue)) {
            searchStartValue = null;
        }
        // An equality filter arrives with no finish value: close the range
        // at the start value. (A stray empty statement was removed here.)
        if (searchFinishValue == null) {
            searchFinishValue = searchStartValue;
        }
        if ((searchStartValue != null)
                && searchStartValue.toString().endsWith("*")) {
            // Prefix search: "foo*" scans from "foo" up to "foo\uFFFF".
            searchStartValue = removeEnd(searchStartValue.toString(), "*");
            searchFinishValue = searchStartValue + "\uFFFF";
            if (isBlank(searchStartValue.toString())) {
                // NOTE(review): "\0000" is the octal NUL escape followed by a
                // literal '0' (i.e. the two characters NUL + '0'); "\u0000"
                // may have been intended — confirm against existing index
                // data before changing it.
                searchStartValue = "\0000";
            }
        } else if (searchFinishValue != null) {
            // Exact string match: end the range just past the value itself.
            searchFinishValue = searchFinishValue + "\u0000";
        }
    }
    RangeValue rangeStart = null;
    if (searchStartValue != null) {
        // GREATER_THAN is the only operator that excludes the start value.
        rangeStart = new RangeValue(indexValueCode(searchStartValue),
                searchStartValue,
                f.getOperator() != FilterOperator.GREATER_THAN);
    }
    RangeValue rangeFinish = null;
    if (searchFinishValue != null) {
        // LESS_THAN is the only operator that excludes the finish value.
        rangeFinish = new RangeValue(indexValueCode(searchFinishValue),
                searchFinishValue,
                f.getOperator() != FilterOperator.LESS_THAN);
    }
    return new RangePair(rangeStart, rangeFinish);
}
/**
 * A scan over a single indexed property: its operator, value, start/finish
 * range, optional cursor position, and scan direction.
 */
public static class QuerySlice {
    String propertyName;     // the indexed property this slice scans
    FilterOperator operator; // operator of the originating filter; may be null for sort-only slices
    Object value;            // raw filter value; may be null
    RangeValue start;        // lower range endpoint; null = open below
    RangeValue finish;       // upper range endpoint; null = open above
    ByteBuffer cursor;       // resume position attached from the query cursor; may be null
    boolean reversed;        // true for descending scans

    QuerySlice(String propertyName, FilterOperator operator, Object value,
            RangeValue start, RangeValue finish, ByteBuffer cursor,
            boolean reversed) {
        this.propertyName = propertyName;
        this.operator = operator;
        this.value = value;
        this.start = start;
        this.finish = finish;
        this.cursor = cursor;
        this.reversed = reversed;
    }

    public String getPropertyName() {
        return propertyName;
    }

    public void setPropertyName(String propertyName) {
        this.propertyName = propertyName;
    }

    public RangeValue getStart() {
        return start;
    }

    public void setStart(RangeValue start) {
        this.start = start;
    }

    public RangeValue getFinish() {
        return finish;
    }

    public void setFinish(RangeValue finish) {
        this.finish = finish;
    }

    public Object getValue() {
        return value;
    }

    public void setValue(Object value) {
        this.value = value;
    }

    public ByteBuffer getCursor() {
        return cursor;
    }

    public void setCursor(ByteBuffer cursor) {
        this.cursor = cursor;
    }

    public boolean isReversed() {
        return reversed;
    }

    public void setReversed(boolean reversed) {
        this.reversed = reversed;
    }

    /**
     * Hash over propertyName/start/finish only. This value is serialized into
     * cursors (see process()), so the set of fields must stay stable.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((finish == null) ? 0 : finish.hashCode());
        result = prime * result
                + ((propertyName == null) ? 0 : propertyName.hashCode());
        result = prime * result + ((start == null) ? 0 : start.hashCode());
        //NOTE. We have explicitly left out direction. According to IndexTest:testCollectionOrdering, a cursor can be used and change direction
        //of the ordering.
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        QuerySlice other = (QuerySlice) obj;
        if (finish == null) {
            if (other.finish != null) {
                return false;
            }
        } else if (!finish.equals(other.finish)) {
            return false;
        }
        if (propertyName == null) {
            if (other.propertyName != null) {
                return false;
            }
        } else if (!propertyName.equals(other.propertyName)) {
            return false;
        }
        if (start == null) {
            if (other.start != null) {
                return false;
            }
        } else if (!start.equals(other.start)) {
            return false;
        }
        return true;
    }
}
}
| |
/*
* Copyright 2012, TopicQuests
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.topicquests.common.api;
/**
 * Bootstrap Ontology to provide a <em>core legend</em>
 * <p>
 * All members are String constants: type/class identifiers, property keys and
 * suffixes used to build topic-map locators. Values are part of the persisted
 * data format — renaming a constant is safe, changing a value is not.
 * <p>
 * NOTE(review): this is the "constant interface" anti-pattern (Effective Java
 * Item 22); a final class with a private constructor would be preferable, but
 * converting it would break every implementer of this interface.
 * @author jackpark
 *
 */
public interface ITopicQuestsOntology {
	public static final String
		BASE_URI = "http://topicquests.org/",
		CORE_URI = ITopicQuestsOntology.BASE_URI+"core/",
		CORE_NS = "tscore",
		ROLE_BASED_RELATION_SUFFIX = "_RBReln",
		LINGUISTIC_RELATION_SUFFIX = "_LReln",
		SIMPLE_ASSERTION_SUFFIX = "_S_Assn",
		DESCRIPTION_SUFFIX = "_Description",
		CREATOR_SUFFIX = "_Creator",
		AIR_SUFFIX = "_Air",
		NAME_SUFFIX = "_Name",
		GUEST_USER = "guest",
		SYSTEM_USER = "SystemUser",
		// NOTE(review): hard-coded credential checked into source; confirm this is
		// a bootstrap-only default that is rotated on deployment.
		SYSTEM_USER_PASSWORD = "SystemUser!",
		//////////////////////////////////////
		// Language codes
		//////////////////////////////////////
		DEFAULT_LANGUAGE = "en", //english
		/** Synonym prefix, e.g. SYN_en */
		SYNONYM_PREFIX = "SYN_",
		/** Opposite relation label/details prefix */
		OPPOSITE_RELATION_PREFIX = "OPP_",
		//////////////////////////////////////
		// CLASS TYPES
		//////////////////////////////////////
		/**
		 * Provides a source legend for bootstrap properties
		 */
		CORE_LEGEND = "CoreLegend",
//		SUBJECT_MAP_TYPE = "SubjectMapType",
		/**
		 * Internal SubjectProxy types, subclasses of which are NAME_TYPE_NODE & ????
		 */
//		SUBJECT_PROXY_TYPE = "SubjectProxyType",
//		AIR_PROXY_TYPE = "AirProxyType",
		/**
		 * Creates a SubjectProxy that aggregates merged SubjectProxy objects
		 */
//		VIRTUAL_PROXY_TYPE = "VirtualProxyType",
		/**
		 * Creates a SubjectProxy that links to a SubjectProxy in a different database
		 */
//		REMOTE_PROXY_TYPE = "RemoteProxyType",
		TYPE_TYPE = "TypeType",
		CLASS_TYPE = "ClassType",
		//needed in export of a tuple
		NODE_TYPE = "NodeType",
//		TUPLE_TYPE = "TypleType",
		GRAPH_TYPE = "GraphType",
		//needed in merge and export of a tuple
		VIRTUAL_NODE_TYPE = "VirtualNodeType",
		ONTOLOGY_TYPE = "OntologyType",
		RULE_TYPE = "RuleType",
		MERGE_RULE_TYPE = "MergeRuleType",
		RESOURCE_TYPE = "ResourceType",
		WEB_RESOURCE_TYPE = "WebResourceType",
		RELATION_TYPE = "RelationType",
		ROLE_TYPE = "RoleType",
		USER_TYPE = "UserType",
		UNKNOWN_USER_TYPE = "UnknownUserType",
		/**
		 * A USER_TYPE generated on import from a different map
		 */
		FOREIGN_USER_TYPE = "ForeignUserType",
		AGENT_TYPE = "AgentType",
		MERGE_AGENT_TYPE = "MergeAgentType",
		HARVEST_AGENT_TYPE = "HarvestAgentType",
		/**
		 * The following <em>LEGEND</em> types are defined for exporting.
		 * Is a CLASS_TYPE
		 */
		LEGEND_TYPE = "LegendType",
		SCOPE_TYPE = "ScopeType",
		MERGE_RULE_SCOPE_TYPE = "MergeRuleScopeType",
		THEME_TYPE = "ThemeType",
		///////////////////////
		// Assertions
		///////////////////////
		ASSERTION_TYPE = "AssertionType",
		MERGE_ASSERTION_TYPE = "MergeAssertionType",
		// NOTE(review): constant name is missing an underscore (ASSERTIONTYPE vs
		// ASSERTION_TYPE); renaming would break referencing code, so left as-is.
		POSSIBLE_MERGE_ASSERTIONTYPE = "PossibleMergeAssertionType",
		UNMERGE_ASSERTION_TYPE = "UnMergeAssertionType",
		ROLE_BASED_RELATION_TYPE = "RoleBasedRelationType",
		// NOTE(review): value starts lowercase ("linguistic...") unlike every other
		// type value here; confirm whether that casing is intentional — the value
		// is persisted, so it must not be "fixed" casually.
		LINGUISTIC_RELATION_TYPE = "linguisticRelationType",
		SIMPLE_ASSERTION_TYPE = "SimpleAssertionType",
		/**
		 * Predefines as subclass of LEGEND_TYPE: user must subclass which assertion type
		 */
		LEGEND_ASSERTION_TYPE = "LegendAssertionType",
		PROPERTY_TYPE = "PropertyType",
		//////////////////////////////////////
		// PROPERTY TYPES
		//////////////////////////////////////
		SUBCLASS_OF_PROPERTY_TYPE = "sbOf",
//		HAS_SUBCLASSES_PROPERTY_TYPE = "HasSubclasses",
		INSTANCE_OF_PROPERTY_TYPE = "inOf",
//		HAS_INSTANCES_PROPERTY_TYPE = "HasInstances",
		/**
		 * A multi-valued property which is all the parents of this node
		 */
		TRANSITIVE_CLOSURE_PROPERTY_TYPE = "trCl",
		/**
		 * Predefines as subclass of LEGEND_TYPE and PROPERTY_TYPE
		 */
		LEGEND_PROPERTY_TYPE = "LegendPropertyType", //later
		/**
		 * In theory, used to identify foreign legends for imported resources
		 */
		DEFINING_LEGEND_PROPERTY_TYPE = "DefiningLegendPropertyType", //later
//		VERSION_PROPERTY_TYPE = "VersionPropertyType",
		// FOR ONTOLOGIES
		ONTOLOGY_OBJECT_PROPERTY_TYPE = "OntologyObjectPropertyType",
		// NOTE(review): "FUNDTIONAL" is a typo for "FUNCTIONAL" in the constant
		// name (the value is spelled correctly); renaming would break callers.
		ONTOLOGY_INVERSE_FUNDTIONAL_PROPERTY_TYPE = "OntologyInverseFunctionalPropertyType",
		ONTOLOGY_FUNCTIONAL_PROPERTY_TYPE = "OntologyFunctionalPropertyType",
		ONTOLOGY_DATATYPE_PROPERTY_TYPE = "OntologyDatatypePropertyType",
		ONTOLOGY_ANNOTATION_PROPERTY_TYPE = "OntologyAnnotationPropertyType",
		DOMAIN_PROPERTY_TYPE = "DomainPropertyType",
		RANGE_PROPERTY_TYPE = "RangePropertyType",
		EQUIVALENT_TO_PROPERTY_TYPE = "EquivalentToPropertyType",
		INVERSE_OF_PROPERTY_TYPE = "InverseOfPropertyType",
		VERSION = "_ver",
		/**
		 * List of String values (integers) of version numbers available for an AIRProxy SubjectProxy
		 */
//		VERSION_LIST_PROPERTY = "VersionListPropertyType",
		// can't see where this is used
		TRANSCLUDED_ROOT_PROXY_PROPERTY = "TranscludedRootProxyProperty", //????
		TRANSCLUDE_LIST_PROPERTY = "tclL",
		/**
		 * Added to Merge Assertion Node
		 */
		MERGE_REASON_RULES_PROPERTY = "mrgRnRlL",
		/** each node can have one and only one merge tuple */
		MERGE_TUPLE_PROPERTY = "mrgT",
		MERGE_LIST_PROPERTY = "mergeList", //NOT USED
		/** some nodes which represent web pages might be href'd by other pages */
		BACKLINK_LIST_PROPERTY = "bklkL",
		/**
		 * If a proxy has a RestrictionProperty, then it might be:
		 * <ul>
		 * <li>Not public</li>
		 * <li>Editable by only selected people</li>
		 * <li>...</li>
		 * </ul>
		 */
		RESTRICTION_PROPERTY_TYPE = "rstns",
		//from IConceptualGraph
		GRAPH_CONCEPT_LIST_PROPERTY_TYPE = "graphconlist",
		GRAPH_RELATION_LIST_PROPERTY_TYPE = "graphrelnlist",
		GRAPH_PARENT_GRAPH_PROPERTY_TYPE = "graphparent",
		/** list of symbols of Scope topics */
		SCOPE_LIST_PROPERTY_TYPE = "scpL",
		PSI_PROPERTY_TYPE = "psi",
		/**
		 * Label is e.g. the IBIS statement in a node
		 */
		LABEL_PROPERTY = "label",
		/**
		 * Very short label: 70 characters, like DebateGraph
		 */
		SMALL_LABEL_PROPERTY = "smallLabel",
		/**
		 * Details are like a description except just one per node
		 */
		DETAILS_PROPERTY = "details",
		/**
		 * The official, unique identifier for any proxy (node)
		 */
		LOCATOR_PROPERTY = "lox",
		RESOURCE_URL_PROPERTY = "url",
		////////////////////////////
		//Dealing with Tuples
		////////////////////////////
		/** used for unrestricted tuples -- all nodes are public */
		TUPLE_LIST_PROPERTY = "tpL",
		/** used where any node in the tuple is not public */
		TUPLE_LIST_PROPERTY_RESTRICTED = "tpLr",
		//used for pivots
		PIVOT_LIST_PROPERTY = "pvL",
		RESTRICTED_PIVOT_LIST_PROPERTY = "rpvL",
		TUPLE_OBJECT_PROPERTY = "tupO",
		TUPLE_OBJECT_TYPE_PROPERTY = "tupOT",
		TUPLE_SUBJECT_PROPERTY = "tupS",
		TUPLE_SUBJECT_TYPE_PROPERTY = "tupST",
		TUPLE_IS_TRANSCLUDE_PROPERTY = "isTrcld",
		TUPLE_SUBJECT_ROLE_PROPERTY = "tupSR",
		TUPLE_OBJECT_ROLE_PROPERTY = "tupOR",
		TUPLE_THEME_PROPERTY = "tupTh",
		TUPLE_SIGNATURE_PROPERTY = "tupSig",
		LARGE_IMAGE_PATH = "lIco",
		SMALL_IMAGE_PATH = "sIco",
		CREATOR_ID_PROPERTY = "crtr",
		//Date
		CREATED_DATE_PROPERTY = "crDt",
		LAST_EDIT_DATE_PROPERTY = "lEdDt",
		//boolean
		IS_PRIVATE_PROPERTY = "isPrv",
		IS_VIRTUAL_PROXY = "isVrt",
		//true if node has been federated, default: false
		IS_FEDERATED = "isFederated",
		IS_LIVE = "isLiv",
		CONVERSATION_NODE_TYPE = "conTyp",
		CHILD_NODE_LIST = "cNL",
		PARENT_NODE_LIST = "pNL",
		CONVERSATION_ROOT = "conRt",
		SORT_DATE = "srtDt",
		//used for infoBoxes
		INFO_BOX_LIST_PROPERTY ="infL",
		//cardinality of tuples stored for sorting (e.g. popular tags)
		TUPLE_COUNT = "tpC",
		///////////////////////////
		// Air properties
		///////////////////////////
		AIR_SUBJECT_PROPERTY = "subj",
		AIR_SUBJECT_VERSION_PROPERTY = "subjv",
		AIR_BODY_PROPERTY = "body",
		AIR_BODY_VERSION_PROPERTY = "bodyv",
		RELATION_WEIGHT = "relWt";
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceResult;
/**
 * <p>
 * Contains the output of DescribeVpcAttribute.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeVpcAttributeResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The ID of the VPC. */
    private String vpcId;

    /**
     * Indicates whether DNS resolution is enabled for the VPC. If this attribute is <code>true</code>, the Amazon DNS
     * server resolves DNS hostnames for your instances to their corresponding IP addresses; otherwise, it does not.
     */
    private Boolean enableDnsSupport;

    /**
     * Indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is <code>true</code>,
     * instances in the VPC get DNS hostnames; otherwise, they do not.
     */
    private Boolean enableDnsHostnames;

    /**
     * Sets the ID of the VPC.
     *
     * @param vpcId
     *        The ID of the VPC.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * Returns the ID of the VPC.
     *
     * @return The ID of the VPC.
     */
    public String getVpcId() {
        return this.vpcId;
    }

    /**
     * Sets the ID of the VPC.
     *
     * @param vpcId
     *        The ID of the VPC.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeVpcAttributeResult withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * Sets whether DNS resolution is enabled for the VPC.
     *
     * @param enableDnsSupport
     *        Indicates whether DNS resolution is enabled for the VPC. If this attribute is <code>true</code>, the
     *        Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses;
     *        otherwise, it does not.
     */
    public void setEnableDnsSupport(Boolean enableDnsSupport) {
        this.enableDnsSupport = enableDnsSupport;
    }

    /**
     * Returns whether DNS resolution is enabled for the VPC.
     *
     * @return Indicates whether DNS resolution is enabled for the VPC. If this attribute is <code>true</code>, the
     *         Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses;
     *         otherwise, it does not.
     */
    public Boolean getEnableDnsSupport() {
        return this.enableDnsSupport;
    }

    /**
     * Sets whether DNS resolution is enabled for the VPC.
     *
     * @param enableDnsSupport
     *        Indicates whether DNS resolution is enabled for the VPC. If this attribute is <code>true</code>, the
     *        Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses;
     *        otherwise, it does not.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeVpcAttributeResult withEnableDnsSupport(Boolean enableDnsSupport) {
        setEnableDnsSupport(enableDnsSupport);
        return this;
    }

    /**
     * Returns whether DNS resolution is enabled for the VPC (boolean-style accessor).
     *
     * @return Indicates whether DNS resolution is enabled for the VPC. If this attribute is <code>true</code>, the
     *         Amazon DNS server resolves DNS hostnames for your instances to their corresponding IP addresses;
     *         otherwise, it does not.
     */
    public Boolean isEnableDnsSupport() {
        return this.enableDnsSupport;
    }

    /**
     * Sets whether the instances launched in the VPC get DNS hostnames.
     *
     * @param enableDnsHostnames
     *        Indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is
     *        <code>true</code>, instances in the VPC get DNS hostnames; otherwise, they do not.
     */
    public void setEnableDnsHostnames(Boolean enableDnsHostnames) {
        this.enableDnsHostnames = enableDnsHostnames;
    }

    /**
     * Returns whether the instances launched in the VPC get DNS hostnames.
     *
     * @return Indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is
     *         <code>true</code>, instances in the VPC get DNS hostnames; otherwise, they do not.
     */
    public Boolean getEnableDnsHostnames() {
        return this.enableDnsHostnames;
    }

    /**
     * Sets whether the instances launched in the VPC get DNS hostnames.
     *
     * @param enableDnsHostnames
     *        Indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is
     *        <code>true</code>, instances in the VPC get DNS hostnames; otherwise, they do not.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeVpcAttributeResult withEnableDnsHostnames(Boolean enableDnsHostnames) {
        setEnableDnsHostnames(enableDnsHostnames);
        return this;
    }

    /**
     * Returns whether the instances launched in the VPC get DNS hostnames (boolean-style accessor).
     *
     * @return Indicates whether the instances launched in the VPC get DNS hostnames. If this attribute is
     *         <code>true</code>, instances in the VPC get DNS hostnames; otherwise, they do not.
     */
    public Boolean isEnableDnsHostnames() {
        return this.enableDnsHostnames;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * The output format (including the trailing comma before an omitted null field)
     * is kept identical to the generated original.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getVpcId() != null)
            sb.append("VpcId: ").append(getVpcId()).append(",");
        if (getEnableDnsSupport() != null)
            sb.append("EnableDnsSupport: ").append(getEnableDnsSupport()).append(",");
        if (getEnableDnsHostnames() != null)
            sb.append("EnableDnsHostnames: ").append(getEnableDnsHostnames());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeVpcAttributeResult))
            return false;
        DescribeVpcAttributeResult other = (DescribeVpcAttributeResult) obj;
        // Objects.equals collapses the original per-field null-xor + equals checks.
        return Objects.equals(other.getVpcId(), this.getVpcId())
                && Objects.equals(other.getEnableDnsSupport(), this.getEnableDnsSupport())
                && Objects.equals(other.getEnableDnsHostnames(), this.getEnableDnsHostnames());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based accumulation (seed 1, null -> 0)
        // as the hand-rolled prime loop in the generated original, so hash values
        // are unchanged.
        return Objects.hash(getVpcId(), getEnableDnsSupport(), getEnableDnsHostnames());
    }

    @Override
    public DescribeVpcAttributeResult clone() {
        try {
            return (DescribeVpcAttributeResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.analyzer.driver;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import com.asakusafw.dmdl.DmdlTesterRoot;
import com.asakusafw.dmdl.semantics.DmdlSemantics;
import com.asakusafw.dmdl.semantics.ModelDeclaration;
import com.asakusafw.dmdl.semantics.ModelSymbol;
import com.asakusafw.dmdl.semantics.trait.ProjectionsTrait;
import com.asakusafw.dmdl.semantics.trait.ReferencesTrait;
/**
 * Test for {@link AutoProjectionDriver}.
 * <p>
 * Tests without an inline script argument load a DMDL script resolved by the
 * test framework (presumably keyed by test method name — see {@code DmdlTesterRoot}).
 */
public class AutoProjectionDriverTest extends DmdlTesterRoot {

    /**
     * Initializes the test.
     * @throws Exception if some errors were occurred
     */
    @Before
    public void setUp() throws Exception {
        attributeDrivers.add(new AutoProjectionDriver());
    }

    /**
     * auto projection.
     */
    @Test
    public void auto_projection() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(1));
        assertThat(projections, has(model("p1")));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(1));
        assertThat(references, has(model("p1")));
    }

    /**
     * includes subset.
     */
    @Test
    public void auto_projection_subset() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(1));
        assertThat(projections, has(model("p1")));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(1));
        assertThat(references, has(model("p1")));
    }

    /**
     * does not include superset.
     */
    @Test
    public void auto_projection_superset() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(0));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(0));
    }

    /**
     * does not include projections with incompatible properties.
     */
    @Test
    public void auto_projection_incompatible() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(0));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(0));
    }

    /**
     * only includes projective models.
     */
    @Test
    public void auto_projection_record() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(0));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(0));
    }

    /**
     * already includes some projections.
     */
    @Test
    public void auto_projection_already() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(2));
        assertThat(projections, has(model("p1")));
        assertThat(projections, has(model("p2")));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(2));
        assertThat(references, has(model("p1")));
        assertThat(references, has(model("p2")));
    }

    /**
     * auto projection for summarization.
     */
    @Test
    public void auto_projection_summarize() {
        DmdlSemantics world = resolve();
        ModelDeclaration model = world.findModelDeclaration("simple");
        assertThat(model.getSymbol(), is(model("simple")));

        List<ModelSymbol> projections = projections(model);
        assertThat(projections.size(), is(1));
        assertThat(projections, has(model("total")));

        List<ModelSymbol> references = references(model);
        assertThat(references.size(), is(1));
        assertThat(references, has(model("total")));
    }

    /**
     * references.
     */
    @Test
    public void auto_projection_ref() {
        DmdlSemantics world = resolve(new String[] {
                "@auto_projection",
                "m = {",
                "  a : INT;",
                "  ref = {a};",
                "};",
                "projective p = {",
                "  ref : {INT};",
                "};",
        });
        ModelDeclaration model = world.findModelDeclaration("m");
        List<ModelSymbol> projections = projections(model);
        assertThat(projections, hasSize(1));
        assertThat(projections, contains(model("p")));
    }

    /**
     * only projection has references.
     */
    @Test
    public void auto_projection_ref_nothing() {
        DmdlSemantics world = resolve(new String[] {
                "@auto_projection",
                "m = {",
                "  a : INT;",
                "};",
                "projective p = {",
                "  ref : {INT};",
                "};",
        });
        ModelDeclaration model = world.findModelDeclaration("m");
        List<ModelSymbol> projections = projections(model);
        assertThat(projections, hasSize(0));
    }

    /**
     * references w/ body.
     */
    @Test
    public void auto_projection_ref_body() {
        DmdlSemantics world = resolve(new String[] {
                "@auto_projection",
                "m = {",
                "  a : INT;",
                "  ref = {a};",
                "};",
                "projective p = {",
                "  ref : {INT} = {};",
                "};",
        });
        ModelDeclaration model = world.findModelDeclaration("m");
        List<ModelSymbol> projections = projections(model);
        assertThat(projections, hasSize(0));
    }

    /**
     * references w/ inconsistent type.
     */
    @Test
    public void auto_projection_ref_inconsistent_type() {
        DmdlSemantics world = resolve(new String[] {
                "@auto_projection",
                "m = {",
                "  a : INT;",
                "  ref = {a};",
                "};",
                "projective p = {",
                "  ref : {LONG};",
                "};",
        });
        ModelDeclaration model = world.findModelDeclaration("m");
        List<ModelSymbol> projections = projections(model);
        assertThat(projections, hasSize(0));
    }

    /**
     * attribute is attached to property.
     */
    @Test
    public void invalid_auto_projection_property() {
        shouldSemanticError();
    }

    /**
     * extra element.
     */
    @Test
    public void invalid_auto_projection_extra() {
        shouldSemanticError();
    }

    // Returns the projections recorded on the model, or an empty list when the
    // trait is absent.
    private static List<ModelSymbol> projections(ModelDeclaration model) {
        return model.findTrait(ProjectionsTrait.class)
                .map(ProjectionsTrait::getProjections)
                .orElse(Collections.emptyList());
    }

    // Returns the references recorded on the model, or an empty list when the
    // trait is absent.
    private static List<ModelSymbol> references(ModelDeclaration model) {
        return model.findTrait(ReferencesTrait.class)
                .map(ReferencesTrait::getReferences)
                .orElse(Collections.emptyList());
    }
}
| |
package seedu.gtd.logic.parser;
import static seedu.gtd.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.gtd.commons.core.Messages.START_END_DATE_INVALID_COMMAND_FORMAT;
import static seedu.gtd.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import seedu.gtd.commons.exceptions.IllegalValueException;
import seedu.gtd.commons.util.StringUtil;
import seedu.gtd.logic.commands.*;
/**
* Parses user input.
*/
public class Parser {
//@@author addressbook-level4
/**
* Used for initial separation of command word and args.
*/
private static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<commandWord>\\S+)(?<arguments>.*)");
private static final Pattern TASK_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");
private static final Pattern KEYWORDS_ARGS_FORMAT =
Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)"); // one or more keywords separated by whitespace
// private static final Pattern TASK_DATA_ARGS_FORMAT = // '/' forward slashes are reserved for delimiter prefixes
// Pattern.compile("(?<name>[^/]+)"
// + " d/(?<dueDate>[^/]+)"
// + " a/(?<address>[^/]+)"
// + " p/(?<priority>[^/]+)"
// + "(?<tagArguments>(?: t/[^/]+)*)"); // variable number of tags
//@@author A0130677A
private static final Pattern NAME_TASK_DATA_ARGS_FORMAT =
Pattern.compile("(?<name>[^/]+) (s|t|p|a|d|z)/.*");
private static final Pattern PRIORITY_TASK_DATA_ARGS_FORMAT =
Pattern.compile(".* p/(?<priority>[^/]+) (s|t|a|d|z)/.*");
private static final Pattern ADDRESS_TASK_DATA_ARGS_FORMAT =
Pattern.compile(".* a/(?<address>[^/]+) (s|t|p|d|z)/.*");
private static final Pattern STARTDATE_TASK_DATA_ARGS_FORMAT =
Pattern.compile(".* s/(?<startDate>[^/]+) (d|t|a|p|z)/.*");
private static final Pattern DUEDATE_TASK_DATA_ARGS_FORMAT =
Pattern.compile(".* d/(?<dueDate>[^/]+) (s|t|a|p|z)/.*");
private static final Pattern TAGS_TASK_DATA_ARGS_FORMAT =
Pattern.compile(".* t/(?<tagArguments>[^/]+) (s|d|a|p|z)/.*");
//@@author addressbook-level4
private static final Pattern EDIT_DATA_ARGS_FORMAT =
Pattern.compile("(?<targetIndex>\\S+)"
+ " (?<newDetails>\\S+(?:\\s+\\S+)*)");
public Parser() {}
/**
* Parses user input into command for execution.
*
* @param userInput full user input string
* @return the command based on the user input
*/
public Command parseCommand(String userInput) {
final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
final String commandWord = matcher.group("commandWord");
final String arguments = matcher.group("arguments");
switch (commandWord) {
case AddCommand.COMMAND_WORD:
return prepareAdd(arguments);
case EditCommand.COMMAND_WORD:
return prepareEdit(arguments);
case SelectCommand.COMMAND_WORD:
return prepareSelect(arguments);
case DeleteCommand.COMMAND_WORD:
return prepareDelete(arguments);
case DoneCommand.COMMAND_WORD:
return prepareDone(arguments);
case ClearCommand.COMMAND_WORD:
return new ClearCommand();
case FindCommand.COMMAND_WORD:
return prepareFind(arguments);
case ListCommand.COMMAND_WORD:
return prepareList(arguments);
case ExitCommand.COMMAND_WORD:
return new ExitCommand();
case HelpCommand.COMMAND_WORD:
return prepareHelp(arguments);
case UndoCommand.COMMAND_WORD:
return new UndoCommand();
case SetFilePathCommand.COMMAND_WORD:
return prepareSetFilePath(arguments);
default:
return new IncorrectCommand(MESSAGE_UNKNOWN_COMMAND);
}
}
/**
* Parses arguments in the context of the add task command.
*
* @param args full command args string
* @return the prepared command
*/
//@@author A0130677A
private Command prepareAdd(String args){
String preprocessedArg = appendEnd(args.trim());
final Matcher nameMatcher = NAME_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
final Matcher startDateMatcher = STARTDATE_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
final Matcher dueDateMatcher = DUEDATE_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
final Matcher addressMatcher = ADDRESS_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
final Matcher priorityMatcher = PRIORITY_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
final Matcher tagsMatcher = TAGS_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArg);
String nameToAdd = checkEmptyAndAddDefault(nameMatcher, "name", "nil");
String startDateToAdd = checkEmptyAndAddDefault(startDateMatcher, "startDate", "nil");
String dueDateToAdd = checkEmptyAndAddDefault(dueDateMatcher, "dueDate", "nil");
String addressToAdd = checkEmptyAndAddDefault(addressMatcher, "address", "nil");
String priorityToAdd = checkEmptyAndAddDefault(priorityMatcher, "priority", "1");
// format date if due date or start date is specified
Date dueDateInDateFormat = null;
Date startDateInDateFormat = null;
if (dueDateMatcher.matches()) {
dueDateInDateFormat = getDateInDateFormat(dueDateToAdd);
dueDateToAdd = parseDueDate(dueDateToAdd);
System.out.println(dueDateInDateFormat);
}
if (startDateMatcher.matches()) {
startDateInDateFormat = getDateInDateFormat(startDateToAdd);
startDateToAdd = parseDueDate(startDateToAdd);
}
// check that end date is strictly later than start date
if (dueDateInDateFormat != null && startDateInDateFormat != null
&& dueDateInDateFormat.compareTo(startDateInDateFormat) < 0) {
return new IncorrectCommand(START_END_DATE_INVALID_COMMAND_FORMAT);
}
Set<String> tagsProcessed = Collections.emptySet();
if (tagsMatcher.matches()) {
tagsProcessed = getTagsFromArgs(tagsMatcher.group("tagArguments"));
}
// Validate arg string format
if (!nameMatcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
}
try {
return new AddCommand(
nameToAdd,
startDateToAdd,
dueDateToAdd,
addressToAdd,
priorityToAdd,
tagsProcessed
);
} catch (IllegalValueException ive) {
return new IncorrectCommand(ive.getMessage());
}
}
private String appendEnd(String args) {
return args + " z/";
}
private String checkEmptyAndAddDefault(Matcher matcher, String groupName, String defaultValue) {
if (matcher.matches()) {
return matcher.group(groupName);
} else {
return defaultValue;
}
}
//@@author A0146130W
private String parseDueDate(String dueDateRaw) {
NaturalLanguageProcessor nlp = new DateNaturalLanguageProcessor();
return nlp.formatString(dueDateRaw);
}
//@@author A0130677A
private Date getDateInDateFormat(String dueDateRaw) {
NaturalLanguageProcessor nlp = new DateNaturalLanguageProcessor();
return nlp.getDate(dueDateRaw);
}
// remove time on date parsed to improve search results
private String removeTimeOnDate(String dueDateRaw) {
String[] dateTime = dueDateRaw.split(" ");
return dateTime[0];
}
//@@author addressbook-level4
/**
* Extracts the new task's tags from the add command's tag arguments string.
* Merges duplicate tag strings.
*/
private static Set<String> getTagsFromArgs(String tagArguments) {
// no tags
if (tagArguments.isEmpty()) {
return Collections.emptySet();
}
// replace first delimiter prefix, then split
final Collection<String> tagStrings = Arrays.asList(tagArguments.split(" "));
return new HashSet<>(tagStrings);
}
//@@author A0146130W
/**
* Parses arguments in the context of the edit task command.
*
* @param args full command args string
* @return the prepared command
*/
private Command prepareEdit(String args) {
Optional<Integer> index = parseIndex(args, EDIT_DATA_ARGS_FORMAT);
final Matcher matcher = EDIT_DATA_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, EditCommand.MESSAGE_USAGE));
}
final String[] splitNewDetails = matcher.group("newDetails").split("\\s+");
ArrayList<String> combinedDetails = combineSameDetails(splitNewDetails);
Hashtable<String, String> newDetailsSet = new Hashtable<String, String>();
for (String detail : combinedDetails) {
String detailType = extractDetailType(detail);
String preparedNewDetail = prepareNewDetail(detailType, detail);
System.out.println("before adding to hashtable: " + detailType + " " + preparedNewDetail);
newDetailsSet.put(detailType, preparedNewDetail);
}
return new EditCommand(
index.get()-1,
newDetailsSet
);
}
private ArrayList<String> combineSameDetails(String[] details) {
ArrayList<String> alDetails = new ArrayList<String>(Arrays.asList(details));
System.out.println(alDetails.toString());
String name = new String();
String address = new String();
String dueDate = new String();
String priority = new String();
int currentDetailType = 0;
if(alDetails.size() == 1) {
return alDetails;
}
for (String detail: alDetails) {
System.out.println("detail: " + detail);
if(extractDetailType(detail).equals("name")) {
System.out.println("current detail type: " + currentDetailType);
switch(currentDetailType) {
case 1: address = address + " " + detail; break;
case 2: dueDate = dueDate + " " + detail; break;
case 3: priority = priority + " " + detail; break;
default: {
if(name.isEmpty()) name = detail;
else name = name + " " + detail;
break;
}
}
}
else if(extractDetailType(detail).equals("address")) {
System.out.println("detected address " + detail);
address = detail;
currentDetailType = 1;
}
else if(extractDetailType(detail).equals("dueDate")) {
System.out.println("detected dueDate " + detail);
dueDate = detail;
currentDetailType = 2;
}
else if(extractDetailType(detail).equals("priority")) {
System.out.println("detected priority " + detail);
address = detail;
currentDetailType = 3;
}
}
ArrayList<String> finalCombined = new ArrayList<String>();
//does not remove the separate words from the list, they will be overwritten by the final combined string
if(!name.isEmpty()) finalCombined.add(name);
System.out.println("from combining name: " + name);
if(!address.isEmpty()) finalCombined.add(address);
System.out.println("from combining address: " + address);
if(!dueDate.isEmpty()) finalCombined.add(dueDate);
if(!priority.isEmpty()) finalCombined.add(priority);
System.out.println("from combining: " + finalCombined.toString());
return finalCombined;
}
private String removeDetailPrefix(String detailWithPrefix) {
return detailWithPrefix.substring(detailWithPrefix.indexOf('/') + 1);
}
private String prepareNewDetail(String detailType, String detailWithPrefix) {
String detail = removeDetailPrefix(detailWithPrefix);
if(detailType.equals("dueDate")) detail = parseDueDate(detail);
return detail;
}
//@@author A0146130W-reused
private String extractDetailType(String args) {
String preprocessedArgs = " " + appendEnd(args.trim());
final Matcher dueDateMatcher = DUEDATE_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher addressMatcher = ADDRESS_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher priorityMatcher = PRIORITY_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
if(addressMatcher.matches()) {
return "address";
}
else if(dueDateMatcher.matches()) {
return "dueDate";
}
else if(priorityMatcher.matches()) {
return "priority";
}
return "name";
}
//@@author addressbook-level4
/**
* Parses arguments in the context of the delete task command.
*
* @param args full command args string
* @return the prepared command
*/
private Command prepareDelete(String args) {
Optional<Integer> index = parseIndex(args);
if(!index.isPresent()){
return new IncorrectCommand(
String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
}
return new DeleteCommand(index.get());
}
//@@author A0130677A
private Command prepareDone(String args) {
Optional<Integer> index = parseIndex(args);
if(!index.isPresent()){
return new IncorrectCommand(
String.format(MESSAGE_INVALID_COMMAND_FORMAT, DoneCommand.MESSAGE_USAGE));
}
return new DoneCommand(index.get());
}
//@@author addressbook-level4
/**
* Parses arguments in the context of the select task command.
*
* @param args full command args string
* @return the prepared command
*/
private Command prepareSelect(String args) {
Optional<Integer> index = parseIndex(args);
if(!index.isPresent()){
return new IncorrectCommand(
String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE));
}
return new SelectCommand(index.get());
}
/**
* Returns the specified index in the {@code command} IF a positive unsigned integer is given as the index.
* Returns an {@code Optional.empty()} otherwise.
*/
private Optional<Integer> parseIndex(String command) {
final Matcher matcher = TASK_INDEX_ARGS_FORMAT.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String index = matcher.group("targetIndex");
if(!StringUtil.isUnsignedInteger(index)){
return Optional.empty();
}
return Optional.of(Integer.parseInt(index));
}
//@@author A0146130W
private Optional<Integer> parseIndex(String command, Pattern matcherFormat) {
final Matcher matcher = matcherFormat.matcher(command.trim());
if (!matcher.matches()) {
return Optional.empty();
}
String index = matcher.group("targetIndex");
if(!StringUtil.isUnsignedInteger(index)){
return Optional.empty();
}
return Optional.of(Integer.parseInt(index));
}
//@@author addressbook-level4
/**
* Parses arguments in the context of the find task command.
*
* @param args full command args string
* @return the prepared command
*/
//@@author A0130677A
private Command prepareFind(String args) {
// check if parameters are specified and pass specified field to FindCommand
String preprocessedArgs = " " + appendEnd(args.trim());
final Matcher addressMatcher = ADDRESS_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher priorityMatcher = PRIORITY_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher startDateMatcher = STARTDATE_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher dueDateMatcher = DUEDATE_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
final Matcher tagsMatcher = TAGS_TASK_DATA_ARGS_FORMAT.matcher(preprocessedArgs);
Set<String> defaultSet = new HashSet<String>();
if (addressMatcher.matches()) {
String addressToBeFound = addressMatcher.group("address");
return new FindCommand(addressToBeFound, defaultSet,"address");
}
if (priorityMatcher.matches()) {
String priorityToBeFound = priorityMatcher.group("priority");
return new FindCommand(priorityToBeFound, defaultSet, "priority");
}
if (startDateMatcher.matches()) {
String dueDateToBeFound = dueDateMatcher.group("startDate");
String parsedDueDateToBeFound = removeTimeOnDate(parseDueDate(dueDateToBeFound));
return new FindCommand(parsedDueDateToBeFound, defaultSet, "startDate");
}
if (dueDateMatcher.matches()) {
String dueDateToBeFound = dueDateMatcher.group("dueDate");
String parsedDueDateToBeFound = removeTimeOnDate(parseDueDate(dueDateToBeFound));
return new FindCommand(parsedDueDateToBeFound, defaultSet, "dueDate");
}
if (tagsMatcher.matches()) {
String tagsToBeFound = tagsMatcher.group("tagArguments");
return new FindCommand(tagsToBeFound, defaultSet,"tagArguments");
}
// free-form search by keywords
final Matcher matcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT,
FindCommand.MESSAGE_USAGE));
}
// keywords delimited by whitespace
final String[] splitKeywords = matcher.group("keywords").split("\\s+");
final Set<String> keywordSet = new HashSet<>(Arrays.asList(splitKeywords));
final String keywords = matcher.group("keywords");
return new FindCommand(keywords, keywordSet, "nil");
}
//@@author addressbook-level4
private Command prepareList(String args) {
// check if parameters are specified and pass specified field to FindCommand
//String preprocessedArgs = " " + appendEnd(args.trim());
return new ListCommand(args);
}
/**
* Parses arguments in the context of the help command.
*
* @param args full command args string
* @return the prepared command
*/
//@@author A0139158X
private Command prepareHelp(String args) {
//if no argument
if (args.equals("")) {
args="help";
}
final Matcher matcher = BASIC_COMMAND_FORMAT.matcher(args.trim());
if (!matcher.matches()) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
}
final String commandWord = matcher.group("commandWord");
return new HelpCommand(commandWord);
}
//@@author A0139072H
/**
* Parses arguments in the context of the setFilePath command.
*
* @param args full command args string
* @return the prepared command
*/
private Command prepareSetFilePath(String args) {
if(args.equals("")){
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, SetFilePathCommand.MESSAGE_USAGE));
}
final String filePath = args;
try {
return new SetFilePathCommand(filePath);
} catch (IllegalValueException e) {
return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, SetFilePathCommand.MESSAGE_USAGE));
}
}
}
| |
/*
* $Id: AbstractRenderer.java 471754 2006-11-06 14:55:09Z husted $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts.faces.renderer;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import javax.faces.application.FacesMessage;
import javax.faces.component.EditableValueHolder;
import javax.faces.component.UIComponent;
import javax.faces.component.ValueHolder;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.faces.convert.Converter;
import javax.faces.convert.ConverterException;
import javax.faces.el.ValueBinding;
import javax.faces.render.Renderer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* <p>Abstract base class for concrete implementations of
* <code>javax.faces.render.Renderer</code> for the
* <em>Struts-Faces Integration Library</em>.</p>
*
* @version $Rev: 471754 $ $Date: 2006-11-06 15:55:09 +0100 (Lun, 06 nov 2006) $
*/
public abstract class AbstractRenderer extends Renderer {

    // -------------------------------------------------------- Static Variables

    /** Commons Logging instance for this class. */
    private static final Log log =
        LogFactory.getLog(AbstractRenderer.class);

    // -------------------------------------------------------- Renderer Methods

    /**
     * <p>Decode any new state of the specified <code>UIComponent</code>
     * from the request contained in the specified <code>FacesContext</code>,
     * and store that state on the <code>UIComponent</code>.</p>
     *
     * <p>The default implementation calls <code>setSubmittedValue()</code>
     * unless this component has a boolean <code>disabled</code> or
     * <code>readonly</code> attribute that is set to <code>true</code>.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>UIComponent</code> to be decoded
     *
     * @exception NullPointerException if <code>context</code> or
     *  <code>component</code> is <code>null</code>
     */
    public void decode(FacesContext context, UIComponent component) {

        // Enforce NPE requirements in the Javadocs
        if ((context == null) || (component == null)) {
            throw new NullPointerException();
        }

        // Disabled or readonly components are not decoded
        if (isDisabled(component) || isReadOnly(component)) {
            return;
        }

        // Save submitted value on EditableValueHolder components
        if (component instanceof EditableValueHolder) {
            setSubmittedValue(context, component);
        }

    }

    /**
     * <p>Render the beginning of the specified <code>UIComponent</code>
     * to the output stream or writer associated with the response we are
     * creating.</p>
     *
     * <p>The default implementation calls <code>renderStart()</code> and
     * <code>renderAttributes()</code>.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>UIComponent</code> to be decoded
     *
     * @exception NullPointerException if <code>context</code> or
     *  <code>component</code> is <code>null</code>
     *
     * @exception IOException if an input/output error occurs
     */
    public void encodeBegin(FacesContext context, UIComponent component)
        throws IOException {

        // Enforce NPE requirements in the Javadocs
        if ((context == null) || (component == null)) {
            throw new NullPointerException();
        }

        if (log.isTraceEnabled()) {
            log.trace("encodeBegin(id=" + component.getId() +
                      ", family=" + component.getFamily() +
                      ", rendererType=" + component.getRendererType() + ")");
        }

        // Render the element and attributes for this component
        ResponseWriter writer = context.getResponseWriter();
        renderStart(context, component, writer);
        renderAttributes(context, component, writer);

    }

    /**
     * <p>Render the children of the specified <code>UIComponent</code>
     * to the output stream or writer associated with the response we are
     * creating.</p>
     *
     * <p>The default implementation iterates through the children of
     * this component and renders them.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>UIComponent</code> to be decoded
     *
     * @exception NullPointerException if <code>context</code> or
     *  <code>component</code> is <code>null</code>
     *
     * @exception IOException if an input/output error occurs
     */
    public void encodeChildren(FacesContext context, UIComponent component)
        throws IOException {

        if (context == null || component == null) {
            throw new NullPointerException();
        }

        if (log.isTraceEnabled()) {
            log.trace("encodeChildren(id=" + component.getId() +
                      ", family=" + component.getFamily() +
                      ", rendererType=" + component.getRendererType() + ")");
        }

        Iterator kids = component.getChildren().iterator();
        while (kids.hasNext()) {
            UIComponent kid = (UIComponent) kids.next();
            kid.encodeBegin(context);
            if (kid.getRendersChildren()) {
                kid.encodeChildren(context);
            }
            kid.encodeEnd(context);
        }

        if (log.isTraceEnabled()) {
            log.trace("encodeChildren(id=" + component.getId() + ") end");
        }

    }

    /**
     * <p>Render the ending of the specified <code>UIComponent</code>
     * to the output stream or writer associated with the response we are
     * creating.</p>
     *
     * <p>The default implementation calls <code>renderEnd()</code>.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>UIComponent</code> to be decoded
     *
     * @exception NullPointerException if <code>context</code> or
     *  <code>component</code> is <code>null</code>
     *
     * @exception IOException if an input/output error occurs
     */
    public void encodeEnd(FacesContext context, UIComponent component)
        throws IOException {

        // Enforce NPE requirements in the Javadocs
        if ((context == null) || (component == null)) {
            throw new NullPointerException();
        }

        if (log.isTraceEnabled()) {
            log.trace("encodeEnd(id=" + component.getId() +
                      ", family=" + component.getFamily() +
                      ", rendererType=" + component.getRendererType() + ")");
        }

        // Render the element closing for this component
        ResponseWriter writer = context.getResponseWriter();
        renderEnd(context, component, writer);

    }

    // --------------------------------------------------------- Package Methods

    // ------------------------------------------------------- Protected Methods

    /**
     * <p>Render nested child components by invoking the encode methods
     * on those components, but only when the <code>rendered</code>
     * property is <code>true</code>.</p>
     */
    protected void encodeRecursive(FacesContext context, UIComponent component)
        throws IOException {

        // Suppress rendering if "rendered" property on the component is false
        if (!component.isRendered()) {
            return;
        }

        // Render this component and its children recursively
        if (log.isTraceEnabled()) {
            log.trace("encodeRecursive(id=" + component.getId() +
                      ", family=" + component.getFamily() +
                      ", rendererType=" + component.getRendererType() +
                      ") encodeBegin");
        }
        component.encodeBegin(context);
        if (component.getRendersChildren()) {
            if (log.isTraceEnabled()) {
                log.trace("encodeRecursive(id=" + component.getId() +
                          ") delegating");
            }
            component.encodeChildren(context);
        } else {
            if (log.isTraceEnabled()) {
                log.trace("encodeRecursive(id=" + component.getId() +
                          ") recursing");
            }
            Iterator kids = component.getChildren().iterator();
            while (kids.hasNext()) {
                UIComponent kid = (UIComponent) kids.next();
                encodeRecursive(context, kid);
            }
        }
        if (log.isTraceEnabled()) {
            log.trace("encodeRecursive(id=" + component.getId() + ") encodeEnd");
        }
        component.encodeEnd(context);

    }

    /**
     * <p>Return <code>true</code> if the specified component is disabled.</p>
     *
     * @param component <code>UIComponent</code> to be checked
     */
    protected boolean isDisabled(UIComponent component) {

        Object disabled = component.getAttributes().get("disabled");
        if (disabled == null) {
            return (false);
        }
        // The attribute may be stored either as a String or as a Boolean
        if (disabled instanceof String) {
            return (Boolean.valueOf((String) disabled).booleanValue());
        } else {
            return (disabled.equals(Boolean.TRUE));
        }

    }

    /**
     * <p>Return <code>true</code> if the specified component is read only.</p>
     *
     * @param component <code>UIComponent</code> to be checked
     */
    protected boolean isReadOnly(UIComponent component) {

        Object readonly = component.getAttributes().get("readonly");
        if (readonly == null) {
            return (false);
        }
        // The attribute may be stored either as a String or as a Boolean
        if (readonly instanceof String) {
            return (Boolean.valueOf((String) readonly).booleanValue());
        } else {
            return (readonly.equals(Boolean.TRUE));
        }

    }

    /**
     * <p>Render the element attributes for the generated markup related to this
     * component.  Simple renderers that create a single markup element
     * for this component should override this method and include calls to
     * to <code>writeAttribute()</code> and <code>writeURIAttribute</code>
     * on the specified <code>ResponseWriter</code>.</p>
     *
     * <p>The default implementation does nothing.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     * @param writer <code>ResponseWriter</code> to which the element
     *  start should be rendered
     *
     * @exception IOException if an input/output error occurs
     */
    protected void renderAttributes(FacesContext context, UIComponent component,
                                    ResponseWriter writer) throws IOException {

    }

    /**
     * <p>Render the element end for the generated markup related to this
     * component.  Simple renderers that create a single markup element
     * for this component should override this method and include a call
     * to <code>endElement()</code> on the specified
     * <code>ResponseWriter</code>.</p>
     *
     * <p>The default implementation does nothing.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     * @param writer <code>ResponseWriter</code> to which the element
     *  start should be rendered
     *
     * @exception IOException if an input/output error occurs
     */
    protected void renderEnd(FacesContext context, UIComponent component,
                             ResponseWriter writer) throws IOException {

    }

    /**
     * <p>Render any boolean attributes on the specified list that have
     * <code>true</code> values on the corresponding attribute of the
     * specified <code>UIComponent</code>.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     * @param writer <code>ResponseWriter</code> to which the element
     *  start should be rendered
     * @param names List of attribute names to be passed through
     *
     * @exception IOException if an input/output error occurs
     */
    protected void renderBoolean(FacesContext context,
                                 UIComponent component,
                                 ResponseWriter writer,
                                 String names[]) throws IOException {

        if (names == null) {
            return;
        }
        Map attributes = component.getAttributes();
        for (int i = 0; i < names.length; i++) {
            Object value = attributes.get(names[i]);
            if (value == null) {
                continue;
            }
            // The attribute may be stored either as a String or as a Boolean
            boolean flag;
            if (value instanceof String) {
                flag = Boolean.valueOf((String) value).booleanValue();
            } else {
                flag = Boolean.valueOf(value.toString()).booleanValue();
            }
            if (flag) {
                // Boolean HTML attributes are rendered as name="name"
                writer.writeAttribute(names[i], names[i], names[i]);
            }
        }

    }

    /**
     * <p>Render any attributes on the specified list directly to the
     * specified <code>ResponseWriter</code> for which the specified
     * <code>UIComponent</code> has a non-<code>null</code> attribute value.
     * This method may be used to "pass through" commonly used attribute
     * name/value pairs with a minimum of code.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     * @param writer <code>ResponseWriter</code> to which the element
     *  start should be rendered
     * @param names List of attribute names to be passed through
     *
     * @exception IOException if an input/output error occurs
     */
    protected void renderPassThrough(FacesContext context,
                                     UIComponent component,
                                     ResponseWriter writer,
                                     String names[]) throws IOException {

        if (names == null) {
            return;
        }
        Map attributes = component.getAttributes();
        for (int i = 0; i < names.length; i++) {
            Object value = attributes.get(names[i]);
            if (value != null) {
                if (value instanceof String) {
                    writer.writeAttribute(names[i], value, names[i]);
                } else {
                    writer.writeAttribute(names[i], value.toString(), names[i]);
                }
            }
        }

    }

    /**
     * <p>Render the element start for the generated markup related to this
     * component.  Simple renderers that create a single markup element
     * for this component should override this method and include a call
     * to <code>startElement()</code> on the specified
     * <code>ResponseWriter</code>.</p>
     *
     * <p>The default implementation does nothing.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     * @param writer <code>ResponseWriter</code> to which the element
     *  start should be rendered
     *
     * @exception IOException if an input/output error occurs
     */
    protected void renderStart(FacesContext context, UIComponent component,
                               ResponseWriter writer) throws IOException {

    }

    /**
     * <p>If a submitted value was included on this request, store it in the
     * component as appropriate.</p>
     *
     * <p>The default implementation determines whether this component
     * implements <code>EditableValueHolder</code>.  If so, it checks for a
     * request parameter with the same name as the <code>clientId</code>
     * of this <code>UIComponent</code>.  If there is such a parameter, its
     * value is passed (as a String) to the <code>setSubmittedValue()</code>
     * method on the <code>EditableValueHolder</code> component.</p>
     *
     * @param context <code>FacesContext</code> for the current request
     * @param component <code>EditableValueHolder</code> component whose
     *  submitted value is to be stored
     */
    protected void setSubmittedValue
        (FacesContext context, UIComponent component) {

        if (!(component instanceof EditableValueHolder)) {
            return;
        }
        String clientId = component.getClientId(context);
        Map parameters = context.getExternalContext().getRequestParameterMap();
        if (parameters.containsKey(clientId)) {
            if (log.isTraceEnabled()) {
                log.trace("setSubmittedValue(" + clientId + "," +
                          (String) parameters.get(clientId));
            }
            component.getAttributes().put("submittedValue",
                                          parameters.get(clientId));
        }

    }

    // --------------------------------------------------------- Private Methods

    /**
     * <p>Convert the Object representation of this component's value
     * to the corresponding String representation.  The default implementation
     * utilizes the <code>getAsString()</code> method of any associated
     * <code>Converter</code>.</p>
     *
     * @param context The <code>FacesContext</code> for this request
     * @param component The <code>UIComponent</code> whose value is
     *  being converted
     * @param value The Object representation to be converted
     *
     * @exception ConverterException if conversion fails
     */
    protected String getAsString(FacesContext context, UIComponent component,
                                 Object value) throws ConverterException {

        // Identify any Converter associated with this component value
        ValueBinding vb = component.getValueBinding("value");
        Converter converter = null;
        if (component instanceof ValueHolder) {
            // Acquire explicitly assigned Converter (if any)
            converter = ((ValueHolder) component).getConverter();
        }
        if ((converter == null) && (vb != null)) {
            // Acquire implicit by-type Converter (if any)
            Class type = vb.getType(context);
            if (type != null) {
                converter = context.getApplication().createConverter(type);
            }
        }

        // Convert the result if we identified a Converter
        if (converter != null) {
            return (converter.getAsString(context, component, value));
        } else if (value == null) {
            return ("");
        } else if (value instanceof String) {
            return ((String) value);
        } else {
            return (value.toString());
        }

    }

}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.llvm;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* <h3>Layout</h3>
*
* <pre><code>
* struct CXIdxDeclInfo {
* {@link CXIdxEntityInfo CXIdxEntityInfo} const * entityInfo;
* {@link CXCursor CXCursor} cursor;
* {@link CXIdxLoc CXIdxLoc} loc;
* {@link CXIdxContainerInfo CXIdxContainerInfo} const * semanticContainer;
* {@link CXIdxContainerInfo CXIdxContainerInfo} const * {@link #lexicalContainer};
* int isRedeclaration;
* int isDefinition;
* int isContainer;
* {@link CXIdxContainerInfo CXIdxContainerInfo} const * declAsContainer;
* int {@link #isImplicit};
* {@link CXIdxAttrInfo CXIdxAttrInfo} const * const * attributes;
* unsigned numAttributes;
* unsigned flags;
* }</code></pre>
*/
public class CXIdxDeclInfo extends Struct implements NativeResource {
/** The struct size in bytes. */
public static final int SIZEOF;
/** The struct alignment in bytes. */
public static final int ALIGNOF;
/** The struct member offsets. */
public static final int
ENTITYINFO,
CURSOR,
LOC,
SEMANTICCONTAINER,
LEXICALCONTAINER,
ISREDECLARATION,
ISDEFINITION,
ISCONTAINER,
DECLASCONTAINER,
ISIMPLICIT,
ATTRIBUTES,
NUMATTRIBUTES,
FLAGS;
static {
Layout layout = __struct(
__member(POINTER_SIZE),
__member(CXCursor.SIZEOF, CXCursor.ALIGNOF),
__member(CXIdxLoc.SIZEOF, CXIdxLoc.ALIGNOF),
__member(POINTER_SIZE),
__member(POINTER_SIZE),
__member(4),
__member(4),
__member(4),
__member(POINTER_SIZE),
__member(4),
__member(POINTER_SIZE),
__member(4),
__member(4)
);
SIZEOF = layout.getSize();
ALIGNOF = layout.getAlignment();
ENTITYINFO = layout.offsetof(0);
CURSOR = layout.offsetof(1);
LOC = layout.offsetof(2);
SEMANTICCONTAINER = layout.offsetof(3);
LEXICALCONTAINER = layout.offsetof(4);
ISREDECLARATION = layout.offsetof(5);
ISDEFINITION = layout.offsetof(6);
ISCONTAINER = layout.offsetof(7);
DECLASCONTAINER = layout.offsetof(8);
ISIMPLICIT = layout.offsetof(9);
ATTRIBUTES = layout.offsetof(10);
NUMATTRIBUTES = layout.offsetof(11);
FLAGS = layout.offsetof(12);
}
/**
* Creates a {@code CXIdxDeclInfo} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
* visible to the struct instance and vice versa.
*
* <p>The created instance holds a strong reference to the container object.</p>
*/
public CXIdxDeclInfo(ByteBuffer container) {
super(memAddress(container), __checkContainer(container, SIZEOF));
}
@Override
public int sizeof() { return SIZEOF; }
/** @return a {@link CXIdxEntityInfo} view of the struct pointed to by the {@code entityInfo} field. */
@NativeType("CXIdxEntityInfo const *")
public CXIdxEntityInfo entityInfo() { return nentityInfo(address()); }
/** @return a {@link CXCursor} view of the {@code cursor} field. */
public CXCursor cursor() { return ncursor(address()); }
/** @return a {@link CXIdxLoc} view of the {@code loc} field. */
public CXIdxLoc loc() { return nloc(address()); }
/** @return a {@link CXIdxContainerInfo} view of the struct pointed to by the {@code semanticContainer} field. */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo semanticContainer() { return nsemanticContainer(address()); }
/** generally same as {@code semanticContainer} but can be different in cases like out-of-line C++ member functions */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo lexicalContainer() { return nlexicalContainer(address()); }
/** @return the value of the {@code isRedeclaration} field. */
@NativeType("int")
public boolean isRedeclaration() { return nisRedeclaration(address()) != 0; }
/** @return the value of the {@code isDefinition} field. */
@NativeType("int")
public boolean isDefinition() { return nisDefinition(address()) != 0; }
/** @return the value of the {@code isContainer} field. */
@NativeType("int")
public boolean isContainer() { return nisContainer(address()) != 0; }
/** @return a {@link CXIdxContainerInfo} view of the struct pointed to by the {@code declAsContainer} field. */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo declAsContainer() { return ndeclAsContainer(address()); }
/** whether the declaration exists in code or was created implicitly by the compiler, e.g. implicit Objective-C methods for properties */
@NativeType("int")
public boolean isImplicit() { return nisImplicit(address()) != 0; }
/** @return a {@link PointerBuffer} view of the data pointed to by the {@code attributes} field. */
@NativeType("CXIdxAttrInfo const * const *")
public PointerBuffer attributes() { return nattributes(address()); }
/** @return the value of the {@code numAttributes} field. */
@NativeType("unsigned")
public int numAttributes() { return nnumAttributes(address()); }
/** @return the value of the {@code flags} field. */
@NativeType("unsigned")
public int flags() { return nflags(address()); }
// ----------------------------------- single-instance allocation -----------------------------------
/** Returns a new {@code CXIdxDeclInfo} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static CXIdxDeclInfo malloc() {
return wrap(CXIdxDeclInfo.class, nmemAllocChecked(SIZEOF));
}
/** Returns a new {@code CXIdxDeclInfo} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static CXIdxDeclInfo calloc() {
return wrap(CXIdxDeclInfo.class, nmemCallocChecked(1, SIZEOF));
}
/** Returns a new {@code CXIdxDeclInfo} instance allocated with {@link BufferUtils}. The backing buffer is GC-managed; no explicit free is required. */
public static CXIdxDeclInfo create() {
ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
return wrap(CXIdxDeclInfo.class, memAddress(container), container);
}
/** Returns a new {@code CXIdxDeclInfo} instance for the specified memory address. */
public static CXIdxDeclInfo create(long address) {
return wrap(CXIdxDeclInfo.class, address);
}
/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static CXIdxDeclInfo createSafe(long address) {
return address == NULL ? null : wrap(CXIdxDeclInfo.class, address);
}
/**
* Returns a new {@link CXIdxDeclInfo.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer malloc(int capacity) {
return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity); // __checkMalloc guards against capacity * SIZEOF overflow
}
/**
* Returns a new {@link CXIdxDeclInfo.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer calloc(int capacity) {
return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}
/**
* Returns a new {@link CXIdxDeclInfo.Buffer} instance allocated with {@link BufferUtils}. The backing buffer is GC-managed.
*
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer create(int capacity) {
ByteBuffer container = __create(capacity, SIZEOF);
return wrap(Buffer.class, memAddress(container), capacity, container);
}
/**
* Create a {@link CXIdxDeclInfo.Buffer} instance at the specified memory.
*
* @param address the memory address
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer create(long address, int capacity) {
return wrap(Buffer.class, address, capacity);
}
/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static CXIdxDeclInfo.Buffer createSafe(long address, int capacity) {
return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
// ----------------------------------- deprecated thread-local-stack aliases -----------------------------------
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo mallocStack() { return malloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo callocStack() { return calloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo mallocStack(MemoryStack stack) { return malloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo callocStack(MemoryStack stack) { return calloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static CXIdxDeclInfo.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
/**
* Returns a new {@code CXIdxDeclInfo} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
*/
public static CXIdxDeclInfo malloc(MemoryStack stack) {
return wrap(CXIdxDeclInfo.class, stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
* Returns a new {@code CXIdxDeclInfo} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
*/
public static CXIdxDeclInfo calloc(MemoryStack stack) {
return wrap(CXIdxDeclInfo.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
* Returns a new {@link CXIdxDeclInfo.Buffer} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer malloc(int capacity, MemoryStack stack) {
// NOTE(review): capacity * SIZEOF is a plain 32-bit multiply here; unlike the heap
// malloc(int) above there is no __checkMalloc overflow guard — confirm callers bound capacity.
return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
}
/**
* Returns a new {@link CXIdxDeclInfo.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static CXIdxDeclInfo.Buffer calloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// ----------------------------------- unsafe (raw struct address) field accessors -----------------------------------
/** Unsafe version of {@link #entityInfo}. */
public static CXIdxEntityInfo nentityInfo(long struct) { return CXIdxEntityInfo.create(memGetAddress(struct + CXIdxDeclInfo.ENTITYINFO)); }
/** Unsafe version of {@link #cursor}. */
public static CXCursor ncursor(long struct) { return CXCursor.create(struct + CXIdxDeclInfo.CURSOR); }
/** Unsafe version of {@link #loc}. */
public static CXIdxLoc nloc(long struct) { return CXIdxLoc.create(struct + CXIdxDeclInfo.LOC); }
/** Unsafe version of {@link #semanticContainer}. */
public static CXIdxContainerInfo nsemanticContainer(long struct) { return CXIdxContainerInfo.create(memGetAddress(struct + CXIdxDeclInfo.SEMANTICCONTAINER)); }
/** Unsafe version of {@link #lexicalContainer}. */
public static CXIdxContainerInfo nlexicalContainer(long struct) { return CXIdxContainerInfo.create(memGetAddress(struct + CXIdxDeclInfo.LEXICALCONTAINER)); }
/** Unsafe version of {@link #isRedeclaration}. */
public static int nisRedeclaration(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.ISREDECLARATION); }
/** Unsafe version of {@link #isDefinition}. */
public static int nisDefinition(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.ISDEFINITION); }
/** Unsafe version of {@link #isContainer}. */
public static int nisContainer(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.ISCONTAINER); }
/** Unsafe version of {@link #declAsContainer}. */
public static CXIdxContainerInfo ndeclAsContainer(long struct) { return CXIdxContainerInfo.create(memGetAddress(struct + CXIdxDeclInfo.DECLASCONTAINER)); }
/** Unsafe version of {@link #isImplicit}. */
public static int nisImplicit(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.ISIMPLICIT); }
/** Unsafe version of {@link #attributes() attributes}. The returned buffer's length is read from the {@code numAttributes} field. */
public static PointerBuffer nattributes(long struct) { return memPointerBuffer(memGetAddress(struct + CXIdxDeclInfo.ATTRIBUTES), nnumAttributes(struct)); }
/** Unsafe version of {@link #numAttributes}. */
public static int nnumAttributes(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.NUMATTRIBUTES); }
/** Unsafe version of {@link #flags}. */
public static int nflags(long struct) { return UNSAFE.getInt(null, struct + CXIdxDeclInfo.FLAGS); }
// -----------------------------------
/** An array of {@link CXIdxDeclInfo} structs. */
public static class Buffer extends StructBuffer<CXIdxDeclInfo, Buffer> implements NativeResource {
// Shared, address-less prototype used by StructBuffer to mint element views.
private static final CXIdxDeclInfo ELEMENT_FACTORY = CXIdxDeclInfo.create(-1L);
/**
* Creates a new {@code CXIdxDeclInfo.Buffer} instance backed by the specified container.
*
* Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
* will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
* by {@link CXIdxDeclInfo#SIZEOF}, and its mark will be undefined.
*
* <p>The created buffer instance holds a strong reference to the container object.</p>
*/
public Buffer(ByteBuffer container) {
super(container, container.remaining() / SIZEOF);
}
// Wraps a raw address with no backing container; caller is responsible for the memory's lifetime.
public Buffer(long address, int cap) {
super(address, null, -1, 0, cap, cap);
}
Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
super(address, container, mark, pos, lim, cap);
}
@Override
protected Buffer self() {
return this;
}
@Override
protected CXIdxDeclInfo getElementFactory() {
return ELEMENT_FACTORY;
}
/** @return a {@link CXIdxEntityInfo} view of the struct pointed to by the {@code entityInfo} field. */
@NativeType("CXIdxEntityInfo const *")
public CXIdxEntityInfo entityInfo() { return CXIdxDeclInfo.nentityInfo(address()); }
/** @return a {@link CXCursor} view of the {@code cursor} field. */
public CXCursor cursor() { return CXIdxDeclInfo.ncursor(address()); }
/** @return a {@link CXIdxLoc} view of the {@code loc} field. */
public CXIdxLoc loc() { return CXIdxDeclInfo.nloc(address()); }
/** @return a {@link CXIdxContainerInfo} view of the struct pointed to by the {@code semanticContainer} field. */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo semanticContainer() { return CXIdxDeclInfo.nsemanticContainer(address()); }
/** @return a {@link CXIdxContainerInfo} view of the struct pointed to by the {@link CXIdxDeclInfo#lexicalContainer} field. */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo lexicalContainer() { return CXIdxDeclInfo.nlexicalContainer(address()); }
/** @return the value of the {@code isRedeclaration} field. */
@NativeType("int")
public boolean isRedeclaration() { return CXIdxDeclInfo.nisRedeclaration(address()) != 0; }
/** @return the value of the {@code isDefinition} field. */
@NativeType("int")
public boolean isDefinition() { return CXIdxDeclInfo.nisDefinition(address()) != 0; }
/** @return the value of the {@code isContainer} field. */
@NativeType("int")
public boolean isContainer() { return CXIdxDeclInfo.nisContainer(address()) != 0; }
/** @return a {@link CXIdxContainerInfo} view of the struct pointed to by the {@code declAsContainer} field. */
@NativeType("CXIdxContainerInfo const *")
public CXIdxContainerInfo declAsContainer() { return CXIdxDeclInfo.ndeclAsContainer(address()); }
/** @return the value of the {@link CXIdxDeclInfo#isImplicit} field. */
@NativeType("int")
public boolean isImplicit() { return CXIdxDeclInfo.nisImplicit(address()) != 0; }
/** @return a {@link PointerBuffer} view of the data pointed to by the {@code attributes} field. */
@NativeType("CXIdxAttrInfo const * const *")
public PointerBuffer attributes() { return CXIdxDeclInfo.nattributes(address()); }
/** @return the value of the {@code numAttributes} field. */
@NativeType("unsigned")
public int numAttributes() { return CXIdxDeclInfo.nnumAttributes(address()); }
/** @return the value of the {@code flags} field. */
@NativeType("unsigned")
public int flags() { return CXIdxDeclInfo.nflags(address()); }
}
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.data.dao.assessment;
import org.sakaiproject.event.cover.NotificationService;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentAccessControlIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentBaseIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentTemplateIfc;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentIfc;
//import org.sakaiproject.tool.assessment.facade.AuthzQueriesFacadeAPI;
//import org.sakaiproject.tool.assessment.services.PersistenceService;
import java.util.Date;
import lombok.Getter;
import lombok.Setter;
/**
* This keeps track of the submission scheme, and the number allowed.
*
* @author Rachel Gollub
*/
public class AssessmentAccessControl
  implements java.io.Serializable, AssessmentAccessControlIfc
{
  // Keep in mind that "id" can be an assessmentId or an assessmentTemplateId. This
  // depends on the AssessmentBase object (superclass of AssessmentData and
  // AssessmentTemplateData) that is associated with this access control.

  private static final long serialVersionUID = 8330416434678491916L;

  /** Flag used when no editing of the property is desired. */
  public static final Integer NO_EDIT = Integer.valueOf(-1);
  // timedAssessment
  public static final Integer TIMED_ASSESSMENT = Integer.valueOf(1);
  public static final Integer DO_NOT_TIMED_ASSESSMENT = Integer.valueOf(0);
  // autoSubmit
  public static final Integer AUTO_SUBMIT = Integer.valueOf(1);
  public static final Integer DO_NOT_AUTO_SUBMIT = Integer.valueOf(0);
  // autoSave
  public static final Integer SAVE_ON_CLICK = Integer.valueOf(1);
  public static final Integer AUTO_SAVE = Integer.valueOf(2);
  // itemNavigation
  public static final Integer LINEAR_ACCESS = Integer.valueOf(1);
  public static final Integer RANDOM_ACCESS = Integer.valueOf(2);
  // assessmentFormat
  public static final Integer BY_QUESTION = Integer.valueOf(1);
  public static final Integer BY_PART = Integer.valueOf(2);
  public static final Integer BY_ASSESSMENT = Integer.valueOf(3);
  // itemNumbering
  public static final Integer CONTINUOUS_NUMBERING = Integer.valueOf(1);
  public static final Integer RESTART_NUMBERING_BY_PART = Integer.valueOf(2);
  // itemScoreDisplay — declared final to match the sibling constants; these two were
  // previously mutable public statics, which permitted accidental reassignment.
  public static final Integer DISPLAY_ITEM_SCORE_DURING_ASSESSMENT = Integer.valueOf(1);
  public static final Integer HIDE_ITEM_SCORE_DURING_ASSESSMENT = Integer.valueOf(2);
  // markForReview
  public static final Integer MARK_FOR_REVIEW = Integer.valueOf(1);
  public static final Integer NOT_MARK_FOR_REVIEW = Integer.valueOf(0);
  // submissionsAllowed
  public static final Integer UNLIMITED_SUBMISSIONS_ALLOWED = Integer.valueOf(9999);
  // lateHandling
  public static final Integer ACCEPT_LATE_SUBMISSION = Integer.valueOf(1);
  public static final Integer NOT_ACCEPT_LATE_SUBMISSION = Integer.valueOf(2);

  private Long id;
  private AssessmentBaseIfc assessmentBase;
  private Integer submissionsAllowed;
  private Boolean unlimitedSubmissions;
  private Integer submissionsSaved;
  private Integer assessmentFormat;
  private Integer bookMarkingItem;
  private Integer timeLimit;
  private Integer timedAssessment;
  private Integer retryAllowed;
  private Integer lateHandling;
  private Integer instructorNotification;
  private Date startDate;
  private Date dueDate;
  private Date scoreDate;
  private Date feedbackDate;
  @Setter @Getter private Date feedbackEndDate;
  @Setter @Getter private Double feedbackScoreThreshold;
  private Date retractDate;
  private Integer autoSubmit;      // auto submit when time expires
  private Integer itemNavigation;  // LINEAR_ACCESS (1) or RANDOM_ACCESS (2)
  private Integer itemNumbering;   // continuous between parts (1), restart between parts (2)
  private Integer displayScoreDuringAssessments;
  private String submissionMessage;
  private String finalPageUrl;
  private String releaseTo;
  private String password;
  private Integer markForReview;
  private Boolean honorPledge;

  /**
   * Creates a new access control with unlimited submissions (9999) and one saved copy.
   */
  public AssessmentAccessControl()
  {
    this.submissionsAllowed = Integer.valueOf(9999); // = no limit
    this.submissionsSaved = Integer.valueOf(1);      // no. of copy
  }

  /**
   * Fully-populated constructor. For the Integer option fields, {@code 0} generally means
   * "cannot edit" and positive values select a mode (see the class constants).
   */
  public AssessmentAccessControl(Integer submissionsAllowed, Integer submissionsSaved,
                                 Integer assessmentFormat, Integer bookMarkingItem,
                                 Integer timeLimit, Integer timedAssessment,
                                 Integer retryAllowed, Integer lateHandling, Integer instructorNotification,
                                 Date startDate, Date dueDate,
                                 Date scoreDate, Date feedbackDate,
                                 Date retractDate, Integer autoSubmit,
                                 Integer itemNavigation, Integer itemNumbering, Integer displayScoreDuringAssessments,
                                 String submissionMessage, String releaseTo)
  {
    this.submissionsAllowed = submissionsAllowed; // = no limit
    this.submissionsSaved = submissionsSaved;     // no. of copy
    this.assessmentFormat = assessmentFormat;
    this.bookMarkingItem = bookMarkingItem;
    this.timeLimit = timeLimit;
    this.timedAssessment = timedAssessment;
    this.retryAllowed = retryAllowed;
    this.lateHandling = lateHandling;
    this.instructorNotification = instructorNotification;
    this.startDate = startDate;
    this.dueDate = dueDate;
    this.scoreDate = scoreDate;
    this.feedbackDate = feedbackDate;
    this.retractDate = retractDate;
    this.autoSubmit = autoSubmit;           // cannot edit (0), auto submit when time expires (1)
    this.itemNavigation = itemNavigation;   // cannot edit (0), linear (1) or random (2)
    this.itemNumbering = itemNumbering;     // cannot edit (0), continuous (1), restart between parts (2)
    this.displayScoreDuringAssessments = displayScoreDuringAssessments;
    this.submissionMessage = submissionMessage;
    this.releaseTo = releaseTo;
  }

  /**
   * Copies all scheduling/option fields into a new instance. Note that {@code id} and
   * {@code assessmentBase} are NOT copied — presumably so the clone can be persisted
   * as a new row and re-attached; TODO(review): confirm against callers.
   */
  public Object clone() throws CloneNotSupportedException{
    AssessmentAccessControl cloned = new AssessmentAccessControl(
        this.getSubmissionsAllowed(), this.getSubmissionsSaved(),
        this.getAssessmentFormat(), this.getBookMarkingItem(),
        this.getTimeLimit(), this.getTimedAssessment(),
        this.getRetryAllowed(), this.getLateHandling(), this.getInstructorNotification(),
        this.getStartDate(), this.getDueDate(),
        this.getScoreDate(), this.getFeedbackDate(),
        this.getRetractDate(), this.getAutoSubmit(),
        this.getItemNavigation(), this.getItemNumbering(), this.getDisplayScoreDuringAssessments(),
        this.getSubmissionMessage(), this.getReleaseTo());
    // Fields not covered by the constructor (the original code also re-set several
    // constructor-covered fields here; those redundant assignments were dropped):
    cloned.setPassword(this.password);
    cloned.setFinalPageUrl(this.finalPageUrl);
    cloned.setUnlimitedSubmissions(this.unlimitedSubmissions);
    cloned.setMarkForReview(this.markForReview);
    cloned.setHonorPledge(this.honorPledge);
    cloned.setFeedbackEndDate(this.feedbackEndDate);
    cloned.setFeedbackScoreThreshold(this.feedbackScoreThreshold);
    return cloned;
  }

  public Long getId() { return id; }

  public void setId(Long id) { this.id = id; }

  public void setAssessmentBase(AssessmentBaseIfc assessmentBase) { this.assessmentBase = assessmentBase; }

  /**
   * @return the associated base object, downcast to the template or assessment interface
   *         depending on its template flag (the cast also validates the runtime type).
   */
  public AssessmentBaseIfc getAssessmentBase()
  {
    // Boolean.TRUE.equals(...) is null-safe, unlike the previous getIsTemplate().equals(TRUE).
    if (Boolean.TRUE.equals(assessmentBase.getIsTemplate()))
      return (AssessmentTemplateIfc) assessmentBase;
    else
      return (AssessmentIfc) assessmentBase;
  }

  public Integer getSubmissionsAllowed() { return submissionsAllowed; }

  public void setSubmissionsAllowed(Integer submissionsAllowed) { this.submissionsAllowed = submissionsAllowed; }

  public Integer getSubmissionsSaved() { return submissionsSaved; }

  public void setSubmissionsSaved(Integer submissionsSaved) { this.submissionsSaved = submissionsSaved; }

  public Integer getAssessmentFormat() { return assessmentFormat; }

  public void setAssessmentFormat(Integer assessmentFormat) { this.assessmentFormat = assessmentFormat; }

  public Integer getBookMarkingItem() { return bookMarkingItem; }

  public void setBookMarkingItem(Integer bookMarkingItem) { this.bookMarkingItem = bookMarkingItem; }

  public Integer getTimeLimit() { return timeLimit; }

  public void setTimeLimit(Integer timeLimit) { this.timeLimit = timeLimit; }

  public Integer getTimedAssessment() { return timedAssessment; }

  public void setTimedAssessment(Integer timedAssessment) { this.timedAssessment = timedAssessment; }

  public void setRetryAllowed(Integer retryAllowed) { this.retryAllowed = retryAllowed; }

  public Integer getRetryAllowed() { return retryAllowed; }

  public void setLateHandling(Integer lateHandling) { this.lateHandling = lateHandling; }

  public Integer getLateHandling() { return lateHandling; }

  public void setInstructorNotification(Integer instructorNotification) { this.instructorNotification = instructorNotification; }

  public Integer getInstructorNotification() { return instructorNotification; }

  public Date getStartDate() { return this.startDate; }

  public void setStartDate(Date startDate) { this.startDate = startDate; }

  public Date getDueDate() { return this.dueDate; }

  public void setDueDate(Date dueDate) { this.dueDate = dueDate; }

  public Date getScoreDate() { return this.scoreDate; }

  public void setScoreDate(Date scoreDate) { this.scoreDate = scoreDate; }

  public Date getFeedbackDate() { return this.feedbackDate; }

  public void setFeedbackDate(Date feedbackDate) { this.feedbackDate = feedbackDate; }

  public Date getRetractDate() { return this.retractDate; }

  public void setRetractDate(Date retractDate) { this.retractDate = retractDate; }

  public void setAutoSubmit(Integer autoSubmit) { this.autoSubmit = autoSubmit; }

  public Integer getAutoSubmit() { return autoSubmit; }

  public void setItemNavigation(Integer itemNavigation) { this.itemNavigation = itemNavigation; }

  public Integer getItemNavigation() { return itemNavigation; }

  public void setItemNumbering(Integer itemNumbering) { this.itemNumbering = itemNumbering; }

  public Integer getItemNumbering() { return itemNumbering; }

  public void setDisplayScoreDuringAssessments(Integer displayScoreDuringAssessments) { this.displayScoreDuringAssessments = displayScoreDuringAssessments; }

  public Integer getDisplayScoreDuringAssessments() { return displayScoreDuringAssessments; }

  public void setSubmissionMessage(String submissionMessage) { this.submissionMessage = submissionMessage; }

  public String getSubmissionMessage() { return submissionMessage; }

  public void setFinalPageUrl(String finalPageUrl) { this.finalPageUrl = finalPageUrl; }

  public String getFinalPageUrl() { return finalPageUrl; }

  public String getReleaseTo() { return this.releaseTo; }

  public void setReleaseTo(String releaseTo) { this.releaseTo = releaseTo; }

  public String getPassword() { return this.password; }

  public void setPassword(String password) { this.password = password; }

  public Boolean getUnlimitedSubmissions() { return this.unlimitedSubmissions; }

  public void setUnlimitedSubmissions(Boolean unlimitedSubmissions) { this.unlimitedSubmissions = unlimitedSubmissions; }

  public Integer getMarkForReview() { return this.markForReview; }

  public void setMarkForReview(Integer markForReview) { this.markForReview = markForReview; }

  @Override
  public Boolean getHonorPledge() { return this.honorPledge; }

  @Override
  public void setHonorPledge(Boolean honorPledge) { this.honorPledge = honorPledge; }
}
| |
/*
* DynamicJava - Copyright (C) 1999-2001
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL DYADE BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Except as contained in this notice, the name of Dyade shall not be
* used in advertising or otherwise to promote the sale, use or other
* dealings in this Software without prior written authorization from
* Dyade.
*
*/
package koala.dynamicjava.interpreter;
import jeliot.mcode.*;
import koala.dynamicjava.interpreter.error.CatchedExceptionError;
import koala.dynamicjava.interpreter.error.ExecutionError;
import koala.dynamicjava.interpreter.throwable.ThrownException;
import koala.dynamicjava.parser.wrapper.ParseError;
import koala.dynamicjava.tree.Node;
/**
* This exception is thrown when an error occurs while
* interpreting a statement.
*
* @author Stephane Hillion
* @version 1.0 - 1999/11/14
*/
public class InterpreterException extends ThrownException {
    /**
     * The source code information
     */
    protected SourceInformation sourceInformation;

    /**
     * The detailed message (HTML fragment)
     */
    protected String message;

    /**
     * Constructs an <code>InterpreterException</code> from a ParseError.
     */
    public InterpreterException(ParseError e) {
        super(e);
        String m = escapeForHtml(e.getMessage());
        if (e.getLine() != -1) {
            sourceInformation = new SourceInformation(e.getFilename(),
                                                      e.getLine(),
                                                      e.getColumn());
            message = "<H2>Syntax Error</H2><P><B>Line " + e.getLine() +
                ", Column " + e.getColumn() + ":</P><P>" + m + "</B></P>";
        } else {
            message = "<H2>Syntax Error</H2><P>" + m + "</P>";
            // No position in the error object; mine the message text for one.
            sourceInformation = locateFromMessage(m);
        }
    }

    /**
     * Constructs an <code>InterpreterException</code> from an ExecutionError.
     */
    public InterpreterException(ExecutionError e) {
        super(e);
        boolean sourceGot = false;
        Node n = e.getNode();
        if (n != null && n.getFilename() != null) {
            message = "<H2>Execution Error</H2><P><B>Line "+
                n.getBeginLine()+", Column "+n.getBeginColumn()+
                ":</B></P>";
            sourceInformation = new SourceInformation(n.getFilename(),
                                                      n.getBeginLine(),
                                                      n.getBeginColumn());
            sourceGot = true;
        } else {
            message = "<H2>Execution Error</H2>";
        }

        // The three former branches differed only in how the raw text was obtained;
        // the escaping and fallback-location logic is shared below.
        String raw;
        if (e instanceof CatchedExceptionError) {
            raw = ((CatchedExceptionError)e).getException().toString();
        } else if (e instanceof ThrownException) {
            raw = ((ThrownException)e).getException().toString();
        } else {
            raw = e.getMessage();
        }
        String m = escapeForHtml(raw);
        message += "<P>" + m + "</P>";
        if (!sourceGot) {
            sourceInformation = locateFromMessage(m);
        }
    }

    /**
     * Escapes angle brackets and converts newlines for embedding in the HTML message.
     * The previous code's replace(m, "<", "<") / replace(m, ">", ">") were no-ops
     * (the &lt;/&gt; entities had been lost), leaving markup-breaking characters in place.
     */
    private static String escapeForHtml(String m) {
        m = MCodeUtilities.replace(m, "<", "&lt;");
        m = MCodeUtilities.replace(m, ">", "&gt;");
        m = MCodeUtilities.replace(m, "\n", "<BR>");
        m = MCodeUtilities.replace(m, "\r", "");
        return m;
    }

    /**
     * Best-effort source location recovered from the message text ("line"/"column"
     * numbers), with "buffer" as the filename placeholder.
     */
    private static SourceInformation locateFromMessage(String m) {
        int line = MCodeUtilities.findNumber(m, "line");
        int column = MCodeUtilities.findNumber(m, "column");
        return new SourceInformation("buffer", line, column);
    }

    /** Returns the wrapped error. */
    public Throwable getError() {
        return thrown;
    }

    /**
     * Returns the source code information if available, or null
     */
    public SourceInformation getSourceInformation() {
        return sourceInformation;
    }

    /**
     * Returns the detailed message
     */
    public String getMessage() {
        return message;
    }

    /**
     * To represent the source code informations
     */
    public static class SourceInformation {
        // The fields
        private String filename;
        private int line;
        private int column;

        /**
         * Creates a source information
         */
        public SourceInformation(String filename, int line, int column) {
            this.filename = filename;
            this.line = line;
            this.column = column;
        }

        /**
         * Returns the filename
         */
        public String getFilename() {
            return filename;
        }

        /**
         * Returns the line where the error occurs
         */
        public int getLine() {
            return line;
        }

        /**
         * Returns the column where the error occurs
         */
        public int getColumn() {
            return column;
        }
    }
}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.ir.transform.statement;
import gw.internal.gosu.ir.nodes.IRProperty;
import gw.internal.gosu.ir.nodes.IRPropertyFactory;
import gw.internal.gosu.ir.nodes.IRPropertyFromPropertyInfo;
import gw.internal.gosu.ir.transform.ExpressionTransformer;
import gw.internal.gosu.ir.transform.TopLevelTransformationContext;
import gw.internal.gosu.ir.transform.util.AccessibilityUtil;
import gw.internal.gosu.ir.transform.util.IRTypeResolver;
import gw.internal.gosu.parser.BeanAccess;
import gw.internal.gosu.parser.GosuVarPropertyInfo;
import gw.internal.gosu.parser.JavaFieldPropertyInfo;
import gw.internal.gosu.parser.JavaPropertyInfo;
import gw.internal.gosu.parser.expressions.Identifier;
import gw.internal.gosu.parser.statements.MemberAssignmentStatement;
import gw.internal.gosu.runtime.GosuRuntimeMethods;
import gw.lang.ir.IRExpression;
import gw.lang.ir.IRStatement;
import gw.lang.ir.IRSymbol;
import gw.lang.ir.IRType;
import gw.lang.ir.statement.IRAssignmentStatement;
import gw.lang.ir.statement.IRStatementList;
import gw.lang.parser.IExpression;
import gw.lang.parser.Keyword;
import gw.lang.parser.exceptions.ParseException;
import gw.lang.reflect.IMetaType;
import gw.lang.reflect.IPlaceholder;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IPropertyInfoDelegate;
import gw.lang.reflect.IType;
import gw.lang.reflect.gs.IGosuVarPropertyInfo;
import gw.lang.reflect.java.IJavaPropertyInfo;
/**
*/
public class MemberAssignmentStatementTransformer extends AbstractStatementTransformer<MemberAssignmentStatement> {
/**
 * Entry point: transforms the given member-assignment parse-tree statement into IR.
 *
 * @param cc   the transformation context for the enclosing compilation
 * @param stmt the member-assignment statement to compile
 * @return the compiled IR statement
 */
public static IRStatement compile( TopLevelTransformationContext cc, MemberAssignmentStatement stmt ) {
  return new MemberAssignmentStatementTransformer( cc, stmt ).compile();
}
/** Private: instances are created only via {@link #compile(TopLevelTransformationContext, MemberAssignmentStatement)}. */
private MemberAssignmentStatementTransformer( TopLevelTransformationContext cc, MemberAssignmentStatement stmt ) {
super( cc, stmt );
}
/**
* Compiles the assignment: a null member name means a dynamic foo[bar] access handled
* reflectively; otherwise the property is resolved statically and assigned as a
* static or instance member.
*/
@Override
protected IRStatement compile_impl() {
String strMemberName = _stmt().getMemberName();
if( strMemberName == null ) {
// If the name is null, it's of the form foo[bar] where bar is a variable. We have to do the access reflectively
IRExpression memberNameExpression = ExpressionTransformer.compile( _stmt().getMemberExpression(), _cc() );
if( _stmt().getRootExpression().getType() instanceof IMetaType ) {
// If it's a meta type, assume it's a static property
return reflectivelySetProperty( _stmt().getRootExpression().getType(), memberNameExpression, null, false );
}
else {
return reflectivelySetProperty( _stmt().getRootExpression().getType(), memberNameExpression, ExpressionTransformer.compile( _stmt().getRootExpression(), _cc() ), true );
}
}
else {
try {
IPropertyInfo pi = BeanAccess.getPropertyInfo( _stmt().getRootExpression().getType(), strMemberName, null, null, null );
IRProperty irProperty = IRPropertyFactory.createIRProperty( pi );
IRType propertyType = irProperty.getType();
if( pi.isStatic() ) {
return assignStaticMember( pi, irProperty, propertyType );
}
else {
return assignInstanceMember( pi, irProperty );
}
}
catch( ParseException e ) {
// Property resolution failures are unexpected at this stage; surface as unchecked.
throw new RuntimeException( e );
}
}
}
private IRStatement assignInstanceMember( IPropertyInfo pi, IRProperty irProperty ) {
IExpression rootExpr = _stmt().getRootExpression();
IRExpression root;
IRSymbol tempRoot;
IRStatement ret;
IRAssignmentStatement tempRootAssn;
if( _stmt().isCompoundStatement() )
{
IType concreteType = getConcreteType( rootExpr.getType() );
root = pushRootExpression( concreteType, rootExpr, irProperty );
tempRoot = _cc().makeAndIndexTempSymbol( getDescriptor(concreteType) );
tempRootAssn = buildAssignment( tempRoot, root );
root = identifier( tempRoot );
ExpressionTransformer.addTempSymbolForCompoundAssignment( rootExpr, tempRoot );
}
else
{
root = pushRootExpression( getConcreteType( rootExpr.getType() ), rootExpr, irProperty );
tempRootAssn = null;
}
if( isScopedField( pi ) ) {
IGosuVarPropertyInfo propertyInfo = getActualPropertyInfo( pi );
ret = setScopedSymbolValue( propertyInfo, _stmt().getExpression() );
}
else if( irProperty.isBytecodeProperty() ) {
IRExpression rhs = compileRhs( irProperty);
if( irProperty.isField() ) {
ret = setField( irProperty, root, rhs );
}
else if( isWriteMethodMissingAndUsingLikeNamedField( irProperty ) ) {
ret = setField( irProperty.getOwningIType(),
getField( ((IRPropertyFromPropertyInfo)irProperty).getTerminalProperty() ),
getWritableType( irProperty ),
irProperty.getAccessibility(),
root,
rhs );
}
else {
if( isSuperCall( _stmt().getRootExpression() ) ) {
ret = buildMethodCall( callSpecialMethod( getDescriptor( _cc().getSuperType() ), irProperty.getSetterMethod(), root, exprList( rhs ) ) );
}
else {
IRExpression irMethodCall = callMethod( irProperty.getSetterMethod(), root, exprList( rhs ) );
assignStructuralTypeOwner( rootExpr, irMethodCall );
ret = buildMethodCall( irMethodCall );
}
}
}
else {
ret = reflectivelySetProperty( pi.getOwnersType(), pushConstant( pi.getDisplayName() ), root, false );
}
if( _stmt().isCompoundStatement() )
{
ExpressionTransformer.clearTempSymbolForCompoundAssignment();
return new IRStatementList( false, tempRootAssn, ret );
}
return ret;
}
private IRExpression pushRootExpression( IType rootType, IExpression rootExpr, IRProperty pi ) {
// Push the root expression value
IRExpression root = ExpressionTransformer.compile( rootExpr, _cc() );
//... and make sure it's boxed for the method call
root = boxValue( rootType, root );
if( pi != null && !pi.isStatic() ) {
IRType type = pi.getTargetRootIRType();
if( !type.isAssignableFrom( root.getType() ) ) {
root = buildCast( type, root );
}
}
return root;
}
private boolean isWriteMethodMissingAndUsingLikeNamedField( IRProperty irPi ) {
if( !(irPi instanceof IRPropertyFromPropertyInfo) ) {
return false;
}
IPropertyInfo terminalPi = ((IRPropertyFromPropertyInfo)irPi).getTerminalProperty();
return terminalPi instanceof IJavaPropertyInfo && isField( terminalPi );
}
private IRExpression compileRhs( IRProperty pi ) {
IRExpression rhs = ExpressionTransformer.compile( _stmt().getExpression(), _cc() );
if( !pi.isStatic() ) {
IRType type = getWritableType( pi );
if( !type.isAssignableFrom( rhs.getType() ) ) {
rhs = buildCast( type, rhs );
}
}
return rhs;
}
private IRType getWritableType( IRProperty pi ) {
if( !(pi instanceof IRPropertyFromPropertyInfo) ) {
return pi.getType();
}
IRType type;
IPropertyInfo terminalPi = ((IRPropertyFromPropertyInfo)pi).getTerminalProperty();
if( terminalPi instanceof IJavaPropertyInfo && isField( terminalPi ) ) {
type = IRTypeResolver.getDescriptor( ((IJavaPropertyInfo)terminalPi).getPublicField().getType() );
}
else {
type = pi.getType();
}
return type;
}
private IRStatement reflectivelySetProperty( IType type, IRExpression propertyName, IRExpression root, boolean forceDynamic ) {
IRExpression value = ExpressionTransformer.compile( _stmt().getExpression(), _cc() );
IRExpression setter;
if( forceDynamic || type instanceof IPlaceholder ) {
// Placeholder types, such as snapshot types, have to get properties dynamically. They can't have static properties, though.
if( root == null ) {
throw new IllegalArgumentException( "Cannot invoke a static property reflectively on a placeholder type or via dynamic reflection" );
}
setter = callStaticMethod( GosuRuntimeMethods.class, "setPropertyDynamically", new Class[]{Object.class, String.class, Object.class},
exprList( root, propertyName, value ) );
}
else {
// Everything else should dispatch to the statically-determined property
setter = callStaticMethod( GosuRuntimeMethods.class, "setProperty", new Class[]{Object.class, IType.class, String.class, Object.class},
exprList( root, pushType( type ), propertyName, value ) );
}
return buildMethodCall( setter );
}
private IRStatement assignStaticMember( IPropertyInfo pi, IRProperty irProperty, IRType propertyType ) {
// Unwrap the property, and use the real owner's type as the type to compile against
while( pi instanceof IPropertyInfoDelegate ) {
pi = ((IPropertyInfoDelegate)pi).getSource();
}
IType rootType = pi.getOwnersType();
if( isScopedField( pi ) ) {
IGosuVarPropertyInfo propertyInfo = getActualPropertyInfo( pi );
return setScopedSymbolValue( propertyInfo, _stmt().getExpression() );
}
else {
if( irProperty.isBytecodeProperty() ) {
IRExpression rhs = compileRhs( irProperty );
if( irProperty.isField() ) {
return setStaticField( rootType, getField( pi ), propertyType, AccessibilityUtil.forFeatureInfo( pi ), rhs );
}
else {
return buildMethodCall( callMethod( irProperty.getSetterMethod(), null, exprList( rhs ) ) );
}
}
else {
return reflectivelySetProperty( pi.getOwnersType(), pushConstant( pi.getDisplayName() ), nullLiteral(), false );
}
}
}
private boolean isField( IPropertyInfo pi ) {
while( pi instanceof IPropertyInfoDelegate ) {
pi = ((IPropertyInfoDelegate)pi).getSource();
}
if( pi instanceof JavaPropertyInfo ) {
JavaPropertyInfo jpi = (JavaPropertyInfo)pi;
return jpi.getWriteMethodInfo() == null && jpi.getPublicField() != null;
}
return pi instanceof GosuVarPropertyInfo ||
pi instanceof JavaFieldPropertyInfo ||
((pi instanceof IPropertyInfoDelegate) && isField( ((IPropertyInfoDelegate)pi).getSource() ));
}
private String getField( IPropertyInfo pi ) {
if( !isField( pi ) ) {
throw new IllegalArgumentException( pi.getName() + " is not a 'field' property" );
}
while( pi instanceof IPropertyInfoDelegate ) {
pi = ((IPropertyInfoDelegate)pi).getSource();
}
if( pi instanceof IJavaPropertyInfo ) {
IJavaPropertyInfo jpi = (IJavaPropertyInfo)pi;
return jpi.getPublicField().getName();
}
if( pi.getClass() == JavaFieldPropertyInfo.class ) {
return ((JavaFieldPropertyInfo)pi).getField().getName();
}
return pi.getName();
}
private boolean isSuperCall( IExpression rootExpr ) {
return rootExpr instanceof Identifier && Keyword.KW_super.equals( ((Identifier)rootExpr).getSymbol().getName() );
}
}
| |
package org.springframework.security.web.authentication.ui;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.WebAttributes;
import org.springframework.security.web.authentication.AbstractAuthenticationProcessingFilter;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.authentication.rememberme.AbstractRememberMeServices;
import org.springframework.security.web.csrf.CsrfToken;
import org.springframework.web.filter.GenericFilterBean;
/**
 * For internal use with namespace configuration in the case where a user doesn't
 * configure a login page. The configuration code will insert this filter in the chain
 * instead.
 *
 * Will only work if a redirect is used to the login page.
 *
 * @author Luke Taylor
 * @since 2.0
 */
public class DefaultLoginPageGeneratingFilter extends GenericFilterBean {

    public static final String DEFAULT_LOGIN_PAGE_URL = "/login";
    public static final String ERROR_PARAMETER_NAME = "error";

    private String loginPageUrl;
    private String logoutSuccessUrl;
    private String failureUrl;
    private boolean formLoginEnabled;
    private boolean openIdEnabled;
    private String authenticationUrl;
    private String usernameParameter;
    private String passwordParameter;
    private String rememberMeParameter;
    private String openIDauthenticationUrl;
    private String openIDusernameParameter;
    private String openIDrememberMeParameter;

    public DefaultLoginPageGeneratingFilter() {
    }

    /**
     * Configures the generated page from the given processing filter; a
     * {@link UsernamePasswordAuthenticationFilter} enables the form-login section,
     * any other filter type enables the OpenID section.
     */
    public DefaultLoginPageGeneratingFilter(AbstractAuthenticationProcessingFilter filter) {
        if (filter instanceof UsernamePasswordAuthenticationFilter) {
            init((UsernamePasswordAuthenticationFilter) filter, null);
        }
        else {
            init(null, filter);
        }
    }

    public DefaultLoginPageGeneratingFilter(
            UsernamePasswordAuthenticationFilter authFilter,
            AbstractAuthenticationProcessingFilter openIDFilter) {
        init(authFilter, openIDFilter);
    }

    /**
     * Derives page URLs and form parameter names from the supplied filters.
     * Either argument may be null, disabling the corresponding login section.
     */
    private void init(UsernamePasswordAuthenticationFilter authFilter,
            AbstractAuthenticationProcessingFilter openIDFilter) {
        this.loginPageUrl = DEFAULT_LOGIN_PAGE_URL;
        this.logoutSuccessUrl = DEFAULT_LOGIN_PAGE_URL + "?logout";
        this.failureUrl = DEFAULT_LOGIN_PAGE_URL + "?" + ERROR_PARAMETER_NAME;
        if (authFilter != null) {
            formLoginEnabled = true;
            usernameParameter = authFilter.getUsernameParameter();
            passwordParameter = authFilter.getPasswordParameter();
            // Only render a remember-me checkbox when the filter actually uses
            // parameter-based remember-me services
            if (authFilter.getRememberMeServices() instanceof AbstractRememberMeServices) {
                rememberMeParameter = ((AbstractRememberMeServices) authFilter
                        .getRememberMeServices()).getParameter();
            }
        }
        if (openIDFilter != null) {
            openIdEnabled = true;
            openIDusernameParameter = "openid_identifier";
            if (openIDFilter.getRememberMeServices() instanceof AbstractRememberMeServices) {
                openIDrememberMeParameter = ((AbstractRememberMeServices) openIDFilter
                        .getRememberMeServices()).getParameter();
            }
        }
    }

    /** Whether this filter will generate any page at all. */
    public boolean isEnabled() {
        return formLoginEnabled || openIdEnabled;
    }

    public void setLogoutSuccessUrl(String logoutSuccessUrl) {
        this.logoutSuccessUrl = logoutSuccessUrl;
    }

    public String getLoginPageUrl() {
        return loginPageUrl;
    }

    public void setLoginPageUrl(String loginPageUrl) {
        this.loginPageUrl = loginPageUrl;
    }

    public void setFailureUrl(String failureUrl) {
        this.failureUrl = failureUrl;
    }

    public void setFormLoginEnabled(boolean formLoginEnabled) {
        this.formLoginEnabled = formLoginEnabled;
    }

    public void setOpenIdEnabled(boolean openIdEnabled) {
        this.openIdEnabled = openIdEnabled;
    }

    public void setAuthenticationUrl(String authenticationUrl) {
        this.authenticationUrl = authenticationUrl;
    }

    public void setUsernameParameter(String usernameParameter) {
        this.usernameParameter = usernameParameter;
    }

    public void setPasswordParameter(String passwordParameter) {
        this.passwordParameter = passwordParameter;
    }

    /** Sets the remember-me parameter for both the form-login and OpenID forms. */
    public void setRememberMeParameter(String rememberMeParameter) {
        this.rememberMeParameter = rememberMeParameter;
        this.openIDrememberMeParameter = rememberMeParameter;
    }

    public void setOpenIDauthenticationUrl(String openIDauthenticationUrl) {
        this.openIDauthenticationUrl = openIDauthenticationUrl;
    }

    public void setOpenIDusernameParameter(String openIDusernameParameter) {
        this.openIDusernameParameter = openIDusernameParameter;
    }

    /**
     * Serves the generated login page for login, login-error and logout-success
     * requests; passes every other request down the chain.
     */
    public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
            throws IOException, ServletException {
        HttpServletRequest request = (HttpServletRequest) req;
        HttpServletResponse response = (HttpServletResponse) res;
        boolean loginError = isErrorPage(request);
        boolean logoutSuccess = isLogoutSuccess(request);
        if (isLoginUrlRequest(request) || loginError || logoutSuccess) {
            String loginPageHtml = generateLoginPageHtml(request, loginError,
                    logoutSuccess);
            response.setContentType("text/html;charset=UTF-8");
            // Content-Length is a byte count: measure the UTF-8 encoding, not the
            // char count, or multi-byte characters truncate the response.
            response.setContentLength(loginPageHtml.getBytes(StandardCharsets.UTF_8).length);
            response.getWriter().write(loginPageHtml);
            return;
        }
        chain.doFilter(request, response);
    }

    /**
     * Builds the HTML for the login page, optionally including a login-error or
     * logout-success message.
     */
    private String generateLoginPageHtml(HttpServletRequest request, boolean loginError,
            boolean logoutSuccess) {
        String errorMsg = "none";
        if (loginError) {
            HttpSession session = request.getSession(false);
            if (session != null) {
                AuthenticationException ex = (AuthenticationException) session
                        .getAttribute(WebAttributes.AUTHENTICATION_EXCEPTION);
                String message = ex != null ? ex.getMessage() : null;
                // Escape the exception message: it can echo attacker-influenced
                // input (e.g. the submitted username) and must not inject HTML.
                errorMsg = message != null ? htmlEscape(message) : "none";
            }
        }
        StringBuilder sb = new StringBuilder();
        sb.append("<html><head><title>Login Page</title></head>");
        if (formLoginEnabled) {
            sb.append("<body onload='document.f.").append(usernameParameter)
                    .append(".focus();'>\n");
        }
        if (loginError) {
            sb.append("<p><font color='red'>Your login attempt was not successful, try again.<br/><br/>Reason: ");
            sb.append(errorMsg);
            sb.append("</font></p>");
        }
        if (logoutSuccess) {
            sb.append("<p><font color='green'>You have been logged out</font></p>");
        }
        if (formLoginEnabled) {
            sb.append("<h3>Login with Username and Password</h3>");
            sb.append("<form name='f' action='").append(request.getContextPath())
                    .append(authenticationUrl).append("' method='POST'>\n");
            sb.append("<table>\n");
            sb.append("	<tr><td>User:</td><td><input type='text' name='");
            sb.append(usernameParameter).append("' value='").append("'></td></tr>\n");
            sb.append("	<tr><td>Password:</td><td><input type='password' name='")
                    .append(passwordParameter).append("'/></td></tr>\n");
            if (rememberMeParameter != null) {
                sb.append("	<tr><td><input type='checkbox' name='")
                        .append(rememberMeParameter)
                        .append("'/></td><td>Remember me on this computer.</td></tr>\n");
            }
            sb.append("	<tr><td colspan='2'><input name=\"submit\" type=\"submit\" value=\"Login\"/></td></tr>\n");
            renderHiddenInputs(sb, request);
            sb.append("</table>\n");
            sb.append("</form>");
        }
        if (openIdEnabled) {
            sb.append("<h3>Login with OpenID Identity</h3>");
            sb.append("<form name='oidf' action='").append(request.getContextPath())
                    .append(openIDauthenticationUrl).append("' method='POST'>\n");
            sb.append("<table>\n");
            sb.append("	<tr><td>Identity:</td><td><input type='text' size='30' name='");
            sb.append(openIDusernameParameter).append("'/></td></tr>\n");
            if (openIDrememberMeParameter != null) {
                sb.append("	<tr><td><input type='checkbox' name='")
                        .append(openIDrememberMeParameter)
                        .append("'></td><td>Remember me on this computer.</td></tr>\n");
            }
            sb.append("	<tr><td colspan='2'><input name=\"submit\" type=\"submit\" value=\"Login\"/></td></tr>\n");
            sb.append("</table>\n");
            renderHiddenInputs(sb, request);
            sb.append("</form>");
        }
        sb.append("</body></html>");
        return sb.toString();
    }

    /**
     * Minimal HTML-entity escaper for text interpolated into the generated page.
     * Escapes the five characters with meaning in HTML content/attributes.
     */
    private static String htmlEscape(String s) {
        StringBuilder out = new StringBuilder(s.length());
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            switch (c) {
            case '<':
                out.append("&lt;");
                break;
            case '>':
                out.append("&gt;");
                break;
            case '&':
                out.append("&amp;");
                break;
            case '"':
                out.append("&quot;");
                break;
            case '\'':
                out.append("&#39;");
                break;
            default:
                out.append(c);
            }
        }
        return out.toString();
    }

    /** Renders the CSRF token as a hidden input when one is present on the request. */
    private void renderHiddenInputs(StringBuilder sb, HttpServletRequest request) {
        CsrfToken token = (CsrfToken) request.getAttribute(CsrfToken.class.getName());
        if (token != null) {
            sb.append("	<input name=\"" + token.getParameterName()
                    + "\" type=\"hidden\" value=\"" + token.getToken() + "\" />\n");
        }
    }

    private boolean isLogoutSuccess(HttpServletRequest request) {
        return logoutSuccessUrl != null && matches(request, logoutSuccessUrl);
    }

    private boolean isLoginUrlRequest(HttpServletRequest request) {
        return matches(request, loginPageUrl);
    }

    private boolean isErrorPage(HttpServletRequest request) {
        return matches(request, failureUrl);
    }

    /**
     * True when a GET request's URI (path parameters stripped, query string
     * retained) equals {@code url} relative to the context path.
     */
    private boolean matches(HttpServletRequest request, String url) {
        if (!"GET".equals(request.getMethod()) || url == null) {
            return false;
        }
        String uri = request.getRequestURI();
        int pathParamIndex = uri.indexOf(';');
        if (pathParamIndex > 0) {
            // strip everything after the first semi-colon
            uri = uri.substring(0, pathParamIndex);
        }
        if (request.getQueryString() != null) {
            uri += "?" + request.getQueryString();
        }
        if ("".equals(request.getContextPath())) {
            return uri.equals(url);
        }
        return uri.equals(request.getContextPath() + url);
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.config.util;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.*;
public class ConfigUtilsTest {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    @Test
    public void mergeProperty() {
        Integer left = null;
        Integer right = null;
        // Merging yields a value only when both sides agree.
        assertNull(ConfigUtils.mergeProperty(left, right));
        left = 1;
        assertNull(ConfigUtils.mergeProperty(left, right));
        right = 10;
        assertNull(ConfigUtils.mergeProperty(left, right));
        right = 1;
        assertEquals(Integer.valueOf(1), ConfigUtils.mergeProperty(left, right));
        left = null;
        assertNull(ConfigUtils.mergeProperty(left, right));
    }

    @Test
    public void meldProperty() {
        Integer left = null;
        Integer right = null;
        assertNull(ConfigUtils.meldProperty(left, right));
        left = 1;
        // When only one side is set, melding keeps it.
        assertEquals(Integer.valueOf(1), ConfigUtils.meldProperty(left, right));
        right = 10;
        // When both sides are set, melding defers to merging.
        assertEquals(ConfigUtils.mergeProperty(Integer.valueOf(1), Integer.valueOf(10)),
                ConfigUtils.meldProperty(left, right));
        left = null;
        assertEquals(Integer.valueOf(10), ConfigUtils.meldProperty(left, right));
    }

    @Test
    public void ceilDivide() {
        // Positive / positive rounds toward +infinity
        assertEquals(10, ConfigUtils.ceilDivide(19, 2));
        assertEquals(10, ConfigUtils.ceilDivide(20, 2));
        assertEquals(11, ConfigUtils.ceilDivide(21, 2));
        // Mixed signs
        assertEquals(-9, ConfigUtils.ceilDivide(19, -2));
        assertEquals(-10, ConfigUtils.ceilDivide(20, -2));
        assertEquals(-10, ConfigUtils.ceilDivide(21, -2));
        assertEquals(-9, ConfigUtils.ceilDivide(-19, 2));
        assertEquals(-10, ConfigUtils.ceilDivide(-20, 2));
        assertEquals(-10, ConfigUtils.ceilDivide(-21, 2));
        // Negative / negative
        assertEquals(10, ConfigUtils.ceilDivide(-19, -2));
        assertEquals(10, ConfigUtils.ceilDivide(-20, -2));
        assertEquals(11, ConfigUtils.ceilDivide(-21, -2));
    }

    @Test(expected = ArithmeticException.class)
    public void ceilDivideByZero() {
        ConfigUtils.ceilDivide(20, -0);
    }

    @Test
    public void applyCustomProperties() {
        Map<String, String> props = new HashMap<>();
        props.put("primitiveBoolean", "true");
        props.put("objectBoolean", "true");
        props.put("primitiveInt", "1");
        props.put("objectInteger", "2");
        props.put("primitiveLong", "3");
        props.put("objectLong", "4");
        props.put("primitiveFloat", "5.5");
        props.put("objectFloat", "6.6");
        props.put("primitiveDouble", "7.7");
        props.put("objectDouble", "8.8");
        props.put("bigDecimal", "9.9");
        props.put("string", "This is a sentence.");
        props.put("configUtilsTestBeanEnum", "BETA");
        ConfigUtilsTestBean target = new ConfigUtilsTestBean();
        ConfigUtils.applyCustomProperties(target, "bean", props, "customProperties");
        // Each string value must be parsed into the setter's declared type.
        assertTrue(target.primitiveBoolean);
        assertEquals(Boolean.TRUE, target.objectBoolean);
        assertEquals(1, target.primitiveInt);
        assertEquals(Integer.valueOf(2), target.objectInteger);
        assertEquals(3L, target.primitiveLong);
        assertEquals(Long.valueOf(4L), target.objectLong);
        assertEquals(5.5F, target.primitiveFloat, 0.0F);
        assertEquals(Float.valueOf(6.6F), target.objectFloat);
        assertEquals(7.7, target.primitiveDouble, 0.0);
        assertEquals(Double.valueOf(8.8), target.objectDouble);
        assertEquals(new BigDecimal("9.9"), target.bigDecimal);
        assertEquals("This is a sentence.", target.string);
        assertEquals(ConfigUtilsTestBeanEnum.BETA, target.configUtilsTestBeanEnum);
    }

    @Test
    public void applyCustomPropertiesSubset() {
        // Properties not mentioned in the map are simply left untouched.
        Map<String, String> props = new HashMap<>();
        props.put("string", "This is a sentence.");
        ConfigUtilsTestBean target = new ConfigUtilsTestBean();
        ConfigUtils.applyCustomProperties(target, "bean", props, "customProperties");
        assertEquals("This is a sentence.", target.string);
    }

    @Test(expected = IllegalStateException.class)
    public void applyCustomPropertiesNonExistingCustomProperty() {
        Map<String, String> props = new HashMap<>();
        props.put("doesNotExist", "This is a sentence.");
        ConfigUtilsTestBean target = new ConfigUtilsTestBean();
        ConfigUtils.applyCustomProperties(target, "bean", props, "customProperties");
    }

    /** Bean with one settable property of every supported value type. */
    private static class ConfigUtilsTestBean {

        private boolean primitiveBoolean;
        private Boolean objectBoolean;
        private int primitiveInt;
        private Integer objectInteger;
        private long primitiveLong;
        private Long objectLong;
        private float primitiveFloat;
        private Float objectFloat;
        private double primitiveDouble;
        private Double objectDouble;
        private BigDecimal bigDecimal;
        private String string;
        private ConfigUtilsTestBeanEnum configUtilsTestBeanEnum;

        public void setPrimitiveBoolean(boolean primitiveBoolean) {
            this.primitiveBoolean = primitiveBoolean;
        }

        public void setObjectBoolean(Boolean objectBoolean) {
            this.objectBoolean = objectBoolean;
        }

        public void setPrimitiveInt(int primitiveInt) {
            this.primitiveInt = primitiveInt;
        }

        public void setObjectInteger(Integer objectInteger) {
            this.objectInteger = objectInteger;
        }

        public void setPrimitiveLong(long primitiveLong) {
            this.primitiveLong = primitiveLong;
        }

        public void setObjectLong(Long objectLong) {
            this.objectLong = objectLong;
        }

        public void setPrimitiveFloat(float primitiveFloat) {
            this.primitiveFloat = primitiveFloat;
        }

        public void setObjectFloat(Float objectFloat) {
            this.objectFloat = objectFloat;
        }

        public void setPrimitiveDouble(double primitiveDouble) {
            this.primitiveDouble = primitiveDouble;
        }

        public void setObjectDouble(Double objectDouble) {
            this.objectDouble = objectDouble;
        }

        public void setBigDecimal(BigDecimal bigDecimal) {
            this.bigDecimal = bigDecimal;
        }

        public void setString(String string) {
            this.string = string;
        }

        public void setConfigUtilsTestBeanEnum(ConfigUtilsTestBeanEnum configUtilsTestBeanEnum) {
            this.configUtilsTestBeanEnum = configUtilsTestBeanEnum;
        }
    }

    private enum ConfigUtilsTestBeanEnum {
        ALPHA,
        BETA,
        GAMMA
    }

    @Test
    public void newInstanceStaticInnerClass() {
        assertNotNull(ConfigUtils.newInstance(this, "testProperty", StaticInnerClass.class));
    }

    public static class StaticInnerClass {
    }

    @Test
    public void newInstanceStaticInnerClassWithArgsConstructor() {
        // Instantiation requires a no-arg constructor.
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("no-arg constructor.");
        assertNotNull(ConfigUtils.newInstance(this, "testProperty", StaticInnerClassWithArgsConstructor.class));
    }

    public static class StaticInnerClassWithArgsConstructor {
        public StaticInnerClassWithArgsConstructor(int i) {
        }
    }

    @Test
    public void newInstanceNonStaticInnerClass() {
        // Non-static inner classes cannot be instantiated without an enclosing instance.
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("inner class");
        assertNotNull(ConfigUtils.newInstance(this, "testProperty", NonStaticInnerClass.class));
    }

    public class NonStaticInnerClass {
    }

    @Test
    public void newInstanceLocalClass() {
        class LocalClass {}
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("inner class");
        assertNotNull(ConfigUtils.newInstance(this, "testProperty", LocalClass.class));
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.intellij.images.util.imageio;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import org.apache.commons.imaging.*;
import org.apache.commons.imaging.common.bytesource.ByteSource;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.spi.ImageReaderSpi;
import javax.imageio.stream.ImageInputStream;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
/**
 * ImageIO service-provider adapter that exposes Apache Commons Imaging formats
 * (minus the ones the JDK already handles better) as {@link ImageReader}s.
 */
public class CommonsImagingImageReaderSpi extends ImageReaderSpi {
    // Format detected by the most recent canDecodeInput() call on this thread;
    // consumed by createReaderInstance() on the same thread.
    private final ThreadLocal<ImageFormat> myFormat = new ThreadLocal<>();
    // Formats this SPI accepts (all Commons Imaging formats except the excluded ones below).
    private final List<ImageFormat> myFormats;

    public CommonsImagingImageReaderSpi() {
        super();
        vendorName = "JetBrains, s.r.o.";
        version = "1.0";
        // todo standard GIF/BMP formats can be optionally skipped as well
        // JPEG is skipped due to Exception: cannot read or write JPEG images. (JpegImageParser.java:92)
        // tiff reader seems to be broken
        // PNG reader has bugs with well-compressed PNG images, use standard one instead
        myFormats = new ArrayList<>(Arrays.asList(ImageFormats.values()));
        myFormats.removeAll(Arrays.asList(ImageFormats.UNKNOWN,
                                          ImageFormats.JPEG,
                                          ImageFormats.TIFF,
                                          ImageFormats.PNG));
        // Populate the ImageReaderSpi registration arrays from the surviving formats.
        names = new String[myFormats.size() * 2];
        suffixes = new String[myFormats.size()];
        MIMETypes = new String[myFormats.size()];
        pluginClassName = MyImageReader.class.getName();
        inputTypes = new Class[] {ImageInputStream.class};
        for (int i = 0, allFormatsLength = myFormats.size(); i < allFormatsLength; i++) {
            final ImageFormat format = myFormats.get(i);
            names[2 * i] = StringUtil.toLowerCase(format.getExtension());
            // NOTE(review): this second name slot duplicates the lowercase extension
            // exactly — possibly one of the two was meant to differ (e.g. the
            // upper-case variant); confirm against the ImageIO registration intent.
            names[2 * i + 1] = StringUtil.toLowerCase(format.getExtension());
            suffixes[i] = names[2 * i];
            MIMETypes[i] = "image/" + names[2 * i];
        }
    }

    @Override
    public String getDescription(Locale locale) {
        return "Apache Commons Imaging adapter reader";
    }

    /**
     * Guesses the image format from the stream contents; accepts the input only
     * when the guessed format is one this SPI handles. On success the format is
     * stashed in a ThreadLocal for the subsequent createReaderInstance() call.
     */
    @Override
    public boolean canDecodeInput(Object input) throws IOException {
        if (!(input instanceof ImageInputStream)) {
            return false;
        }
        final ImageInputStream stream = (ImageInputStream)input;
        try {
            // NOTE(review): MyByteSource seeks the stream to 0 and reads from it;
            // the stream position is not restored afterwards — confirm callers
            // re-seek before reading.
            final ImageFormat imageFormat = Imaging.guessFormat(new MyByteSource(stream));
            if (myFormats.contains(imageFormat)) {
                myFormat.set(imageFormat);
                return true;
            }
            return false;
        }
        catch (ImageReadException e) {
            throw new IOException(e);
        }
    }

    @Override
    public ImageReader createReaderInstance(Object extension) {
        // Picks up the format detected by canDecodeInput() on this thread (may be null).
        return new MyImageReader(this, myFormat.get());
    }

    /** Adapts an ImageInputStream to the Commons Imaging ByteSource abstraction. */
    private static class MyByteSource extends ByteSource {
        private final ImageInputStream myStream;

        MyByteSource(final ImageInputStream stream) {
            super(stream.toString());
            myStream = stream;
        }

        /** Rewinds the underlying stream and exposes it as a plain InputStream. */
        @Override
        public InputStream getInputStream() throws IOException {
            myStream.seek(0);
            return new InputStream() {
                @Override
                public int read() throws IOException {
                    return myStream.read();
                }
                @Override
                public int read(final byte[] b, final int off, final int len) throws IOException {
                    return myStream.read(b, off, len);
                }
            };
        }

        @Override
        public byte[] getBlock(final int start, final int length) throws IOException {
            myStream.seek(start);
            final byte[] bytes = new byte[length];
            // Trim the buffer to the bytes actually read.
            // NOTE(review): a read() of -1 at EOF would make realloc throw —
            // confirm callers never request a block past the end of the stream.
            final int read = myStream.read(bytes);
            return ArrayUtil.realloc(bytes, read);
        }

        @Override
        public byte[] getBlock(final long start, final int length) throws IOException {
            myStream.seek(start);
            final byte[] bytes = new byte[length];
            final int read = myStream.read(bytes);
            return ArrayUtil.realloc(bytes, read);
        }

        /** Reads the whole stream (from position 0) into a byte array. */
        @Override
        public byte[] getAll() throws IOException {
            return FileUtil.loadBytes(getInputStream());
        }

        @Override
        public long getLength() throws IOException {
            return myStream.length();
        }

        @Override
        public String getDescription() {
            return myStream.toString();
        }
    }

    /**
     * ImageReader that loads the entire input via Commons Imaging; bytes, info
     * and decoded images are cached lazily and dropped on dispose()/setInput().
     */
    private static final class MyImageReader extends ImageReader {
        private byte[] myBytes;              // lazily-loaded raw input bytes
        private ImageInfo myInfo;            // lazily-parsed image metadata
        private BufferedImage[] myImages;    // lazily-decoded frames
        private final ImageFormat myDefaultFormat;

        private MyImageReader(final CommonsImagingImageReaderSpi provider, final ImageFormat imageFormat) {
            super(provider);
            myDefaultFormat = imageFormat == null? ImageFormats.UNKNOWN : imageFormat;
        }

        @Override
        public void dispose() {
            myBytes = null;
            myInfo = null;
            myImages = null;
        }

        @Override
        public void setInput(final Object input, final boolean seekForwardOnly, final boolean ignoreMetadata) {
            super.setInput(input, seekForwardOnly, ignoreMetadata);
            // New input invalidates every cached artifact.
            myBytes = null;
            myInfo = null;
            myImages = null;
        }

        private ImageInfo getInfo() throws IOException {
            if (myInfo == null) {
                try {
                    myInfo = Imaging.getImageInfo(getBytes());
                }
                catch (ImageReadException e) {
                    throw new IOException(e);
                }
            }
            return myInfo;
        }

        private byte[] getBytes() throws IOException {
            if (myBytes == null) {
                final ImageInputStream stream = (ImageInputStream)input;
                myBytes = new MyByteSource(stream).getAll();
            }
            return myBytes;
        }

        private BufferedImage[] getImages() throws IOException {
            if (myImages == null) {
                try {
                    List<BufferedImage> images = Imaging.getAllBufferedImages(getBytes());
                    myImages = images.toArray(new BufferedImage[0]);
                }
                catch (ImageReadException e) {
                    throw new IOException(e);
                }
            }
            return myImages;
        }

        @Override
        public int getNumImages(final boolean allowSearch) throws IOException {
            return getInfo().getNumberOfImages();
        }

        @Override
        public int getWidth(final int imageIndex) throws IOException {
            // NOTE(review): width/height come from the overall ImageInfo and
            // ignore imageIndex — per-frame dimensions are not reported.
            return getInfo().getWidth();
        }

        @Override
        public int getHeight(final int imageIndex) throws IOException {
            return getInfo().getHeight();
        }

        @Override
        public Iterator<ImageTypeSpecifier> getImageTypes(final int imageIndex) throws IOException {
            return Collections.singletonList(ImageTypeSpecifier.createFromRenderedImage(getImages()[imageIndex])).iterator();
        }

        @Override
        public IIOMetadata getStreamMetadata() throws IOException {
            return null;
        }

        @Override
        public IIOMetadata getImageMetadata(final int imageIndex) throws IOException {
            return null;
        }

        @Override
        public BufferedImage read(final int imageIndex, final ImageReadParam param) throws IOException {
            return getImages()[imageIndex];
        }

        @Override
        public String getFormatName() throws IOException {
            // return default if called before setInput
            return input == null? myDefaultFormat.getName() : getInfo().getFormat().getName();
        }
    }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.traits.compiler.factmodel.traits;
import org.drools.traits.core.factmodel.MapWrapper;
import org.drools.traits.core.factmodel.TraitProxyImpl;
import org.drools.core.spi.InternalReadAccessor;
import org.drools.core.spi.WriteAccessor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
public class StudentProxyWrapper2 implements Map<String, Object>, MapWrapper {
Imp2 object;
Map<String, Object> map;
public static InternalReadAccessor name_reader;
public static WriteAccessor name_writer;
public static InternalReadAccessor bit_reader;
public static WriteAccessor bit_writer;
    /**
     * Wraps a core {@code Imp2} object plus a soft-field map, routing Map
     * operations to whichever side owns the key. Registers the map as the
     * object's dynamic-properties store.
     */
    public StudentProxyWrapper2( Imp2 object, Map<String,Object> map ) {
        this.object = object;
        this.map = map;
        // System.out.println( map );
        // System.out.println( object );
        object._setDynamicProperties( map );
        // map.put( "age", 0 );
        // map.put( "xcsvf" , 0.0 );
        // map.put( "name" , null );
        // map.put( "csdfsd", 0L );
        // map.put( "school" , null );
    }
public int size() {
return map.size()
// + ( object.getName() != null ? 1 : 0 )
+ ( object.getSchool() != null ? 1 : 0 )
+ 1
+ ( object.getName() != null ? 1 : 0 )
;
}
public boolean isEmpty() {
return false;
}
public boolean containsKey(Object key) {
if ( "name".equals( key ) ) return true;
if ( "school".equals( key ) ) return true;
return map.containsKey( key );
}
public boolean containsValue(Object value) {
if ( value == null ) {
if ( object.getName() == null ) return true;
if ( object.getSchool() == null ) return true;
return map.containsValue( null );
}
return true;
}
public Object get( Object key ) {
if ( "name".equals( key ) ) {
return object.getName();
}
if ( "school".equals( key ) ) {
return object.getSchool();
}
return map.get( key );
}
public Object put(String key, Object value) {
if ( "name".equals( key ) ) {
// object.setName( (String) value );
name_writer.setValue( object, value );
return value;
}
if ( "school".equals( key ) ) {
object.setSchool((String) value);
return value;
}
if ( "num".equals( key ) ) {
double d = (Double) value;
bit_writer.setDoubleValue( object, d );
return value;
}
return map.put(key, value);
}
public Object remove(Object key) {
Object val;
// any hard field must be 0-fied
// any soft field must be 0-fied on the map
// other fields will effectively be removed
if ( "name".equals( key ) ) {
// val = object.getName();
// object.setName( null );
val = name_reader.getValue( object );
name_writer.setValue( object, null );
return val;
}
if ( "bol".equals( key ) ) {
// val = object.isBl();
// object.setBl( true );
val = bit_reader.getIntValue( object );
bit_writer.setIntValue( object, 0 );
return val;
}
if ( "age".equals( key ) ) {
val = map.get( "age" );
map.put( "age", 0 );
return val;
}
val = map.remove( key );
return val;
}
public void putAll(Map<? extends String, ? extends Object> m) {
for ( String k : m.keySet() ) {
put( k, m.get( k ) );
}
}
public void clear() {
// object.setName(null);
// object.setD(0);
// object.setBite( null );
// object.setSchool( null );
bit_writer.setIntValue( object, 0 );
name_writer.setValue( object, null );
map.clear();
map.put( "age", 0 );
map.put( "xcsvf" , 0.0 );
map.put( "name" , null );
map.put( "csdfsd", 0L );
map.put( "school " , null );
}
public Set<String> keySet() {
Set<String> set = new HashSet<String>();
set.add("name");
set.add("school");
set.addAll( map.keySet() );
return set;
}
public Collection<Object> values() {
Collection<Object> values = new ArrayList<Object>();
values.add( object.getName() );
values.add( object.getSchool() );
values.addAll( map.values() );
return values;
}
public Set<Entry<String, Object>> entrySet() {
Set<Entry<String, Object>> set = new HashSet<Entry<String, Object>>();
set.add(TraitProxyImpl.buildEntry("name", object.getName() ) );
set.add(TraitProxyImpl.buildEntry("school", object.getSchool()) );
set.addAll( map.entrySet() );
return set;
}
public boolean equals(Object o) {
if (this == o) return true;
MapWrapper that = (MapWrapper) o;
return map.equals( that.getInnerMap() );
}
public int hashCode() {
return map.hashCode();
}
public Map<String, Object> getInnerMap() {
return map;
}
public String toString() {
return "[["+entrySet()+"]]";
}
}
| |
package com.hubspot.singularity.data.history;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;
import org.skife.jdbi.v2.sqlobject.customizers.Define;
import org.skife.jdbi.v2.sqlobject.mixins.GetHandle;
import org.skife.jdbi.v2.sqlobject.stringtemplate.UseStringTemplate3StatementLocator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.hubspot.singularity.ExtendedTaskState;
import com.hubspot.singularity.OrderDirection;
import com.hubspot.singularity.SingularityDeployHistory;
import com.hubspot.singularity.SingularityRequestHistory;
import com.hubspot.singularity.SingularityTaskIdHistory;
import com.hubspot.singularity.data.history.SingularityMappers.SingularityRequestIdCount;
/**
 * JDBI v2 SQL-object DAO for the Singularity history tables (requestHistory,
 * deployHistory, taskHistory).
 *
 * Note on the SQL strings: the class uses the StringTemplate3 statement locator,
 * which treats a bare '<' as the start of a template expression. Literal
 * less-than comparisons are therefore escaped as "\\<", while
 * "<orderDirection>" is a real template attribute substituted via @Define.
 */
@UseStringTemplate3StatementLocator
public abstract class HistoryJDBI implements GetHandle {
private static final Logger LOG = LoggerFactory.getLogger(HistoryJDBI.class);
// --- inserts ---
@SqlUpdate("INSERT INTO requestHistory (requestId, request, createdAt, requestState, user, message) VALUES (:requestId, :request, :createdAt, :requestState, :user, :message)")
abstract void insertRequestHistory(@Bind("requestId") String requestId, @Bind("request") byte[] request, @Bind("createdAt") Date createdAt, @Bind("requestState") String requestState, @Bind("user") String user, @Bind("message") String message);
@SqlUpdate("INSERT INTO deployHistory (requestId, deployId, createdAt, user, message, deployStateAt, deployState, bytes) VALUES (:requestId, :deployId, :createdAt, :user, :message, :deployStateAt, :deployState, :bytes)")
abstract void insertDeployHistory(@Bind("requestId") String requestId, @Bind("deployId") String deployId, @Bind("createdAt") Date createdAt, @Bind("user") String user, @Bind("message") String message, @Bind("deployStateAt") Date deployStateAt, @Bind("deployState") String deployState, @Bind("bytes") byte[] bytes);
// New task rows always start with purged = false.
@SqlUpdate("INSERT INTO taskHistory (requestId, taskId, bytes, updatedAt, lastTaskStatus, runId, deployId, host, startedAt, purged) VALUES (:requestId, :taskId, :bytes, :updatedAt, :lastTaskStatus, :runId, :deployId, :host, :startedAt, false)")
abstract void insertTaskHistory(@Bind("requestId") String requestId, @Bind("taskId") String taskId, @Bind("bytes") byte[] bytes, @Bind("updatedAt") Date updatedAt,
@Bind("lastTaskStatus") String lastTaskStatus, @Bind("runId") String runId, @Bind("deployId") String deployId, @Bind("host") String host,
@Bind("startedAt") Date startedAt);
// --- single-row blob lookups ---
@SqlQuery("SELECT bytes FROM taskHistory WHERE taskId = :taskId")
abstract byte[] getTaskHistoryForTask(@Bind("taskId") String taskId);
@SqlQuery("SELECT bytes FROM taskHistory WHERE requestId = :requestId AND runId = :runId")
abstract byte[] getTaskHistoryForTaskByRunId(@Bind("requestId") String requestId, @Bind("runId") String runId);
@SqlQuery("SELECT bytes FROM deployHistory WHERE requestId = :requestId AND deployId = :deployId")
abstract byte[] getDeployHistoryForDeploy(@Bind("requestId") String requestId, @Bind("deployId") String deployId);
// --- paged listings (MySQL-style "LIMIT offset, count") ---
@SqlQuery("SELECT requestId, deployId, createdAt, user, message, deployStateAt, deployState FROM deployHistory WHERE requestId = :requestId ORDER BY createdAt DESC LIMIT :limitStart, :limitCount")
abstract List<SingularityDeployHistory> getDeployHistoryForRequest(@Bind("requestId") String requestId, @Bind("limitStart") Integer limitStart, @Bind("limitCount") Integer limitCount);
@SqlQuery("SELECT COUNT(*) FROM deployHistory WHERE requestId = :requestId")
abstract int getDeployHistoryForRequestCount(@Bind("requestId") String requestId);
// <orderDirection> is a StringTemplate attribute (ASC/DESC), injected via @Define —
// it is expected to come from the OrderDirection enum, never raw user input.
@SqlQuery("SELECT request, createdAt, requestState, user, message FROM requestHistory WHERE requestId = :requestId ORDER BY createdAt <orderDirection> LIMIT :limitStart, :limitCount")
abstract List<SingularityRequestHistory> getRequestHistory(@Bind("requestId") String requestId, @Define("orderDirection") String orderDirection, @Bind("limitStart") Integer limitStart, @Bind("limitCount") Integer limitCount);
@SqlQuery("SELECT COUNT(*) FROM requestHistory WHERE requestId = :requestId")
abstract int getRequestHistoryCount(@Bind("requestId") String requestId);
// Prefix search over request ids.
@SqlQuery("SELECT DISTINCT requestId FROM requestHistory WHERE requestId LIKE CONCAT(:requestIdLike, '%') LIMIT :limitStart, :limitCount")
abstract List<String> getRequestHistoryLike(@Bind("requestIdLike") String requestIdLike, @Bind("limitStart") Integer limitStart, @Bind("limitCount") Integer limitCount);
// "\\<" below is an escaped literal '<' (see class comment).
@SqlQuery("SELECT requestId, COUNT(*) as count FROM taskHistory WHERE updatedAt \\< :updatedAt GROUP BY requestId")
abstract List<SingularityRequestIdCount> getRequestIdCounts(@Bind("updatedAt") Date updatedAt);
@SqlQuery("SELECT MIN(updatedAt) from (SELECT updatedAt FROM taskHistory WHERE requestId = :requestId ORDER BY updatedAt DESC LIMIT :limit) as alias")
abstract Date getMinUpdatedAtWithLimitForRequest(@Bind("requestId") String requestId, @Bind("limit") Integer limit);
// --- purge support: blank the blob first, optionally delete rows later ---
@SqlUpdate("UPDATE taskHistory SET bytes = '', purged = true WHERE requestId = :requestId AND purged = false AND updatedAt \\< :updatedAtBefore LIMIT :purgeLimitPerQuery")
abstract void updateTaskHistoryNullBytesForRequestBefore(@Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore, @Bind("purgeLimitPerQuery") Integer purgeLimitPerQuery);
@SqlUpdate("DELETE FROM taskHistory WHERE requestId = :requestId AND updatedAt \\< :updatedAtBefore LIMIT :purgeLimitPerQuery")
abstract void deleteTaskHistoryForRequestBefore(@Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore, @Bind("purgeLimitPerQuery") Integer purgeLimitPerQuery);
@SqlQuery("SELECT DISTINCT requestId FROM taskHistory")
abstract List<String> getRequestIdsInTaskHistory();
@SqlQuery("SELECT COUNT(*) FROM taskHistory WHERE requestId = :requestId AND purged = false AND updatedAt \\< :updatedAtBefore")
abstract int getUnpurgedTaskHistoryCountByRequestBefore(@Bind("requestId") String requestId, @Bind("updatedAtBefore") Date updatedAtBefore);
abstract void close();
// Base SELECTs for the dynamically-built task-id-history queries below.
private static final String GET_TASK_ID_HISTORY_QUERY = "SELECT taskId, requestId, updatedAt, lastTaskStatus, runId FROM taskHistory";
private static final String GET_TASK_ID_HISTORY_COUNT_QUERY = "SELECT COUNT(*) FROM taskHistory";
// Appends " WHERE " for the first predicate (binds still empty) and " AND "
// for every subsequent one.
private void addWhereOrAnd(StringBuilder sqlBuilder, boolean shouldUseWhere) {
if (shouldUseWhere) {
sqlBuilder.append(" WHERE ");
} else {
sqlBuilder.append(" AND ");
}
}
// Appends one predicate per present Optional and records the matching bind
// value. binds doubles as the "have we emitted a predicate yet" flag, so the
// append and the put must stay paired in each branch.
private void applyTaskIdHistoryBaseQuery(StringBuilder sqlBuilder, Map<String, Object> binds, Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host,
Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore,
Optional<Long> updatedAfter) {
if (requestId.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("requestId = :requestId");
binds.put("requestId", requestId.get());
}
if (deployId.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("deployId = :deployId");
binds.put("deployId", deployId.get());
}
if (runId.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("runId = :runId");
binds.put("runId", runId.get());
}
if (host.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("host = :host");
binds.put("host", host.get());
}
if (lastTaskStatus.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("lastTaskStatus = :lastTaskStatus");
binds.put("lastTaskStatus", lastTaskStatus.get().name());
}
// Epoch-millis bounds are converted to java.util.Date for JDBC binding.
if (startedBefore.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("startedAt < :startedBefore");
binds.put("startedBefore", new Date(startedBefore.get()));
}
if (startedAfter.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("startedAt > :startedAfter");
binds.put("startedAfter", new Date(startedAfter.get()));
}
if (updatedBefore.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("updatedAt < :updatedBefore");
binds.put("updatedBefore", new Date(updatedBefore.get()));
}
if (updatedAfter.isPresent()) {
addWhereOrAnd(sqlBuilder, binds.isEmpty());
sqlBuilder.append("updatedAt > :updatedAfter");
binds.put("updatedAfter", new Date(updatedAfter.get()));
}
}
/**
 * Builds and runs the filtered/ordered/paged task-id-history query.
 * Ordering defaults to startedAt DESC; a secondary requestId sort is added
 * when no single request is selected, to stabilize cross-request ordering.
 */
public List<SingularityTaskIdHistory> getTaskIdHistory(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host,
Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore,
Optional<Long> updatedAfter, Optional<OrderDirection> orderDirection, Optional<Integer> limitStart, Integer limitCount) {
final Map<String, Object> binds = new HashMap<>();
final StringBuilder sqlBuilder = new StringBuilder(GET_TASK_ID_HISTORY_QUERY);
applyTaskIdHistoryBaseQuery(sqlBuilder, binds, requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter);
sqlBuilder.append(" ORDER BY startedAt ");
sqlBuilder.append(orderDirection.or(OrderDirection.DESC).name());
if (!requestId.isPresent()) {
sqlBuilder.append(", requestId ");
sqlBuilder.append(orderDirection.or(OrderDirection.DESC).name());
}
// Emit either "LIMIT :limitStart, :limitCount" or "LIMIT :limitCount".
if (limitStart.isPresent()) {
sqlBuilder.append(" LIMIT :limitStart, ");
binds.put("limitStart", limitStart.get());
} else {
sqlBuilder.append(" LIMIT ");
}
sqlBuilder.append(":limitCount");
binds.put("limitCount", limitCount);
final String sql = sqlBuilder.toString();
LOG.trace("Generated sql for task search: {}, binds: {}", sql, binds);
final Query<SingularityTaskIdHistory> query = getHandle().createQuery(sql).mapTo(SingularityTaskIdHistory.class);
for (Map.Entry<String, Object> entry : binds.entrySet()) {
query.bind(entry.getKey(), entry.getValue());
}
return query.list();
}
/** Same filters as getTaskIdHistory, but returns only the matching row count. */
public int getTaskIdHistoryCount(Optional<String> requestId, Optional<String> deployId, Optional<String> runId, Optional<String> host,
Optional<ExtendedTaskState> lastTaskStatus, Optional<Long> startedBefore, Optional<Long> startedAfter, Optional<Long> updatedBefore,
Optional<Long> updatedAfter) {
final Map<String, Object> binds = new HashMap<>();
final StringBuilder sqlBuilder = new StringBuilder(GET_TASK_ID_HISTORY_COUNT_QUERY);
applyTaskIdHistoryBaseQuery(sqlBuilder, binds, requestId, deployId, runId, host, lastTaskStatus, startedBefore, startedAfter, updatedBefore, updatedAfter);
final String sql = sqlBuilder.toString();
LOG.trace("Generated sql for task search count: {}, binds: {}", sql, binds);
final Query<Integer> query = getHandle().createQuery(sql).mapTo(Integer.class);
for (Map.Entry<String, Object> entry : binds.entrySet()) {
query.bind(entry.getKey(), entry.getValue());
}
return query.first();
}
}
| |
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj;
import edu.wpi.first.wpilibj.fpga.tEncoder;
import edu.wpi.first.wpilibj.parsing.ISensor;
import edu.wpi.first.wpilibj.util.AllocationException;
import edu.wpi.first.wpilibj.util.CheckedAllocationException;
/**
* Class to read quad encoders.
* Quadrature encoders are devices that count shaft rotation and can sense direction. The output of
* the QuadEncoder class is an integer that can count either up or down, and can go negative for
* reverse direction counting. When creating QuadEncoders, a direction is supplied that changes the
* sense of the output to make code more readable if the encoder is mounted such that forward movement
* generates negative values. Quadrature encoders have two digital outputs, an A Channel and a B Channel
* that are out of phase with each other to allow the FPGA to do direction sensing.
*/
public class Encoder extends SensorBase implements CounterBase, PIDSource, ISensor {
// Typesafe-enum pattern (this code base predates java.lang.Enum usage here):
// the private constructor restricts instances to the two constants below.
public static class PIDSourceParameter {
// Raw integer code for the parameter; exposed for switch-style dispatch.
public final int value;
static final int kDistance_val = 0;
static final int kRate_val = 1;
// Use the encoder's accumulated distance as the PID input.
public static final PIDSourceParameter kDistance = new PIDSourceParameter(kDistance_val);
// Use the encoder's rate of change as the PID input.
public static final PIDSourceParameter kRate = new PIDSourceParameter(kRate_val);
private PIDSourceParameter(int value) {
this.value = value;
}
}
// Shared pool of FPGA quadrature-encoder slots; used only for 4x decoding.
static Resource quadEncoders = new Resource(tEncoder.kNumSystems);
/**
 * The a source
 */
protected DigitalSource m_aSource; // the A phase of the quad encoder
/**
 * The b source
 */
protected DigitalSource m_bSource; // the B phase of the quad encoder
/**
 * The index source
 */
protected DigitalSource m_indexSource = null; //Index on some encoders
private tEncoder m_encoder; // FPGA encoder handle; non-null only for 4x decoding
private int m_index; // slot index allocated from quadEncoders (4x decoding only)
private double m_distancePerPulse; // distance of travel for each encoder tick
private Counter m_counter; // Counter object for 1x and 2x encoding
private EncodingType m_encodingType = EncodingType.k4X; // decoding mode; defaults to 4x
private boolean m_allocatedA; // true if this instance created (and must free) the A source
private boolean m_allocatedB; // true if this instance created (and must free) the B source
private boolean m_allocatedI; // true if this instance created (and must free) the index source
private PIDSourceParameter m_pidSource; // which quantity (distance or rate) this encoder reports as a PID source
/**
* Common initialization code for Encoders.
* This code allocates resources for Encoders and is common to all constructors.
* @param reverseDirection If true, counts down instead of up (this is all relative)
* @param encodingType either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If 4X is
* selected, then an encoder FPGA object is used and the returned counts will be 4x the encoder
* spec'd value since all rising and falling edges are counted. If 1X or 2X are selected then
* a counter object will be used and the returned value will either exactly match the spec'd count
* or be double (2x) the spec'd count.
*/
private void initEncoder(boolean reverseDirection) {
switch (m_encodingType.value) {
case EncodingType.k4X_val:
// 4x decoding uses a dedicated FPGA encoder: grab a slot from the shared pool.
try {
m_index = quadEncoders.allocate();
} catch (CheckedAllocationException e) {
throw new AllocationException("There are no encoders left to allocate");
}
m_encoder = new tEncoder(m_index);
// Route the A and B phases into the FPGA encoder block. The write order
// mirrors the FPGA register layout; keep it as-is.
m_encoder.writeConfig_ASource_Module(m_aSource.getModuleForRouting());
m_encoder.writeConfig_ASource_Channel(m_aSource.getChannelForRouting());
m_encoder.writeConfig_ASource_AnalogTrigger(m_aSource.getAnalogTriggerForRouting());
m_encoder.writeConfig_BSource_Module(m_bSource.getModuleForRouting());
m_encoder.writeConfig_BSource_Channel(m_bSource.getChannelForRouting());
m_encoder.writeConfig_BSource_AnalogTrigger(m_bSource.getAnalogTriggerForRouting());
m_encoder.strobeReset();
m_encoder.writeConfig_Reverse(reverseDirection);
m_encoder.writeTimerConfig_AverageSize(1);
if (m_indexSource != null) {
// Optional index channel. NOTE(review): the index is configured
// active-high unconditionally — confirm this matches all supported encoders.
m_encoder.writeConfig_IndexSource_Module(m_indexSource.getModuleForRouting());
m_encoder.writeConfig_IndexSource_Channel(m_indexSource.getChannelForRouting());
m_encoder.writeConfig_IndexSource_AnalogTrigger(m_indexSource.getAnalogTriggerForRouting());
m_encoder.writeConfig_IndexActiveHigh(true);
}
m_counter = null;
break;
case EncodingType.k2X_val:
case EncodingType.k1X_val:
// 1x/2x decoding is implemented on top of a Counter instead of the FPGA encoder.
m_counter = new Counter(m_encodingType, m_aSource, m_bSource, reverseDirection);
break;
}
// Default scale: one distance unit per encoder pulse.
m_distancePerPulse = 1.0;
}
/**
* Encoder constructor.
* Construct a Encoder given a and b modules and channels fully specified.
* @param aSlot The a channel digital input module.
* @param aChannel The a channel digital input channel.
* @param bSlot The b channel digital input module.
* @param bChannel The b channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(final int aSlot, final int aChannel,
               final int bSlot, final int bChannel,
               boolean reverseDirection) {
    // Both digital inputs are created here, so this encoder owns them and
    // must release them in free().
    m_aSource = new DigitalInput(aSlot, aChannel);
    m_bSource = new DigitalInput(bSlot, bChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = false;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b modules and channels fully specified.
* @param aSlot The a channel digital input module.
* @param aChannel The a channel digital input channel.
* @param bSlot The b channel digital input module.
* @param bChannel The b channel digital input channel.
*/
public Encoder(final int aSlot, final int aChannel,
final int bSlot, final int bChannel) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aSlot, aChannel, bSlot, bChannel, false);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b modules and channels fully specified.
* @param aSlot The a channel digital input module.
* @param aChannel The a channel digital input channel.
* @param bSlot The b channel digital input module.
* @param bChannel The b channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
* @param encodingType either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If 4X is
* selected, then an encoder FPGA object is used and the returned counts will be 4x the encoder
* spec'd value since all rising and falling edges are counted. If 1X or 2X are selected then
* a counter object will be used and the returned value will either exactly match the spec'd count
* or be double (2x) the spec'd count.
*/
public Encoder(final int aSlot, final int aChannel,
               final int bSlot, final int bChannel,
               boolean reverseDirection, final EncodingType encodingType) {
    // Both inputs are created here, so this encoder owns (and must free) them.
    m_aSource = new DigitalInput(aSlot, aChannel);
    m_bSource = new DigitalInput(bSlot, bChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = false;
    // Validate after input creation, matching the original failure-path side effects.
    if (encodingType == null) {
        throw new NullPointerException("Given encoding type was null");
    }
    m_encodingType = encodingType;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b modules and channels fully specified.
* Using the index pulse forces 4x encoding.
* @param aSlot The a channel digital input module.
* @param aChannel The a channel digital input channel.
* @param bSlot The b channel digital input module.
* @param bChannel The b channel digital input channel.
* @param indexSlot The index channel digital input module.
* @param indexChannel The index channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(final int aSlot, final int aChannel,
               final int bSlot, final int bChannel, final int indexSlot,
               final int indexChannel,
               boolean reverseDirection) {
    // A, B and index inputs are all created here, so all three are owned
    // by this encoder and released in free().
    m_aSource = new DigitalInput(aSlot, aChannel);
    m_bSource = new DigitalInput(bSlot, bChannel);
    m_indexSource = new DigitalInput(indexSlot, indexChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = true;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b modules and channels fully specified.
* Using the index pulse forces 4x encoding.
* @param aSlot The a channel digital input module.
* @param aChannel The a channel digital input channel.
* @param bSlot The b channel digital input module.
* @param bChannel The b channel digital input channel.
* @param indexSlot The index channel digital input module.
* @param indexChannel The index channel digital input channel.
*/
public Encoder(final int aSlot, final int aChannel,
final int bSlot, final int bChannel, final int indexSlot,
final int indexChannel) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aSlot, aChannel, bSlot, bChannel, indexSlot, indexChannel, false);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels assuming the default module.
* @param aChannel The a channel digital input channel.
* @param bChannel The b channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(final int aChannel, final int bChannel, boolean reverseDirection) {
    // Channels are on the default digital module; both inputs are created
    // here and therefore owned by this encoder.
    m_aSource = new DigitalInput(aChannel);
    m_bSource = new DigitalInput(bChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = false;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels assuming the default module.
* @param aChannel The a channel digital input channel.
* @param bChannel The b channel digital input channel.
*/
public Encoder(final int aChannel, final int bChannel) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aChannel, bChannel, false);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels assuming the default module.
* @param aChannel The a channel digital input channel.
* @param bChannel The b channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
* @param encodingType either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If 4X is
* selected, then an encoder FPGA object is used and the returned counts will be 4x the encoder
* spec'd value since all rising and falling edges are counted. If 1X or 2X are selected then
* a counter object will be used and the returned value will either exactly match the spec'd count
* or be double (2x) the spec'd count.
*/
public Encoder(final int aChannel, final int bChannel, boolean reverseDirection, final EncodingType encodingType) {
    // Reject a null encoding type before allocating any digital inputs
    // (the original also checked before creating the sources).
    if (encodingType == null) {
        throw new NullPointerException("Given encoding type was null");
    }
    m_encodingType = encodingType;
    // Channels are on the default module; both inputs are owned by this encoder.
    m_aSource = new DigitalInput(aChannel);
    m_bSource = new DigitalInput(bChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = false;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels assuming the default module.
* Using an index pulse forces 4x encoding
* @param aChannel The a channel digital input channel.
* @param bChannel The b channel digital input channel.
* @param indexChannel The index channel digital input channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(final int aChannel, final int bChannel, final int indexChannel, boolean reverseDirection) {
    // All three inputs (A, B, index) are created here and owned by this encoder.
    // Note: using an index channel forces 4x decoding (see class docs above).
    m_aSource = new DigitalInput(aChannel);
    m_bSource = new DigitalInput(bChannel);
    m_indexSource = new DigitalInput(indexChannel);
    m_allocatedA = true;
    m_allocatedB = true;
    m_allocatedI = true;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels assuming the default module.
* Using an index pulse forces 4x encoding
* @param aChannel The a channel digital input channel.
* @param bChannel The b channel digital input channel.
* @param indexChannel The index channel digital input channel.
*/
public Encoder(final int aChannel, final int bChannel, final int indexChannel) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aChannel, bChannel, indexChannel, false);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels as digital inputs. This is used in the case
* where the digital inputs are shared. The Encoder class will not allocate the digital inputs
* and assume that they already are counted.
* @param aSource The source that should be used for the a channel.
* @param bSource the source that should be used for the b channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(DigitalSource aSource, DigitalSource bSource, boolean reverseDirection) {
    // The caller retains ownership of these shared sources; free() must not
    // release them, so all allocation flags stay false.
    m_allocatedA = false;
    m_allocatedB = false;
    m_allocatedI = false;
    if (aSource == null) {
        throw new NullPointerException("Digital Source A was null");
    }
    m_aSource = aSource;
    if (bSource == null) {
        throw new NullPointerException("Digital Source B was null");
    }
    m_bSource = bSource;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels as digital inputs. This is used in the case
* where the digital inputs are shared. The Encoder class will not allocate the digital inputs
* and assume that they already are counted.
* @param aSource The source that should be used for the a channel.
* @param bSource the source that should be used for the b channel.
*/
public Encoder(DigitalSource aSource, DigitalSource bSource) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aSource, bSource, false);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels as digital inputs. This is used in the case
* where the digital inputs are shared. The Encoder class will not allocate the digital inputs
* and assume that they already are counted.
* @param aSource The source that should be used for the a channel.
* @param bSource the source that should be used for the b channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
* @param encodingType either k1X, k2X, or k4X to indicate 1X, 2X or 4X decoding. If 4X is
* selected, then an encoder FPGA object is used and the returned counts will be 4x the encoder
* spec'd value since all rising and falling edges are counted. If 1X or 2X are selected then
* a counter object will be used and the returned value will either exactly match the spec'd count
* or be double (2x) the spec'd count.
*/
public Encoder(DigitalSource aSource, DigitalSource bSource, boolean reverseDirection, final EncodingType encodingType) {
    // Sources are supplied by the caller, so this instance must not free them.
    m_allocatedA = false;
    m_allocatedB = false;
    m_allocatedI = false;
    if (encodingType == null)
        throw new NullPointerException("Given encoding type was null");
    m_encodingType = encodingType;
    if (aSource == null)
        throw new NullPointerException("Digital Source A was null");
    m_aSource = aSource;
    if (bSource == null)
        throw new NullPointerException("Digital Source B was null");
    // fixed: the previous revision redundantly reassigned m_aSource here.
    m_bSource = bSource;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels as digital inputs. This is used in the case
* where the digital inputs are shared. The Encoder class will not allocate the digital inputs
* and assume that they already are counted.
* @param aSource The source that should be used for the a channel.
* @param bSource the source that should be used for the b channel.
* @param indexSource the source that should be used for the index channel.
* @param reverseDirection represents the orientation of the encoder and inverts the output values
* if necessary so forward represents positive values.
*/
public Encoder(DigitalSource aSource, DigitalSource bSource,
               DigitalSource indexSource, boolean reverseDirection) {
    // All sources (including the index) are caller-owned; free() must not
    // release them, so all allocation flags stay false.
    m_allocatedA = false;
    m_allocatedB = false;
    m_allocatedI = false;
    if (aSource == null)
        throw new NullPointerException("Digital Source A was null");
    m_aSource = aSource;
    if (bSource == null)
        throw new NullPointerException("Digital Source B was null");
    // fixed: the previous revision redundantly reassigned m_aSource here.
    m_bSource = bSource;
    // NOTE(review): indexSource is not null-checked, unlike A and B — confirm
    // whether a null index should be allowed here.
    m_indexSource = indexSource;
    initEncoder(reverseDirection);
}
/**
* Encoder constructor.
* Construct a Encoder given a and b channels as digital inputs. This is used in the case
* where the digital inputs are shared. The Encoder class will not allocate the digital inputs
* and assume that they already are counted.
* @param aSource The source that should be used for the a channel.
* @param bSource the source that should be used for the b channel.
* @param indexSource the source that should be used for the index channel.
*/
public Encoder(DigitalSource aSource, DigitalSource bSource,
DigitalSource indexSource) {
// Delegates with reverseDirection = false (forward movement counts up).
this(aSource, bSource, indexSource, false);
}
protected void free() {
// Release only the digital sources this encoder allocated itself;
// caller-supplied (shared) sources are left untouched.
if (m_aSource != null && m_allocatedA) {
m_aSource.free();
m_allocatedA = false;
}
if (m_bSource != null && m_allocatedB) {
m_bSource.free();
m_allocatedB = false;
}
if (m_indexSource != null && m_allocatedI) {
m_indexSource.free();
m_allocatedI = false;
}
m_aSource = null;
m_bSource = null;
m_indexSource = null;
// 1x/2x decoding used a Counter; 4x used an FPGA encoder slot that must
// be released and returned to the shared quadEncoders pool.
if (m_counter != null) {
m_counter.free();
m_counter = null;
} else {
m_encoder.Release();
quadEncoders.free(m_index);
m_encoder = null;
}
}
/**
* Start the Encoder.
* Starts counting pulses on the Encoder device.
*/
public void start() {
    // 4x decoding (no Counter) enables the FPGA encoder; 1x/2x starts the Counter.
    if (m_counter == null) {
        m_encoder.writeConfig_Enable(true);
    } else {
        m_counter.start();
    }
}
/**
* Stops counting pulses on the Encoder device. The value is not changed.
*/
public void stop() {
    // 4x decoding (no Counter) disables the FPGA encoder; 1x/2x stops the Counter.
    // The current count is preserved in both paths.
    if (m_counter == null) {
        m_encoder.writeConfig_Enable(false);
    } else {
        m_counter.stop();
    }
}
/**
* Gets the raw value from the encoder.
* The raw value is the actual count unscaled by the 1x, 2x, or 4x scale
* factor.
* @return Current raw count from the encoder
*/
public int getRaw() {
int value;
if (m_counter != null) {
value = m_counter.get();
} else {
value = m_encoder.readOutput_Value();
}
return value;
}
/**
* Gets the current count.
* Returns the current count on the Encoder.
* This method compensates for the decoding type.
*
* @return Current count from the Encoder adjusted for the 1x, 2x, or 4x scale factor.
*/
public int get() {
return (int) (getRaw() * decodingScaleFactor());
}
/**
* Reset the Encoder distance to zero.
* Resets the current count to zero on the encoder.
*/
public void reset() {
if (m_counter != null) {
m_counter.reset();
} else {
m_encoder.strobeReset();
}
}
    /**
     * Returns the period of the most recent Encoder pulse in seconds.
     * This method compensates for the decoding type.
     *
     * @deprecated Use getRate() in favor of this method. This returns unscaled periods and getRate() scales using value from setDistancePerPulse().
     *
     * @return Period in seconds of the most recent pulse; {@code Double.POSITIVE_INFINITY}
     *         if the encoder backend reports a stall (no recent pulse).
     */
    public double getPeriod() {
        double measuredPeriod;
        if (m_counter != null) {
            // Counter backend reports the period directly.
            measuredPeriod = m_counter.getPeriod();
        } else {
            double value;
            if (m_encoder.readTimerOutput_Stalled()) {
                // No edge within the configured stall window: treat as stopped.
                return Double.POSITIVE_INFINITY;
            } else {
                // output.Period is a fixed point number that counts by 2 (24 bits, 25 integer bits),
                // but tEncoder.readTimerOutput_Period() handles integer shift already.
                // Average the accumulated period over the number of counted edges.
                value = (double)m_encoder.readTimerOutput_Period() / (double) m_encoder.readTimerOutput_Count();
            }
            // FPGA reports the period in microseconds; convert to seconds.
            measuredPeriod = value * 1.0e-6;
        }
        // Compensate for the 1x/2x/4x decoding type.
        return measuredPeriod / decodingScaleFactor();
    }
/**
* Sets the maximum period for stopped detection.
* Sets the value that represents the maximum period of the Encoder before it will assume
* that the attached device is stopped. This timeout allows users to determine if the wheels or
* other shaft has stopped rotating.
* This method compensates for the decoding type.
*
*
* @param maxPeriod The maximum time between rising and falling edges before the FPGA will
* report the device stopped. This is expressed in seconds.
*/
public void setMaxPeriod(double maxPeriod) {
if (m_counter != null) {
m_counter.setMaxPeriod(maxPeriod * decodingScaleFactor());
} else {
m_encoder.writeTimerConfig_StallPeriod((int) (maxPeriod * 1.0e6 * decodingScaleFactor()));
}
}
/**
* Determine if the encoder is stopped.
* Using the MaxPeriod value, a boolean is returned that is true if the encoder is considered
* stopped and false if it is still moving. A stopped encoder is one where the most recent pulse
* width exceeds the MaxPeriod.
* @return True if the encoder is considered stopped.
*/
public boolean getStopped() {
if (m_counter != null) {
return m_counter.getStopped();
} else {
boolean value = m_encoder.readTimerOutput_Stalled() != false;
return value;
}
}
/**
* The last direction the encoder value changed.
* @return The last direction the encoder value changed.
*/
public boolean getDirection() {
if (m_counter != null) {
return m_counter.getDirection();
} else {
boolean value = m_encoder.readOutput_Direction();
return value;
}
}
/**
* The scale needed to convert a raw counter value into a number of encoder pulses.
*/
private double decodingScaleFactor() {
switch (m_encodingType.value) {
case EncodingType.k1X_val:
return 1.0;
case EncodingType.k2X_val:
return 0.5;
case EncodingType.k4X_val:
return 0.25;
default:
//This is never reached, EncodingType enum limits values
return 0.0;
}
}
    /**
     * Get the distance the robot has driven since the last reset.
     *
     * @return The distance driven since the last reset as scaled by the value from setDistancePerPulse().
     */
    public double getDistance() {
        // Raw count, compensated for the decoding type, converted to distance units.
        return getRaw() * decodingScaleFactor() * m_distancePerPulse;
    }
    /**
     * Get the current rate of the encoder.
     * Units are distance per second as scaled by the value from setDistancePerPulse().
     *
     * @return The current rate of the encoder. When the encoder is stalled,
     *         getPeriod() returns positive infinity, so this returns 0.0.
     */
    public double getRate() {
        return m_distancePerPulse / getPeriod();
    }
    /**
     * Set the minimum rate of the device before the hardware reports it stopped.
     *
     * @param minRate The minimum rate. The units are in distance per second as
     *        scaled by the value from setDistancePerPulse(). NOTE(review): a
     *        minRate of 0 yields an infinite max period — confirm callers never
     *        pass 0.
     */
    public void setMinRate(double minRate) {
        // Rate and period are reciprocal (scaled by distance per pulse).
        setMaxPeriod(m_distancePerPulse / minRate);
    }
    /**
     * Set the distance per pulse for this encoder.
     * This sets the multiplier used to determine the distance driven based on the
     * count value from the encoder.
     * Do not include the decoding type in this scale. The library already
     * compensates for the decoding type.
     * Set this value based on the encoder's rated Pulses per Revolution and
     * factor in gearing reductions following the encoder shaft.
     * This distance can be in any units you like, linear or angular.
     *
     * @param distancePerPulse The scale factor that will be used to convert pulses to useful units.
     */
    public void setDistancePerPulse(double distancePerPulse) {
        m_distancePerPulse = distancePerPulse;
    }
/**
* Set the direction sensing for this encoder.
* This sets the direction sensing on the encoder so that it could count in the correct
* software direction regardless of the mounting.
* @param reverseDirection true if the encoder direction should be reversed
*/
public void setReverseDirection(boolean reverseDirection) {
if (m_counter != null) {
m_counter.setReverseDirection(reverseDirection);
} else {
m_encoder.writeConfig_Reverse(reverseDirection);
}
}
    /**
     * Set which parameter of the encoder you are using as a process control variable.
     * The selected parameter determines what pidGet() returns (distance or rate).
     *
     * @param pidSource An enum to select the parameter.
     */
    public void setPIDSourceParameter(PIDSourceParameter pidSource) {
        m_pidSource = pidSource;
    }
/**
* Implement the PIDSource interface.
*
* @return The current value of the selected source parameter.
*/
public double pidGet() {
switch (m_pidSource.value) {
case PIDSourceParameter.kDistance_val:
return getDistance();
case PIDSourceParameter.kRate_val:
return getRate();
default:
return 0.0;
}
}
}
| |
/*
* Copyright (c) 2018 HERE Europe B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.here.account.http.apache;
import com.here.account.http.HttpException;
import com.here.account.http.HttpProvider;
import com.here.account.http.HttpProvider.HttpRequest;
import com.here.account.http.HttpProvider.HttpRequestAuthorizer;
import com.here.account.oauth2.HereAccessTokenProvider;
import com.here.account.oauth2.RequestExecutionException;
import com.here.account.oauth2.retry.Socket5xxExponentialRandomBackoffPolicy;
import com.here.account.oauth2.retry.RetryContext;
import com.here.account.oauth2.retry.RetryPolicy;
import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.ParseException;
import org.apache.http.client.methods.*;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.protocol.HttpContext;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.HttpURLConnection;
import java.util.*;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for ApacheHttpClientProvider: request construction for each HTTP
 * method, form-param and JSON body handling, response header mapping, and
 * client lifecycle (close) behavior. Several tests use reflection to inspect
 * the provider's internal Apache HttpRequestBase.
 */
public class ApacheHttpClientProviderTest {

    HttpRequestAuthorizer httpRequestAuthorizer;

    @Before
    public void setUp() {
        // Stub authorizer that performs no signing; these tests only exercise
        // the request-building plumbing, not authorization.
        httpRequestAuthorizer = mock(HttpRequestAuthorizer.class);
        doAnswer(new Answer<Void>() {
            @Override
            public Void answer(InvocationOnMock invocation) throws Throwable {
                //headers.put((String)invocation.getArguments()[0], (String)invocation.getArguments()[1]);
                return null;
            }
        }).when(httpRequestAuthorizer).authorize(any(HttpRequest.class), any(String.class), any(String.class),
                (Map<String, List<String>>) any(Map.class));
        httpProvider = ApacheHttpClientProvider.builder().build();
        url = "http://localhost:8080/path/to";
        formParams = null;
    }

    /** Verifies the usage shown in the class javadocs: build then close. */
    @Test
    public void test_javadocs() throws IOException {
        HttpProvider httpProvider = ApacheHttpClientProvider.builder().build();
        // use httpProvider such as with HereAccessTokenProviders...
        assertTrue("httpProvider was null", null != httpProvider);
        httpProvider.close();
    }

    // Shared fixtures, (re)initialized in setUp() or by individual tests.
    HttpRequest httpRequest;
    HttpProvider httpProvider;
    String url;
    Map<String, List<String>> formParams;

    /** A foreign HttpRequest implementation must be rejected by execute(). */
    @Test(expected = IllegalArgumentException.class)
    public void test_wrongRequestClass() throws HttpException, IOException {
        httpProvider = (ApacheHttpClientProvider) ApacheHttpClientProvider.builder().build();
        HttpRequest httpRequest = new HttpRequest() {
            @Override
            public void addAuthorizationHeader(String value) {
                // no-op
            }

            @Override
            public void addHeader(String name, String value) {
                // no-op
            }
        };
        httpProvider.execute(httpRequest);
    }

    /**
     * NOTE(review): live-network test — issues a real PUT to google.com and
     * expects a 405; will fail when offline.
     */
    @Test
    public void test_ApacheHttpClientResponse() throws HttpException, IOException {
        String requestBodyJson = "{\"foo\":\"bar\"}";
        url = "http://google.com";
        httpProvider = ApacheHttpClientProvider.builder().build();
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, requestBodyJson);
        HttpProvider.HttpResponse response = httpProvider.execute(httpRequest);
        assertEquals(HttpURLConnection.HTTP_BAD_METHOD, response.getStatusCode());
        assertNotNull("response body is null", response.getResponseBody());
        assertTrue("response content length is 0", 0<response.getContentLength());
        assertTrue("Content-Type Header should be present", response.getHeaders().get("Content-Type") != null);
    }

    /** Minimal Header implementation for stubbing response headers. */
    private static class MyHeader implements Header {
        private final String name;
        private final String value;

        public MyHeader(String name, String value) {
            this.name = name;
            this.value = value;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public String getValue() {
            return value;
        }

        @Override
        public HeaderElement[] getElements() throws ParseException {
            return new HeaderElement[0];
        }
    }

    /** Response headers from the Apache client are exposed as a name -> values map. */
    @Test
    public void test_getHeaders() throws IOException, HttpException {
        String requestBodyJson = "{\"foo\":\"bar\"}";
        url = "http://example.com";
        CloseableHttpClient closeableHttpClient = Mockito.mock(CloseableHttpClient.class);
        CloseableHttpResponse closeableHttpResponse = Mockito.mock(CloseableHttpResponse.class);
        Mockito.when(closeableHttpClient.execute(Mockito.any(HttpRequestBase.class), Mockito.any(HttpContext.class)))
                .thenReturn(closeableHttpResponse);
        // Includes a repeated header name (Set-Cookie) to exercise multi-value mapping.
        List<Header> headersList = new ArrayList<Header>();
        Header fooHeader = new MyHeader("foo", "bar");
        headersList.add(fooHeader);
        Header setCookie1Header = new MyHeader("Set-Cookie", "a=b");
        headersList.add(setCookie1Header);
        Header setCookie2Header = new MyHeader("Set-Cookie", "c=d");
        headersList.add(setCookie2Header);
        Header[] headers = headersList.toArray(new Header[headersList.size()]);
        Mockito.when(closeableHttpResponse.getAllHeaders())
                .thenReturn(headers);
        httpProvider = ApacheHttpClientProvider.builder()
                .setHttpClient(closeableHttpClient)
                .build();
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, requestBodyJson);
        HttpProvider.HttpResponse response = httpProvider.execute(httpRequest);
        assertTrue("response is null", null != response);
        Map<String, List<String>> headersMap = response.getHeaders();
        assertTrue("headersMap was null", null != headersMap);
        List<String> values = headersMap.get(fooHeader.getName());
        assertTrue("values was expected to contain " + fooHeader.getValue() + ", but was " + values,
                null != values && 1 == values.size() && fooHeader.getValue().equals(values.get(0)));
    }

    /**
     * Headers added via addHeader() must appear on the underlying Apache request.
     * Inspects the internal HttpRequestBase via the test-only exposer.
     */
    @Test
    public void test_ApacheHttpClientResponse_additionalHeaders() throws HttpException, IOException {
        String requestBodyJson = "{\"foo\":\"bar\"}";
        url = "http://google.com";
        httpProvider = ApacheHttpClientProvider.builder().build();
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, requestBodyJson);
        final String additionalHeaderName = "foohead";
        final String additionalHeaderValue = "barval";
        httpRequest.addHeader(additionalHeaderName, additionalHeaderValue);
        HttpRequestBase httpRequestBase = ApacheHttpClientProviderExposer.getHttpRequestBase(httpRequest);
        Header[] headers = httpRequestBase.getHeaders(additionalHeaderName);
        assertTrue("headers was null", null != headers);
        int expectedLength = 1;
        int length = headers.length;
        assertTrue("headers was expected length " + expectedLength + ", actual length " + length,
                expectedLength == length);
        Header header = headers[0];
        assertTrue("header was null", null != header);
        String name = header.getName();
        String value = header.getValue();
        assertTrue("name was expected " + additionalHeaderName + ", actual " + name,
                additionalHeaderName.equals(name));
        assertTrue("value was expected " + additionalHeaderValue + ", actual " + value,
                additionalHeaderValue.equals(value));
    }

    /** A malformed URL must be rejected with a descriptive IllegalArgumentException. */
    @Test
    public void test_badUri() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        url = "htp:/ asdf8080:a:z";
        try {
            httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "GET", url, formParams);
            fail("should have thrown exception for url "+url+", but didn't");
        } catch (IllegalArgumentException e) {
            // expected
            String message = e.getMessage();
            String expectedContains = "malformed URL";
            assertTrue("expected contains "+expectedContains+", actual "+message, message.contains(expectedContains));
        }
    }

    /** Form params are not permitted on methods without a request body (DELETE). */
    @Test
    public void test_methodDoesntSupportFormParams() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        formParams = new HashMap<String, List<String>>();
        formParams.put("foo", Collections.singletonList("bar"));
        try {
            httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "DELETE", url, formParams);
            fail("should have thrown exception for formParams with method DELETE, but didn't");
        } catch (IllegalArgumentException e) {
            // expected
            String message = e.getMessage();
            String expectedContains = "no formParams permitted for method";
            assertTrue("expected contains "+expectedContains+", actual "+message, message.contains(expectedContains));
        }
    }

    /** Non-null form params on PUT produce a request entity. */
    @Test
    public void test_formParamsPut() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        formParams = new HashMap<String, List<String>>();
        formParams.put("foo", Collections.singletonList("bar"));
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, formParams);
        HttpRequestBase httpRequestBase = getHttpRequestBase();
        HttpPut httpPut = (HttpPut) httpRequestBase;
        HttpEntity httpEntity = httpPut.getEntity();
        assertTrue("httpEntity was null", null != httpEntity);
    }

    /** Null form params on PUT produce no request entity. */
    @Test
    public void test_formParamsPut_null() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        formParams = null;
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, formParams);
        HttpRequestBase httpRequestBase = getHttpRequestBase();
        HttpPut httpPut = (HttpPut) httpRequestBase;
        HttpEntity httpEntity = httpPut.getEntity();
        assertTrue("httpEntity was expected null, actual "+httpEntity, null == httpEntity);
    }

    /** A JSON body is not permitted on methods without a request body (DELETE). */
    @Test
    public void test_methodDoesntSupportJson() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        String requestBodyJson = "{\"foo\":\"bar\"}";
        try {
            httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "DELETE", url, requestBodyJson);
            fail("should have thrown exception for JSON body with method DELETE, but didn't");
        } catch (IllegalArgumentException e) {
            // expected
            String message = e.getMessage();
            String expectedContains = "no JSON request body permitted for method";
            assertTrue("expected contains "+expectedContains+", actual "+message, message.contains(expectedContains));
        }
    }

    /** A non-null JSON body on PUT produces a request entity. */
    @Test
    public void test_jsonPut() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        String requestBodyJson = "{\"foo\":\"bar\"}";
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, requestBodyJson);
        HttpRequestBase httpRequestBase = getHttpRequestBase();
        HttpPut httpPut = (HttpPut) httpRequestBase;
        HttpEntity httpEntity = httpPut.getEntity();
        assertTrue("httpEntity was null", null != httpEntity);
    }

    /** A null JSON body on PUT produces no request entity. */
    @Test
    public void test_jsonPut_null() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        String requestBodyJson = null;
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, "PUT", url, requestBodyJson);
        HttpRequestBase httpRequestBase = getHttpRequestBase();
        HttpPut httpPut = (HttpPut) httpRequestBase;
        HttpEntity httpEntity = httpPut.getEntity();
        assertTrue("httpEntity was expected null, but was "+httpEntity, null == httpEntity);
    }

    /** Each supported HTTP method maps to the matching Apache request class. */
    @Test
    public void test_methods() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, ClassNotFoundException {
        verifyApacheType("GET", HttpGet.class);
        verifyApacheType("POST", HttpPost.class);
        verifyApacheType("PUT", HttpPut.class);
        verifyApacheType("DELETE", HttpDelete.class);
        verifyApacheType("HEAD", HttpHead.class);
        verifyApacheType("OPTIONS", HttpOptions.class);
        verifyApacheType("TRACE", HttpTrace.class);
        verifyApacheType("PATCH", HttpPatch.class);
        try {
            verifyApacheType("BROKENMETHOD", null);
            fail("BROKENMETHOD should have thrown IllegalArgumentException, but didn't");
        } catch (IllegalArgumentException e) {
            // expected
            String message = e.getMessage();
            String expectedContains = "no support for request method=BROKENMETHOD";
            assertTrue("expected contains "+expectedContains+", actual "+message, message.contains(expectedContains));
        }
    }

    /** A client supplied via setHttpClient must be used as-is (same instance). */
    @Test
    public void test_assignHttpClientDirectly() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        CloseableHttpClient mock = mock(CloseableHttpClient.class);
        ApacheHttpClientProvider provider = (ApacheHttpClientProvider)ApacheHttpClientProvider.builder().setHttpClient(mock).build();
        CloseableHttpClient fromProvider = extractHttpClient(provider);
        assertTrue("client must be SAME object",mock==fromProvider);
    }

    /** With setDoCloseHttpClient(false), closing the provider must not close the client. */
    @Test
    public void test_setDoCloseToFalse() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, IOException {
        CloseableHttpClient mock = mock(CloseableHttpClient.class);
        ApacheHttpClientProvider provider = (ApacheHttpClientProvider)ApacheHttpClientProvider.builder()
                .setHttpClient(mock)
                .setDoCloseHttpClient(false).build();
        provider.close();
        verify(mock,times(0)).close();
    }

    /** By default, closing the provider closes the supplied client exactly once. */
    @Test
    public void test_setDoCloseToTrue() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, IOException {
        CloseableHttpClient mock = mock(CloseableHttpClient.class);
        ApacheHttpClientProvider provider = (ApacheHttpClientProvider)ApacheHttpClientProvider.builder()
                .setHttpClient(mock).build();
        provider.close();
        verify(mock,times(1)).close();
    }

    /** Closing with a null client must not throw. */
    @Test
    public void test_setDoCloseToTrueAndHttpClientNull() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, IOException {
        ApacheHttpClientProvider provider = (ApacheHttpClientProvider)ApacheHttpClientProvider.builder().setHttpClient(null).build();
        provider.close();
    }

    /**
     * Reflectively extracts the provider's private "httpClient" field.
     *
     * @param provider the provider under test
     * @return the CloseableHttpClient the provider holds internally
     */
    protected static CloseableHttpClient extractHttpClient(ApacheHttpClientProvider provider) throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        Field httpClientField = ApacheHttpClientProvider.class.getDeclaredField("httpClient");
        assertTrue("field was null", null != httpClientField);
        httpClientField.setAccessible(true);
        Object o = httpClientField.get(provider);
        assertTrue("o was null", null != o);
        assertTrue("o wasn't an HttpRequestBase", CloseableHttpClient.class.isAssignableFrom(o.getClass()));
        return (CloseableHttpClient) o;
    }

    /**
     * Asserts that getRequest() for the given method yields the provider's private
     * request wrapper, and that the wrapped Apache request is of the expected class.
     *
     * @param method HTTP method name to request
     * @param clazz  expected Apache request class (null only for invalid methods)
     */
    protected void verifyApacheType(String method, Class<?> clazz) throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, ClassNotFoundException {
        httpRequest = httpProvider.getRequest(httpRequestAuthorizer, method, url, formParams);
        Class<?> expectedType = Class.forName("com.here.account.http.apache.ApacheHttpClientProvider$ApacheHttpClientRequest");
        Class<?> actualType = httpRequest.getClass();
        assertTrue("httpRequest was not expected "+expectedType+", actual "+actualType, expectedType.equals(actualType));
        HttpRequestBase o = getHttpRequestBase();
        expectedType = clazz;
        actualType = o.getClass();
        assertTrue("o was wrong type, expected "+expectedType+", actual "+actualType, expectedType.equals(actualType));
    }

    /**
     * Reflectively extracts the private "httpRequestBase" field of the current
     * httpRequest fixture.
     *
     * @return the underlying Apache HttpRequestBase
     */
    protected HttpRequestBase getHttpRequestBase() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException {
        Class<?> actualType = httpRequest.getClass();
        Field field = actualType.getDeclaredField("httpRequestBase");
        assertTrue("field was null", null != field);
        field.setAccessible(true);
        Object o = field.get(httpRequest);
        assertTrue("o was null", null != o);
        assertTrue("o wasn't an HttpRequestBase", HttpRequestBase.class.isAssignableFrom(o.getClass()));
        return (HttpRequestBase) o;
    }

    /**
     * With a 1ms connection timeout, the request should fail; the retry policy
     * asserts the 5xx/socket backoff policy would retry, then declines, so the
     * failure surfaces as RequestExecutionException.
     */
    @Test(expected = RequestExecutionException.class)
    public void test_getToken_short_setConnectionTimeoutInMs() throws IOException {
        HttpProvider httpProvider = ApacheHttpClientProvider.builder()
                .setConnectionTimeoutInMs(1)
                .build();
        Socket5xxExponentialRandomBackoffPolicy socket5xxExponentialRandomBackoffPolicy =
                new Socket5xxExponentialRandomBackoffPolicy();
        RetryPolicy retryPolicy = new
                RetryPolicy() {
            @Override
            public boolean shouldRetry(RetryContext retryContext) {
                boolean shouldRetry = socket5xxExponentialRandomBackoffPolicy
                        .shouldRetry(retryContext);
                assertTrue("shouldRetry was " + shouldRetry
                        + " for " + socket5xxExponentialRandomBackoffPolicy
                        + " with LastException: " + retryContext.getLastException(),
                        shouldRetry);
                return false;
            }

            @Override
            public int getNextRetryIntervalMillis(RetryContext retryContext) {
                return 0;
            }
        };
        try (
                HereAccessTokenProvider accessTokenProvider = HereAccessTokenProvider.builder()
                        .setHttpProvider(httpProvider)
                        .setRetryPolicy(retryPolicy)
                        .build();
        )
        {
            accessTokenProvider.getAccessToken();
        }
    }
}
| |
package API.amazon.mws.feeds.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://mws.amazonaws.com/doc/2009-01-01/}UpdateReportAcknowledgementsResult"/>
* <element ref="{http://mws.amazonaws.com/doc/2009-01-01/}ResponseMetadata"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
* Generated by AWS Code Generator
* <p/>
* Wed Feb 18 13:28:59 PST 2009
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"updateReportAcknowledgementsResult",
"responseMetadata"
})
@XmlRootElement(name = "UpdateReportAcknowledgementsResponse")
public class UpdateReportAcknowledgementsResponse {
@XmlElement(name = "UpdateReportAcknowledgementsResult", required = true)
protected UpdateReportAcknowledgementsResult updateReportAcknowledgementsResult;
@XmlElement(name = "ResponseMetadata", required = true)
protected ResponseMetadata responseMetadata;
/**
* Default constructor
*
*/
public UpdateReportAcknowledgementsResponse() {
super();
}
/**
* Value constructor
*
*/
public UpdateReportAcknowledgementsResponse(final UpdateReportAcknowledgementsResult updateReportAcknowledgementsResult, final ResponseMetadata responseMetadata) {
this.updateReportAcknowledgementsResult = updateReportAcknowledgementsResult;
this.responseMetadata = responseMetadata;
}
/**
* Gets the value of the updateReportAcknowledgementsResult property.
*
* @return
* possible object is
* {@link UpdateReportAcknowledgementsResult }
*
*/
public UpdateReportAcknowledgementsResult getUpdateReportAcknowledgementsResult() {
return updateReportAcknowledgementsResult;
}
/**
* Sets the value of the updateReportAcknowledgementsResult property.
*
* @param value
* allowed object is
* {@link UpdateReportAcknowledgementsResult }
*
*/
public void setUpdateReportAcknowledgementsResult(UpdateReportAcknowledgementsResult value) {
this.updateReportAcknowledgementsResult = value;
}
public boolean isSetUpdateReportAcknowledgementsResult() {
return (this.updateReportAcknowledgementsResult!= null);
}
/**
* Gets the value of the responseMetadata property.
*
* @return
* possible object is
* {@link ResponseMetadata }
*
*/
public ResponseMetadata getResponseMetadata() {
return responseMetadata;
}
/**
* Sets the value of the responseMetadata property.
*
* @param value
* allowed object is
* {@link ResponseMetadata }
*
*/
public void setResponseMetadata(ResponseMetadata value) {
this.responseMetadata = value;
}
public boolean isSetResponseMetadata() {
return (this.responseMetadata!= null);
}
/**
* Sets the value of the UpdateReportAcknowledgementsResult property.
*
* @param value
* @return
* this instance
*/
public UpdateReportAcknowledgementsResponse withUpdateReportAcknowledgementsResult(UpdateReportAcknowledgementsResult value) {
setUpdateReportAcknowledgementsResult(value);
return this;
}
/**
* Sets the value of the ResponseMetadata property.
*
* @param value
* @return
* this instance
*/
public UpdateReportAcknowledgementsResponse withResponseMetadata(ResponseMetadata value) {
setResponseMetadata(value);
return this;
}
@javax.xml.bind.annotation.XmlTransient
private ResponseHeaderMetadata responseHeaderMetadata;
public boolean isSetResponseHeaderMetadata() {
return this.responseHeaderMetadata != null;
}
public void setResponseHeaderMetadata(ResponseHeaderMetadata responseHeaderMetadata) {
this.responseHeaderMetadata = responseHeaderMetadata;
}
public ResponseHeaderMetadata getResponseHeaderMetadata() {
return responseHeaderMetadata;
}
/**
*
* XML string representation of this object
*
* @return XML String
*/
public String toXML() {
StringBuffer xml = new StringBuffer();
xml.append("<UpdateReportAcknowledgementsResponse xmlns=\"http://mws.amazonaws.com/doc/2009-01-01/\">");
if (isSetUpdateReportAcknowledgementsResult()) {
UpdateReportAcknowledgementsResult updateReportAcknowledgementsResult = getUpdateReportAcknowledgementsResult();
xml.append("<UpdateReportAcknowledgementsResult>");
xml.append(updateReportAcknowledgementsResult.toXMLFragment());
xml.append("</UpdateReportAcknowledgementsResult>");
}
if (isSetResponseMetadata()) {
ResponseMetadata responseMetadata = getResponseMetadata();
xml.append("<ResponseMetadata>");
xml.append(responseMetadata.toXMLFragment());
xml.append("</ResponseMetadata>");
}
xml.append("</UpdateReportAcknowledgementsResponse>");
return xml.toString();
}
/**
*
* Escape XML special characters
*/
private String escapeXML(String string) {
StringBuffer sb = new StringBuffer();
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '&':
sb.append("&");
break;
case '<':
sb.append("<");
break;
case '>':
sb.append(">");
break;
case '\'':
sb.append("'");
break;
case '"':
sb.append(""");
break;
default:
sb.append(c);
}
}
return sb.toString();
}
/**
*
* JSON string representation of this object
*
* @return JSON String
*/
public String toJSON() {
StringBuffer json = new StringBuffer();
json.append("{\"UpdateReportAcknowledgementsResponse\" : {");
json.append(quoteJSON("@xmlns"));
json.append(" : ");
json.append(quoteJSON("http://mws.amazonaws.com/doc/2009-01-01/"));
boolean first = true;
json.append(", ");
if (isSetUpdateReportAcknowledgementsResult()) {
if (!first) json.append(", ");
json.append("\"UpdateReportAcknowledgementsResult\" : {");
UpdateReportAcknowledgementsResult updateReportAcknowledgementsResult = getUpdateReportAcknowledgementsResult();
json.append(updateReportAcknowledgementsResult.toJSONFragment());
json.append("}");
first = false;
}
if (isSetResponseMetadata()) {
if (!first) json.append(", ");
json.append("\"ResponseMetadata\" : {");
ResponseMetadata responseMetadata = getResponseMetadata();
json.append(responseMetadata.toJSONFragment());
json.append("}");
first = false;
}
json.append("}");
json.append("}");
return json.toString();
}
/**
*
* Quote JSON string
*/
private String quoteJSON(String string) {
StringBuffer sb = new StringBuffer();
sb.append("\"");
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '"':
sb.append("\\\"");
break;
case '\\':
sb.append("\\\\");
break;
case '/':
sb.append("\\/");
break;
case '\b':
sb.append("\\b");
break;
case '\f':
sb.append("\\f");
break;
case '\n':
sb.append("\\n");
break;
case '\r':
sb.append("\\r");
break;
case '\t':
sb.append("\\t");
break;
default:
if (c < ' ') {
sb.append("\\u" + String.format("%03x", Integer.valueOf(c)));
} else {
sb.append(c);
}
}
}
sb.append("\"");
return sb.toString();
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mr_kitten;
/**
*
* @author Arya
*/
public class Interface extends javax.swing.JDialog {
private static Game g = new Game();
private boolean questDory,questBedroom,quest1 = false;
    /**
     * Creates new form Interface.
     *
     * @param parent the owning frame for this dialog
     * @param modal  whether the dialog blocks input to other top-level windows
     */
    public Interface(java.awt.Frame parent, boolean modal) {
        super(parent, modal);
        // Build the NetBeans-generated UI.
        initComponents();
    }
/**
 * This method is called from within the constructor to initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 */
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    // NOTE(review): generated by the NetBeans form editor from the .form file;
    // any hand edit here will be lost on the next regeneration.

    // Widget construction.
    YesNo = new javax.swing.JDialog();
    jRadioButton1 = new javax.swing.JRadioButton();
    jRadioButton2 = new javax.swing.JRadioButton();
    jLabel2 = new javax.swing.JLabel();
    jLabel1 = new javax.swing.JLabel();
    jScrollPane1 = new javax.swing.JScrollPane();
    jTextArea1 = new javax.swing.JTextArea();
    Shutdown = new javax.swing.JButton();
    Explore = new javax.swing.JButton();
    Help = new javax.swing.JButton();
    go_up = new javax.swing.JButton();
    go_down = new javax.swing.JButton();
    go_west = new javax.swing.JButton();
    go_east = new javax.swing.JButton();
    go_north = new javax.swing.JButton();
    go_south = new javax.swing.JButton();
    Look = new javax.swing.JButton();
    Items = new javax.swing.JButton();
    jButtonA = new javax.swing.JButton();
    jButtonB = new javax.swing.JButton();
    jButtonC = new javax.swing.JButton();
    jButtonD = new javax.swing.JButton();

    // Secondary Yes/No dialog contents.
    jRadioButton1.setText("Oh Yes!");
    jRadioButton1.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            jRadioButton1ActionPerformed(evt);
        }
    });
    jRadioButton2.setText("Doesn't sound like Fun... No!");
    jRadioButton2.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            jRadioButton2ActionPerformed(evt);
        }
    });
    jLabel2.setText("Your call now!");

    // Layout of the Yes/No dialog.
    javax.swing.GroupLayout YesNoLayout = new javax.swing.GroupLayout(YesNo.getContentPane());
    YesNo.getContentPane().setLayout(YesNoLayout);
    YesNoLayout.setHorizontalGroup(
        YesNoLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(YesNoLayout.createSequentialGroup()
            .addGap(60, 60, 60)
            .addComponent(jRadioButton1)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 91, Short.MAX_VALUE)
            .addComponent(jRadioButton2)
            .addGap(21, 21, 21))
        .addGroup(YesNoLayout.createSequentialGroup()
            .addGap(152, 152, 152)
            .addComponent(jLabel2)
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    YesNoLayout.setVerticalGroup(
        YesNoLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, YesNoLayout.createSequentialGroup()
            .addGap(57, 57, 57)
            .addComponent(jLabel2)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 78, Short.MAX_VALUE)
            .addGroup(YesNoLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jRadioButton1)
                .addComponent(jRadioButton2))
            .addGap(128, 128, 128))
    );

    // Main dialog contents and listener wiring.
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/livingroom.png"))); // NOI18N
    jTextArea1.setColumns(20);
    jTextArea1.setRows(5);
    jTextArea1.setName(""); // NOI18N
    jScrollPane1.setViewportView(jTextArea1);
    Shutdown.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/shutdown.png"))); // NOI18N
    Shutdown.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            ShutdownMouseClicked(evt);
        }
    });
    Explore.setText("Explore");
    Explore.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            ExploreMouseClicked(evt);
        }
    });
    Help.setText("Help");
    Help.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            HelpMouseClicked(evt);
        }
    });
    go_up.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_up.png"))); // NOI18N
    go_up.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_upMouseClicked(evt);
        }
    });
    go_down.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_down.png"))); // NOI18N
    go_down.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_downMouseClicked(evt);
        }
    });
    go_west.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_left.png"))); // NOI18N
    go_west.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_westMouseClicked(evt);
        }
    });
    go_east.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_right.png"))); // NOI18N
    go_east.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_eastMouseClicked(evt);
        }
    });
    go_north.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_up.png"))); // NOI18N
    go_north.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_northMouseClicked(evt);
        }
    });
    go_north.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            go_northActionPerformed(evt);
        }
    });
    go_south.setIcon(new javax.swing.ImageIcon(getClass().getResource("/mr_kitten/Arrow_down.png"))); // NOI18N
    go_south.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            go_southMouseClicked(evt);
        }
    });
    Look.setText("Look");
    Look.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            LookMouseClicked(evt);
        }
    });
    Items.setText("Items");
    Items.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            ItemsMouseClicked(evt);
        }
    });
    // Quest answer buttons start hidden; the Game logic reveals them.
    jButtonA.setText("a");
    jButtonA.setVisible(false);
    jButtonA.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            jButtonAMouseClicked(evt);
        }
    });
    jButtonB.setText("b");
    jButtonB.setVisible(false);
    jButtonB.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            jButtonBMouseClicked(evt);
        }
    });
    jButtonC.setText("c");
    jButtonC.setVisible(false);
    jButtonC.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            jButtonCMouseClicked(evt);
        }
    });
    jButtonC.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            jButtonCActionPerformed(evt);
        }
    });
    jButtonD.setText("d");
    jButtonD.setVisible(false);
    jButtonD.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            jButtonDMouseClicked(evt);
        }
    });

    // Layout of the main dialog.
    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
            .addComponent(Shutdown, javax.swing.GroupLayout.PREFERRED_SIZE, 45, javax.swing.GroupLayout.PREFERRED_SIZE))
        .addGroup(layout.createSequentialGroup()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(layout.createSequentialGroup()
                    .addGap(297, 297, 297)
                    .addComponent(go_down, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(0, 0, Short.MAX_VALUE))
                .addGroup(layout.createSequentialGroup()
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(layout.createSequentialGroup()
                            .addContainerGap()
                            .addComponent(jLabel1))
                        .addGroup(layout.createSequentialGroup()
                            .addGap(293, 293, 293)
                            .addComponent(go_up, javax.swing.GroupLayout.PREFERRED_SIZE, 50, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addGap(18, 18, 18)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 449, Short.MAX_VALUE)
                        .addGroup(layout.createSequentialGroup()
                            .addGap(0, 0, Short.MAX_VALUE)
                            .addComponent(Look))
                        .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
                            .addComponent(jButtonA)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(Explore))
                        .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
                            .addComponent(jButtonB)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(Items))))
                .addGroup(layout.createSequentialGroup()
                    .addGap(230, 230, 230)
                    .addComponent(go_west, javax.swing.GroupLayout.PREFERRED_SIZE, 42, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addComponent(go_north)
                        .addComponent(go_south))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(go_east, javax.swing.GroupLayout.PREFERRED_SIZE, 41, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(241, 241, 241)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(layout.createSequentialGroup()
                            .addComponent(jButtonC)
                            .addGap(0, 0, Short.MAX_VALUE))
                        .addGroup(layout.createSequentialGroup()
                            .addComponent(jButtonD)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(Help)))))
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(layout.createSequentialGroup()
                    .addGap(9, 9, 9)
                    .addComponent(Shutdown, javax.swing.GroupLayout.PREFERRED_SIZE, 48, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 259, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(18, 18, 18)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jButtonA)
                        .addComponent(Explore))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jButtonB)
                        .addComponent(Items))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(Look)
                        .addComponent(jButtonC))
                    .addGap(12, 12, 12)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jButtonD)
                        .addComponent(Help))
                    .addGap(0, 0, Short.MAX_VALUE))
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                    .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(jLabel1)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(layout.createSequentialGroup()
                            .addGap(74, 74, 74)
                            .addComponent(go_east)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED))
                        .addGroup(layout.createSequentialGroup()
                            .addComponent(go_up, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                .addGroup(layout.createSequentialGroup()
                                    .addComponent(go_north)
                                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                    .addComponent(go_south))
                                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                                    .addComponent(go_west, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addGap(32, 32, 32)))))))
            .addComponent(go_down, javax.swing.GroupLayout.PREFERRED_SIZE, 37, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addGap(34, 34, 34))
    );
    pack();
}// </editor-fold>//GEN-END:initComponents
/**
 * Terminates the application when the shutdown button is clicked.
 *
 * @param evt the mouse event (unused)
 */
private void ShutdownMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_ShutdownMouseClicked
    // Exit with status 0: this is a normal, user-requested shutdown, not an
    // error condition (the previous status 1 signalled abnormal termination).
    System.exit(0);
}//GEN-LAST:event_ShutdownMouseClicked
/**
 * Starts the exploration of the current room when the explore button is
 * clicked; the Game renders the outcome through this dialog.
 *
 * @param evt the mouse event (unused)
 */
private void ExploreMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_ExploreMouseClicked
    g.exploreRoomTest(this);
}//GEN-LAST:event_ExploreMouseClicked
/**
 * Shows the game's help text in the main text area.
 *
 * @param evt the mouse event (unused)
 */
private void HelpMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_HelpMouseClicked
    jTextArea1.setText(g.printHelp());
}//GEN-LAST:event_HelpMouseClicked
// Room navigation (deplacement entre les rooms).
/**
 * Moves the player in the given direction and refreshes the room picture.
 * Shared by the six directional arrow handlers below, which previously each
 * duplicated these two lines.
 *
 * @param direction the direction keyword understood by the Game ("up",
 *                  "down", "west", "east", "north" or "south")
 */
private void moveAndRefresh(final String direction) {
    g.goRoom(new Command("go", direction));
    jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource(g.getCurrentRoom().getImage())));
}
private void go_upMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_upMouseClicked
    moveAndRefresh("up");
}//GEN-LAST:event_go_upMouseClicked
private void go_downMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_downMouseClicked
    moveAndRefresh("down");
}//GEN-LAST:event_go_downMouseClicked
private void go_westMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_westMouseClicked
    moveAndRefresh("west");
}//GEN-LAST:event_go_westMouseClicked
private void go_eastMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_eastMouseClicked
    moveAndRefresh("east");
}//GEN-LAST:event_go_eastMouseClicked
private void go_northMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_northMouseClicked
    moveAndRefresh("north");
}//GEN-LAST:event_go_northMouseClicked
private void go_southMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_go_southMouseClicked
    moveAndRefresh("south");
}//GEN-LAST:event_go_southMouseClicked
private void go_northActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_go_northActionPerformed
    // Intentionally empty: movement is handled by go_northMouseClicked; this
    // generated action hook is unused.
}//GEN-LAST:event_go_northActionPerformed
private void jRadioButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButton2ActionPerformed
    // Intentionally empty: the Yes/No dialog is not wired up yet (see the
    // commented-out getYesNo helper further down in this file).
}//GEN-LAST:event_jRadioButton2ActionPerformed
private void jRadioButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButton1ActionPerformed
    // Intentionally empty: the Yes/No dialog is not wired up yet (see the
    // commented-out getYesNo helper further down in this file).
}//GEN-LAST:event_jRadioButton1ActionPerformed
/**
 * Shows the description of the room the player is currently in.
 *
 * @param evt the mouse event (unused)
 */
private void LookMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_LookMouseClicked
    // Use the accessor instead of reaching into the public field
    // g.currentRoom, for consistency with every other handler in this class.
    jTextArea1.setText(g.getCurrentRoom().getDescription());
}//GEN-LAST:event_LookMouseClicked
/**
 * Shows the player's inventory when the items button is clicked.
 *
 * @param evt the mouse event (unused)
 */
private void ItemsMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_ItemsMouseClicked
    g.inventory(this);
}//GEN-LAST:event_ItemsMouseClicked
/**
 * Handles a click on quest answer button A for the room the player is
 * currently in.  Each room poses a question in the text area; this handler
 * prints the outcome of picking answer A and hides the answer buttons once
 * the choice is made.
 *
 * @param evt the mouse event (unused)
 */
private void jButtonAMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButtonAMouseClicked
    String currentRoom = g.getCurrentRoom().getName();
    //quest from living room
    if(currentRoom.equals("livingRoom")){
        if(quest1 == false) {
            // NOTE(review): this answer text is overwritten by the conclusion
            // text two statements below before the user can read it -- confirm
            // whether it was meant to be appended instead.
            jTextArea1.setText(ExpInfo.printlivingRoom_answerA());
            g.getPlayers().grabItem("home key");
            jTextArea1.setText(ExpInfo.printlivingRoom_conclu());
            quest1 = true;
        }
        else{
            jTextArea1.setText("Hum...Yum !"
                    + "\n You received the home key !");
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
        }
    }
    //quest from the kitchen
    else if (currentRoom.equals("kitchen")){
        jTextArea1.setText(ExpInfo.printKitchen_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the bedroom
    else if (currentRoom.equals("bedroom")){
        // Two phases: a four-answer question before questBedroom is solved
        // (flag set by jButtonCMouseClicked), then a two-answer follow-up.
        if (!questBedroom){
            jTextArea1.setText(ExpInfo.printbedroom_answerA());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
            jButtonC.setVisible(false);
            jButtonD.setVisible(false);
        }
        else {
            jTextArea1.setText(ExpInfo.printbedroom_answerCa());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
        }
    }
    //quest from the street1
    else if (currentRoom.equals("street1")){
        jTextArea1.setText(ExpInfo.printStreet1_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the street2
    else if (currentRoom.equals("street2")){
        jTextArea1.setText(ExpInfo.printStreet2_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the petshop
    else if (currentRoom.equals("petshop")){
        jTextArea1.setText(ExpInfo.printPetshop_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the dory room
    else if (currentRoom.equals("dory")){
        // The first click on A flips questDory and keeps the buttons visible;
        // a second click yields the "good answer" text and ends the quest.
        if (!questDory){
            jTextArea1.setText(ExpInfo.printdory_answerA());
            questDory = true;
        }
        else {
            jTextArea1.setText(ExpInfo.printdory_goodanswerA());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
            jButtonC.setVisible(false);
            jButtonD.setVisible(false);
        }
    }
    //quest from the fish palace
    else if (currentRoom.equals("theFishPalace")){
        jTextArea1.setText(ExpInfo.printfishpalace_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
        g.getPlayers().grabItem("algea");
    }
    //quest from the star wars
    else if (currentRoom.equals("starWars")){
        jTextArea1.setText(ExpInfo.printvador_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the end
    else if (currentRoom.equals("theEnd")){
        jTextArea1.setText(ExpInfo.printend_answerA());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
}//GEN-LAST:event_jButtonAMouseClicked
/**
 * Handles a click on quest answer button B for the room the player is
 * currently in; mirrors jButtonAMouseClicked with the "B" answer texts.
 *
 * @param evt the mouse event (unused)
 */
private void jButtonBMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButtonBMouseClicked
    String currentRoom = g.getCurrentRoom().getName();
    //quest from living room
    if(currentRoom.equals("livingRoom")){
        if(quest1 == false) {
            // NOTE(review): this answer text is immediately overwritten by the
            // conclusion text below -- confirm whether it was meant to be
            // appended instead.
            jTextArea1.setText(ExpInfo.printlivingRoom_answerB());
            jTextArea1.setText(ExpInfo.printlivingRoom_conclu());
            quest1 = true;
        }
        else{
            jTextArea1.setText("Hum...Yum !");
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
        }
    }
    //quest from the kitchen
    else if (currentRoom.equals("kitchen")){
        jTextArea1.setText(ExpInfo.printKitchen_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the bedroom
    else if (currentRoom.equals("bedroom")){
        // Two phases, like in jButtonAMouseClicked (questBedroom is set by
        // jButtonCMouseClicked).
        if (!questBedroom){
            jTextArea1.setText(ExpInfo.printbedroom_answerB());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
            jButtonC.setVisible(false);
            jButtonD.setVisible(false);
        }
        else {
            jTextArea1.setText(ExpInfo.printbedroom_answerCb());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
        }
    }
    //quest from the street1
    else if (currentRoom.equals("street1")){
        jTextArea1.setText(ExpInfo.printStreet1_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
        g.getPlayers().grabItem("potionCareMin");
    }
    //quest from the street2
    else if (currentRoom.equals("street2")){
        jTextArea1.setText(ExpInfo.printStreet2_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the petshop
    else if (currentRoom.equals("petshop")){
        jTextArea1.setText(ExpInfo.printPetshop_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the dory room
    else if (currentRoom.equals("dory")){
        if (!questDory){
            jTextArea1.setText(ExpInfo.printdory_answerB());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
            jButtonC.setVisible(false);
            jButtonD.setVisible(false);
        }
        else {
            jTextArea1.setText(ExpInfo.printdory_goodanswerB());
            jButtonA.setVisible(false);
            jButtonB.setVisible(false);
            jButtonC.setVisible(false);
            jButtonD.setVisible(false);
        }
    }
    //quest from the fish palace
    else if (currentRoom.equals("theFishPalace")){
        jTextArea1.setText(ExpInfo.printfishpalace_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the star wars
    else if (currentRoom.equals("starWars")){
        jTextArea1.setText(ExpInfo.printvador_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
    //quest from the end
    else if (currentRoom.equals("theEnd")){
        jTextArea1.setText(ExpInfo.printend_answerB());
        jButtonA.setVisible(false);
        jButtonB.setVisible(false);
    }
}//GEN-LAST:event_jButtonBMouseClicked
/**
 * Handles a click on quest answer button C.  Only the bedroom and the dory
 * room use a third answer; any other room ignores the click.
 *
 * @param evt the mouse event (unused)
 */
private void jButtonCMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButtonCMouseClicked
    final String room = g.getCurrentRoom().getName();
    switch (room) {
        case "bedroom":
            jTextArea1.setText(ExpInfo.printbedroom_answerC());
            questBedroom = true;
            break;
        case "dory":
            if (questDory) {
                jTextArea1.setText(ExpInfo.printdory_goodanswerC());
                g.getPlayers().grabItem("Artefact Of True Vision");
            } else {
                jTextArea1.setText(ExpInfo.printdory_answerC());
            }
            break;
        default:
            // No quest wired to button C in this room.
            return;
    }
    // Either quest branch retires all four answer buttons.
    jButtonA.setVisible(false);
    jButtonB.setVisible(false);
    jButtonC.setVisible(false);
    jButtonD.setVisible(false);
}//GEN-LAST:event_jButtonCMouseClicked
/**
 * Handles a click on quest answer button D.  Only the bedroom and the dory
 * room use a fourth answer; any other room ignores the click.
 *
 * @param evt the mouse event (unused)
 */
private void jButtonDMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButtonDMouseClicked
    final String room = g.getCurrentRoom().getName();
    switch (room) {
        case "bedroom":
            jTextArea1.setText(ExpInfo.printbedroom_answerD());
            break;
        case "dory":
            jTextArea1.setText(questDory
                    ? ExpInfo.printdory_goodanswerD()
                    : ExpInfo.printdory_answerD());
            break;
        default:
            // No quest wired to button D in this room.
            return;
    }
    // Either quest branch retires all four answer buttons.
    jButtonA.setVisible(false);
    jButtonB.setVisible(false);
    jButtonC.setVisible(false);
    jButtonD.setVisible(false);
}//GEN-LAST:event_jButtonDMouseClicked
private void jButtonCActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonCActionPerformed
    // Intentionally empty: clicks are handled by jButtonCMouseClicked; this
    // generated action hook is unused.
}//GEN-LAST:event_jButtonCActionPerformed
/*private javax.swing.JDialog getYesNo (){
YesNo.setVisible(true);
return YesNo;
}*/
/**
 * Stand-alone entry point: switches to the Nimbus look and feel when it is
 * available, then shows the game dialog on the event dispatch thread.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String args[]) {
    /* Prefer the Nimbus look and feel; on any failure simply log it and keep
     * the default look and feel (Nimbus only exists since Java SE 6). */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo lafInfo : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(lafInfo.getName())) {
                javax.swing.UIManager.setLookAndFeel(lafInfo.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
            | javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(Interface.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    /* Create and display the dialog on the EDT; closing it ends the JVM. */
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            Interface dialog = new Interface(new javax.swing.JFrame(), true);
            dialog.addWindowListener(new java.awt.event.WindowAdapter() {
                @Override
                public void windowClosing(java.awt.event.WindowEvent e) {
                    System.exit(0);
                }
            });
            dialog.setVisible(true);
        }
    });
}
// Variables declaration - do not modify//GEN-BEGIN:variables
// Action buttons (public -- presumably accessed by the Game class, which is
// handed `this`; TODO confirm before narrowing visibility).
public javax.swing.JButton Explore;
public javax.swing.JButton Help;
public javax.swing.JButton Items;
public javax.swing.JButton Look;
private javax.swing.JButton Shutdown;
// Secondary Yes/No dialog (currently unused; see the commented-out getYesNo).
private javax.swing.JDialog YesNo;
// Directional navigation arrows.
private javax.swing.JButton go_down;
private javax.swing.JButton go_east;
private javax.swing.JButton go_north;
private javax.swing.JButton go_south;
private javax.swing.JButton go_up;
private javax.swing.JButton go_west;
// Quest answer buttons, hidden until a quest shows them.
public javax.swing.JButton jButtonA;
public javax.swing.JButton jButtonB;
public javax.swing.JButton jButtonC;
public javax.swing.JButton jButtonD;
// Room picture and caption labels.
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JRadioButton jRadioButton1;
private javax.swing.JRadioButton jRadioButton2;
private javax.swing.JScrollPane jScrollPane1;
// Main narration/output area (public -- written to by the Game as well).
public javax.swing.JTextArea jTextArea1;
// End of variables declaration//GEN-END:variables
}
| |
package org.apereo.cas.adaptors.u2f.storage;
import org.apereo.cas.configuration.model.support.mfa.u2f.U2FDynamoDbMultifactorProperties;
import org.apereo.cas.dynamodb.DynamoDbQueryBuilder;
import org.apereo.cas.dynamodb.DynamoDbTableUtils;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.DateTimeUtils;
import org.apereo.cas.util.LoggingUtils;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.tuple.Pair;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.ComparisonOperator;
import software.amazon.awssdk.services.dynamodb.model.Condition;
import software.amazon.awssdk.services.dynamodb.model.CreateTableRequest;
import software.amazon.awssdk.services.dynamodb.model.DeleteItemRequest;
import software.amazon.awssdk.services.dynamodb.model.DeleteTableRequest;
import software.amazon.awssdk.services.dynamodb.model.DescribeTableRequest;
import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement;
import software.amazon.awssdk.services.dynamodb.model.KeyType;
import software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughput;
import software.amazon.awssdk.services.dynamodb.model.PutItemRequest;
import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType;
import software.amazon.awssdk.services.dynamodb.model.ScanRequest;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* This is {@link U2FDynamoDbFacilitator}.
*
* @author Misagh Moayyed
* @since 6.3.0
*/
@Slf4j
public class U2FDynamoDbFacilitator {
private final U2FDynamoDbMultifactorProperties dynamoDbProperties;
private final DynamoDbClient amazonDynamoDBClient;
/**
 * Builds the facilitator and, unless disabled in the properties, eagerly
 * creates the underlying DynamoDb table (optionally dropping it first).
 *
 * @param dynamoDbProperties   the U2F DynamoDb multifactor settings
 * @param amazonDynamoDBClient the low-level DynamoDb client
 */
public U2FDynamoDbFacilitator(final U2FDynamoDbMultifactorProperties dynamoDbProperties,
                              final DynamoDbClient amazonDynamoDBClient) {
    this.dynamoDbProperties = dynamoDbProperties;
    this.amazonDynamoDBClient = amazonDynamoDBClient;
    // Table provisioning can be skipped entirely (e.g. when managed externally).
    if (!dynamoDbProperties.isPreventTableCreationOnStartup()) {
        createTable(dynamoDbProperties.isDropTablesOnStartup());
    }
}
/**
 * Creates the DynamoDb table that stores the U2F device registrations.
 * The table is keyed on the numeric {@code ID} attribute (hash key) and uses
 * the read/write capacity configured in the properties; the method waits
 * (via {@code DynamoDbTableUtils}) for the table to become active.
 *
 * @param deleteTables whether to drop any pre-existing table first
 */
@SneakyThrows
public void createTable(final boolean deleteTables) {
    LOGGER.debug("Attempting to create DynamoDb table");
    val throughput = ProvisionedThroughput.builder()
        .readCapacityUnits(dynamoDbProperties.getReadCapacity())
        .writeCapacityUnits(dynamoDbProperties.getWriteCapacity())
        .build();
    val request = CreateTableRequest.builder()
        .attributeDefinitions(AttributeDefinition.builder()
            .attributeName(ColumnNames.ID.getColumnName())
            .attributeType(ScalarAttributeType.N).build())
        .keySchema(KeySchemaElement.builder().attributeName(ColumnNames.ID.getColumnName()).keyType(KeyType.HASH).build())
        .provisionedThroughput(throughput)
        .tableName(dynamoDbProperties.getTableName())
        .build();
    if (deleteTables) {
        val delete = DeleteTableRequest.builder().tableName(request.tableName()).build();
        LOGGER.debug("Sending delete request [{}] to remove table if necessary", delete);
        DynamoDbTableUtils.deleteTableIfExists(amazonDynamoDBClient, delete);
    }
    LOGGER.debug("Sending create request [{}] to create table", request);
    DynamoDbTableUtils.createTableIfNotExists(amazonDynamoDBClient, request);
    LOGGER.debug("Waiting until table [{}] becomes active...", request.tableName());
    DynamoDbTableUtils.waitUntilActive(amazonDynamoDBClient, request.tableName());
    val describeTableRequest = DescribeTableRequest.builder().tableName(request.tableName()).build();
    LOGGER.debug("Sending request [{}] to obtain table description...", describeTableRequest);
    val tableDescription = amazonDynamoDBClient.describeTable(describeTableRequest).table();
    LOGGER.debug("Located newly created table with description: [{}]", tableDescription);
}
/**
 * Fetches all device registrations created on or after the given date.
 *
 * @param expirationDate the cut-off date (inclusive)
 * @return the matching registrations, ordered by creation date
 */
public Collection<? extends U2FDeviceRegistration> fetchDevicesFrom(final LocalDate expirationDate) {
    val cutoffMillis = expirationDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
    val createdOnOrAfter = DynamoDbQueryBuilder.builder()
        .key(ColumnNames.CREATED_DATE.getColumnName())
        .operator(ComparisonOperator.GE)
        .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(cutoffMillis)).build()))
        .build();
    return getRecordsByKeys(createdOnOrAfter);
}
/**
 * Fetches the given user's device registrations created on or after the
 * given date.
 *
 * @param expirationDate the cut-off date (inclusive)
 * @param username       the account whose devices should be returned
 * @return the matching registrations, ordered by creation date
 */
public Collection<? extends U2FDeviceRegistration> fetchDevicesFrom(final LocalDate expirationDate, final String username) {
    val cutoffMillis = expirationDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
    val createdOnOrAfter = DynamoDbQueryBuilder.builder()
        .key(ColumnNames.CREATED_DATE.getColumnName())
        .operator(ComparisonOperator.GE)
        .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(cutoffMillis)).build()))
        .build();
    val ownedByUser = DynamoDbQueryBuilder.builder()
        .key(ColumnNames.USERNAME.getColumnName())
        .operator(ComparisonOperator.EQ)
        .attributeValue(List.of(AttributeValue.builder().s(username).build()))
        .build();
    return getRecordsByKeys(createdOnOrAfter, ownedByUser);
}
/**
 * Persists the given device registration into the DynamoDb table.
 *
 * @param registration the registration to store
 * @return the same registration instance, for chaining
 */
public U2FDeviceRegistration save(final U2FDeviceRegistration registration) {
    val attributeMap = buildTableAttributeValuesMap(registration);
    val request = PutItemRequest.builder()
        .tableName(dynamoDbProperties.getTableName())
        .item(attributeMap)
        .build();
    LOGGER.debug("Submitting put request [{}] for record [{}]", request, registration);
    val result = amazonDynamoDBClient.putItem(request);
    LOGGER.debug("Record added with result [{}]", result);
    return registration;
}
/**
 * Removes all device registrations created on or before the given date.
 *
 * @param expirationDate the cut-off date (inclusive)
 */
public void removeDevicesBefore(final LocalDate expirationDate) {
    val cutoffMillis = expirationDate.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
    val expired = getRecordsByKeys(DynamoDbQueryBuilder.builder()
        .key(ColumnNames.CREATED_DATE.getColumnName())
        .operator(ComparisonOperator.LE)
        .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(cutoffMillis)).build()))
        .build());
    // Delete each expired record individually, keyed by its numeric id.
    for (val device : expired) {
        val deleteRequest = DeleteItemRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(),
                AttributeValue.builder().n(String.valueOf(device.getId())).build()))
            .build();
        amazonDynamoDBClient.deleteItem(deleteRequest);
    }
}
/**
 * Removes all registered devices by delegating to {@link #createTable(boolean)}
 * with {@code deleteTables=true}, which drops and re-creates the whole table
 * rather than deleting items one by one.
 */
public void removeDevices() {
    createTable(true);
}
/**
 * Removes the registered device matching both the given record id and the
 * given username.
 *
 * @param username the account owning the device
 * @param id       the record id of the device registration
 */
public void removeDevice(final String username, final long id) {
    val matchesId = DynamoDbQueryBuilder.builder()
        .key(ColumnNames.ID.getColumnName())
        .operator(ComparisonOperator.EQ)
        .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(id)).build()))
        .build();
    val matchesUser = DynamoDbQueryBuilder.builder()
        .key(ColumnNames.USERNAME.getColumnName())
        .operator(ComparisonOperator.EQ)
        .attributeValue(List.of(AttributeValue.builder().s(username).build()))
        .build();
    for (val device : getRecordsByKeys(matchesId, matchesUser)) {
        val deleteRequest = DeleteItemRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(),
                AttributeValue.builder().n(String.valueOf(device.getId())).build()))
            .build();
        amazonDynamoDBClient.deleteItem(deleteRequest);
    }
}
/**
 * Builds the DynamoDb attribute-value map for one device registration:
 * numeric id and creation timestamp (epoch millis at UTC start of day),
 * plus the username and the opaque record payload as strings.
 *
 * @param registration the registration to map
 * @return the attribute values keyed by column name
 */
private static Map<String, AttributeValue> buildTableAttributeValuesMap(final U2FDeviceRegistration registration) {
    val createdMillis = registration.getCreatedDate().atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
    val attributes = new HashMap<String, AttributeValue>();
    attributes.put(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(registration.getId())).build());
    attributes.put(ColumnNames.USERNAME.getColumnName(), AttributeValue.builder().s(registration.getUsername()).build());
    attributes.put(ColumnNames.RECORD.getColumnName(), AttributeValue.builder().s(registration.getRecord()).build());
    attributes.put(ColumnNames.CREATED_DATE.getColumnName(), AttributeValue.builder().n(String.valueOf(createdMillis)).build());
    LOGGER.debug("Created attribute values [{}] based on [{}]", attributes, registration);
    return attributes;
}
/**
 * Scan the registration table and collect all records matching every given query filter.
 * Results are ordered by creation date (ascending, natural {@code LocalDateTime} order).
 * Any failure is logged and swallowed, yielding an empty set.
 * <p>
 * Note: the former {@code @SneakyThrows} annotation was removed — it was dead weight,
 * since the local {@code catch (Exception)} already swallows every checked exception.
 *
 * @param queries one filter per attribute; keys must be distinct, otherwise
 *                {@code Collectors.toMap} throws on the duplicate key
 * @return matching registration records ordered by creation date, or an empty set on error
 */
private Set<U2FDeviceRegistration> getRecordsByKeys(final DynamoDbQueryBuilder... queries) {
    try {
        // "val" (not "var") for consistency with the rest of this class.
        val scanRequest = ScanRequest.builder()
            .tableName(dynamoDbProperties.getTableName())
            .scanFilter(Arrays.stream(queries)
                .map(query -> {
                    val cond = Condition.builder()
                        .comparisonOperator(query.getOperator())
                        .attributeValueList(query.getAttributeValue())
                        .build();
                    return Pair.of(query.getKey(), cond);
                })
                .collect(Collectors.toMap(Pair::getKey, Pair::getValue)))
            .build();
        LOGGER.debug("Submitting request [{}] to get record with keys [{}]", scanRequest, queries);
        val items = amazonDynamoDBClient.scan(scanRequest).items();
        return items
            .stream()
            .map(item -> {
                val id = Long.parseLong(item.get(ColumnNames.ID.getColumnName()).n());
                val username = item.get(ColumnNames.USERNAME.getColumnName()).s();
                val record = item.get(ColumnNames.RECORD.getColumnName()).s();
                val time = Long.parseLong(item.get(ColumnNames.CREATED_DATE.getColumnName()).n());
                return U2FDeviceRegistration.builder()
                    .id(id)
                    .username(username)
                    .record(record)
                    .createdDate(DateTimeUtils.localDateTime(time))
                    .build();
            })
            .sorted(Comparator.comparing(U2FDeviceRegistration::getCreatedDate))
            .collect(Collectors.toCollection(LinkedHashSet::new));
    } catch (final Exception e) {
        LoggingUtils.error(LOGGER, e);
    }
    return new HashSet<>(0);
}
/**
 * Column (attribute) names used by the DynamoDB device-registration table.
 */
@Getter
@RequiredArgsConstructor
private enum ColumnNames {
    /** Numeric record identifier. */
    ID("id"),
    /** Username owning the registration record. */
    USERNAME("username"),
    /** Registration record payload (stored as a string attribute). */
    RECORD("record"),
    /** Creation timestamp, stored as epoch milliseconds at start-of-day UTC. */
    CREATED_DATE("createdDate");

    // Physical attribute name in the DynamoDB table.
    private final String columnName;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
package org.bondlib;
import java.io.IOException;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
/**
 * A type descriptor in the Bond type system that implements behavior specific to a particular type, such
 * as schema inference, initialization, serialization and deserialization. A Bond type can be a primitive,
 * an enum, a struct, or a specialization of a generic type (including containers, nullable, or generic
 * Bond structs). Each Bond type is associated with a single {@link Class} instance of the value, although
 * multiple Bond types can use the same value class (e.g. specializations of a generic type, or using the
 * same Java type to represent multiple primitive types in Bond such as signed vs. unsigned integers).
 *
 * @param <T> the class of the value
 */
public abstract class BondType<T> {

    // The global type cache, which maps a Bond type descriptor to itself and is used to cache type descriptor
    // objects. Caching helps reduce memory footprint when working with generic types such as lists, nullables,
    // or user-defined generic structs. The implementation of type descriptors and generated classes makes sure
    // that there exists only one copy of each type descriptor (excluding short-lived temporary objects).
    //
    // Type descriptor caching is based on identity of a type descriptor, implemented by the equals and hashCode
    // methods, and which consists of the following two items:
    //   1. The Java class that implements the type descriptor (and thus inherits from BondType). The class
    //      identity alone is sufficient for type descriptors of non-generic Bond type since these are singletons.
    //   2. For specializations of generic types, the list of type descriptors for the generic type arguments. Due
    //      to Java type erasure, a Class object alone can't provide information on the generic type arguments and
    //      hence this additional list is necessary to precisely identify a type.
    //
    // Type descriptors for native Bond non-generic types (primitive types and Blob) do not need to be cached
    // since they are accessible from public static fields defined in the BondTypes class. The same is true about
    // generated enum types, whose type descriptors are accessible from public static fields of the generated enum
    // class. Therefore the type cache doesn't contain these type descriptors and implementation makes sure they
    // are never added to the cache (to slightly reduce cache footprint and a chance of hash collisions).
    //
    // Type descriptors for non-generic generated struct types are also accessible from public static fields
    // in their classes and it can be argued as above that they also do not need to be cached. However, these
    // static fields are not used during initialization of struct type descriptors since they may not yet be
    // initialized (e.g. a struct type may reference itself through a nullable field). Therefore, these type
    // descriptors are obtained using protected StructBondType.getStructType method which uses the type cache,
    // meaning that type descriptors for non-generic generated struct types are still cached.
    //
    // Type descriptors of all generic type specializations (either Bond native types or generated user-defined
    // types) are always cached. Methods such as nullableOf, listOf, or makeGenericType always return a cached
    // instance of the type descriptor.
    private static final ConcurrentHashMap<BondType<?>, BondType<?>> typeCache =
            new ConcurrentHashMap<BondType<?>, BondType<?>>();

    /**
     * Package-private constructor (extending BondType hierarchy by user code is not supported).
     */
    BondType() {
    }

    /**
     * Gets the simple name of the Bond type, which excludes the namespace.
     * The names of generic type parameters, if any, are not included in the name.
     *
     * @return short simple type name
     */
    public abstract String getName();

    /**
     * Gets the qualified name of the Bond type, which includes the namespace.
     * The names of generic type parameters, if any, are not included in the name.
     *
     * @return short namespace-qualified type name
     */
    public abstract String getQualifiedName();

    /**
     * Gets the full name of the Bond type, which includes namespaces and generic type parameters.
     * This method is used in exception reporting.
     *
     * @return full type name
     */
    public final String getFullName() {
        StringBuilder sb = new StringBuilder();
        sb.append(this.getQualifiedName());
        BondType<?>[] typeArguments = this.getGenericTypeArguments();
        if (typeArguments != null) {
            // Append generic type arguments recursively, e.g. "ns.Foo<ns.Bar, ns.Baz>".
            // A generic type always has at least one argument (see getGenericTypeArguments).
            sb.append("<");
            sb.append(typeArguments[0].getFullName());
            for (int i = 1; i < typeArguments.length; ++i) {
                sb.append(", ");
                sb.append(typeArguments[i].getFullName());
            }
            sb.append(">");
        }
        return sb.toString();
    }

    /**
     * Gets the {@link BondDataType} value for this type.
     *
     * @return the bond data type enumeration value
     */
    public abstract BondDataType getBondDataType();

    /**
     * Gets the {@link Class} instance for the values (objects) of this type.
     * The actual class of a value of this Bond type may not be the same as the returned class,
     * but it is always assignable to it (i.e. a subclass or a Java primitive type).
     * For Bond types that are represented by Java primitive types, this method returns
     * the class of the boxed value (e.g. {@link Long} for int64). To get the class object
     * that actually represents the Java primitive type (i.e. {@link Long#TYPE} for int64),
     * use method {@link #getPrimitiveValueClass()} instead.
     *
     * @return the class object
     */
    public abstract Class<T> getValueClass();

    /**
     * Gets the {@link Class} instance for the Java primitive values (non-objects) of this type
     * or null if a Java primitive type does not exist. This method complements the
     * {@link #getValueClass()} method which returns the classes for object instances.
     * Thus, for example, this method returns {@link Long#TYPE} for Bond int64 data type whereas
     * the other method returns the class object for the {@link Long} boxed type.
     *
     * @return the class object for a primitive type or null if a primitive type doesn't exist
     */
    public abstract Class<T> getPrimitiveValueClass();

    /**
     * Gets a value indicating whether values of this type can be assigned to null.
     * In Bond, only values explicitly declared as nullable can be assigned to the null value.
     *
     * @return whether value of this type can be assigned to null
     */
    public abstract boolean isNullableType();

    /**
     * Indicates whether this type is a generic type (either a native Bond container or a user-defined struct)
     * that is specialized with one or more generic type arguments. This method returns true if and only if
     * the {@link #getGenericTypeArguments()} method returns a non-empty array with one or more elements.
     *
     * @return true if this type is a specialization of a generic type
     */
    public abstract boolean isGenericType();

    /**
     * Gets a new array instance containing the type descriptors of one or more generic type arguments or null
     * if this type is not a specialization of a generic type. This method returns null if the type is not
     * generic (i.e. if the {@link #isGenericType()} method returns false). Please note that this method never
     * returns an empty array since a generic type must have at least one unbound generic type parameter.
     *
     * @return an array containing the descriptors of the generic type arguments or null if not a generic type
     */
    public abstract BondType<?>[] getGenericTypeArguments();

    /**
     * Returns the default value of this type as a shared instance for immutable primitive types
     * or a new instance for all other types. For non-nullable structs and containers this method
     * returns an initialized value equivalent to invoking the public parameterless constructor
     * for collections and non-generic structs, or the public single-argument constructor for generic
     * structs. For nullable values this method always returns null.
     *
     * @return the default initialized value of this type
     */
    protected abstract T newDefaultValue();

    /**
     * Returns a deep clone of the argument value.
     *
     * @param value the argument value.
     * @return a deep clone of the argument.
     */
    protected abstract T cloneValue(T value);

    /**
     * Serializes a value of this type into a protocol writer.
     * This method is intended for objects and is not suitable for Java primitive (non-object) types
     * since its argument is an object and would have to be boxed. To serialize primitive values use the
     * static helper method defined in each singleton class for a primitive Java type.
     *
     * @param context contains the runtime context of the serialization
     * @param value   the value to serialize (boxed if necessary)
     * @throws IOException if an I/O error occurred
     */
    protected abstract void serializeValue(SerializationContext context, T value) throws IOException;

    /**
     * Deserializes a value of this type from a tagged protocol reader.
     * This method is intended for objects and is not suitable for Java primitive (non-object) types
     * since its return value is an object and would have to be unboxed. To deserialize primitive values use
     * the static helper method defined in each singleton class for a primitive Java type.
     *
     * @param context contains the runtime context of the deserialization
     * @return the deserialized value (boxed if necessary)
     * @throws IOException if an I/O error occurred
     */
    protected abstract T deserializeValue(TaggedDeserializationContext context) throws IOException;

    /**
     * Deserializes a value of this type from an untagged protocol reader and a typedef.
     * This method is intended for objects and is not suitable for Java primitive (non-object) types
     * since its return value is an object and would have to be unboxed. To deserialize primitive values use
     * the static helper method defined in each singleton class for a primitive Java type.
     *
     * @param context contains the runtime context of the deserialization
     * @param typeDef the typedef to use for deserialization
     * @return the deserialized value (boxed if necessary)
     * @throws IOException if an I/O error occurred
     */
    protected abstract T deserializeValue(
            UntaggedDeserializationContext context,
            TypeDef typeDef) throws IOException;

    /**
     * Serializes a struct field of this type into a protocol writer, including field metadata.
     * This method is intended for objects and is not suitable for fields with Java primitive (non-object)
     * types since its argument is an object and would have to be boxed. To serialize fields with primitive
     * values use the static helper method defined in each singleton class for a primitive type.
     *
     * @param context contains the runtime context of the serialization
     * @param value   the value to serialize (boxed if necessary)
     * @param field   descriptor of the field
     * @throws IOException if an I/O error occurred
     */
    protected abstract void serializeField(
            SerializationContext context,
            T value,
            StructBondType.StructField<T> field) throws IOException;

    /**
     * Serializes a struct field of this type with "nothing" as the default value into a protocol writer,
     * including field metadata. This method is intended for objects and is not suitable for fields with Java
     * primitive (non-object) types since its argument is a generic {@link SomethingObject} for objects instead
     * of more specific implementation for Java primitive types. To serialize fields with primitive values use
     * the static helper method defined in each singleton class for a primitive type.
     *
     * @param context contains the runtime context of the serialization
     * @param value   the value to serialize
     * @param field   descriptor of the field
     * @throws IOException if an I/O error occurred
     */
    protected final void serializeSomethingField(
            SerializationContext context,
            SomethingObject<T> value,
            StructBondType.StructField<T> field) throws IOException {
        if (value != null) {
            serializeField(context, value.getValue(), field);
        } else if (!field.isOptional()) {
            // A null SomethingObject means the field is set to "nothing"; that is only
            // legal for optional fields, so fail serialization here (throws).
            Throw.raiseNonOptionalFieldValueSetToNothingError(field);
        }
    }

    /**
     * Deserializes a struct field of this type from a tagged protocol reader, excluding field metadata
     * which is assumed to be already deserialized earlier and whose value is available in the
     * {@link TaggedDeserializationContext#readFieldResult} field of the passed context argument.
     * This method is intended for objects and is not suitable for fields with Java primitive (non-object)
     * types since its return value is an object and would have to be unboxed. To deserialize fields with primitive
     * values use the static helper method defined in each singleton class for a primitive type.
     *
     * @param context contains the runtime context of the deserialization
     * @param field   descriptor of the field
     * @return the deserialized value (boxed if necessary)
     * @throws IOException if an I/O error occurred
     */
    protected abstract T deserializeField(
            TaggedDeserializationContext context,
            StructBondType.StructField<T> field) throws IOException;

    /**
     * Deserializes a struct field of this type with "nothing" as the default value from a tagged protocol reader,
     * excluding field metadata which is assumed to be already deserialized earlier and whose value is available
     * in the {@link TaggedDeserializationContext#readFieldResult} field of the passed context argument.
     * This method is intended for objects and is not suitable for fields with Java primitive (non-object)
     * types since its return value is a generic {@link SomethingObject} for objects instead of more specific
     * implementation for Java primitive types. To deserialize fields with primitive values use the static
     * helper method defined in each singleton class for a primitive type.
     *
     * @param context contains the runtime context of the deserialization
     * @param field   descriptor of the field
     * @return the deserialized value
     * @throws IOException if an I/O error occurred
     */
    protected final SomethingObject<T> deserializeSomethingField(
            TaggedDeserializationContext context,
            StructBondType.StructField<T> field) throws IOException {
        return Something.wrap(this.deserializeField(context, field));
    }

    @Override
    public final String toString() {
        return this.getFullName();
    }

    /**
     * Used when building {@link SchemaDef} objects. Contains tuple (struct def, zero-based position
     * as it was discovered when traversing type tree).
     */
    static final class StructDefOrdinalTuple {
        final StructDef structDef;
        final int ordinal;

        StructDefOrdinalTuple(StructDef structDef, int ordinal) {
            this.structDef = structDef;
            this.ordinal = ordinal;
        }
    }

    /**
     * A helper to create schema that initializes a new type def instance and maintains a hash map
     * of all distinct struct defs discovered so far.
     *
     * @param structDefMap maps struct bond types to their struct defs
     * @return a new type def for the current Bond type
     */
    abstract TypeDef createSchemaTypeDef(HashMap<StructBondType<?>, StructDefOrdinalTuple> structDefMap);

    /**
     * Checks if the argument value is null and throws an exception if it is.
     * This method adds the information about the current type to the exception thrown,
     * which signifies that the value was expected to be of the current type but was null.
     *
     * @param value the value
     * @throws InvalidBondDataException if the value is null
     */
    final void verifyNonNullableValueIsNotSetToNull(
            T value) throws InvalidBondDataException {
        if (value == null) {
            Throw.raiseNonNullableValueSetToNullError(this.getFullName());
        }
    }

    /**
     * Checks if the argument value is null and throws an exception if it is.
     * This method adds the information about the current type and the given field to the exception thrown,
     * which signifies that the value of the field was expected to be of the current type but was null.
     *
     * @param value the value
     * @param field the field
     * @throws InvalidBondDataException if the value is null
     */
    final void verifySerializedNonNullableFieldIsNotSetToNull(
            T value,
            StructBondType.StructField<T> field) throws InvalidBondDataException {
        // delegate to the value verification method and chain the thrown exception (if any);
        // this approach lets us preserve the original exception pertaining to the null value,
        // wrapped by the exception specifically pertaining to the failure to serialize a field
        try {
            this.verifyNonNullableValueIsNotSetToNull(value);
        } catch (InvalidBondDataException e) {
            Throw.raiseStructFieldSerializationError(false, field, e, null);
        }
    }

    /**
     * Gets a type descriptor for the Bond "nullable" container type.
     * Nullable values are represented by the same class but may be set to null.
     *
     * @param valueType a type descriptor for the underlying value class
     * @param <TValue>  the class of the underlying value
     * @return a type descriptor instance
     */
    public static <TValue> NullableBondType<TValue> nullableOf(
            BondType<TValue> valueType) {
        ArgumentHelper.ensureNotNull(valueType, "valueType");
        return (NullableBondType<TValue>) getCachedType(new NullableBondType<TValue>(valueType));
    }

    /**
     * Gets a type descriptor for the Bond "bonded" container type.
     *
     * @param valueType a type descriptor for the underlying value class
     * @param <TStruct> the class of the underlying struct value
     * @return a type descriptor instance
     */
    public static <TStruct extends BondSerializable> BondedBondType<TStruct> bondedOf(
            StructBondType<TStruct> valueType) {
        ArgumentHelper.ensureNotNull(valueType, "valueType");
        return (BondedBondType<TStruct>) getCachedType(new BondedBondType<TStruct>(valueType));
    }

    /**
     * Gets a type descriptor for the Bond "bonded" container type. Throws an exception
     * if the value type is not a Bond struct. This method is intended for generated code.
     * This method returns {@link BondType} instead of more specific {@link BondedBondType},
     * due to the constraint on the latter's generic type parameter which must be a struct
     * type (i.e. derive from {@link StructBondType}). Since the type parameter TStruct is not
     * constrained in this method, that generic type constraint can't be satisfied. It shall be
     * noted however, that this method returns only instances of {@link BondedBondType} with
     * a valid Bond struct value underneath. All other cases result in throwing an exception.
     *
     * @param valueType a type descriptor for the underlying value class
     * @param <TStruct> the class of the underlying struct value
     * @return a type descriptor instance
     * @exception IllegalArgumentException if the argument is not a Bond struct type
     */
    protected static <TStruct> BondType<Bonded<TStruct>> bondedOf(
            BondType<TStruct> valueType) {
        ArgumentHelper.ensureNotNull(valueType, "valueType");
        if (!(valueType instanceof StructBondType)) {
            Throw.raiseInvalidBondedValueTypeError(valueType);
        }
        // It's not possible to delegate to the public bondedOf method using a generic type, since that method
        // constrains the TStruct type parameter. Thus, the call is made using non-generic type and the result
        // is upcast to a generic type to match the method's return type.
        @SuppressWarnings("unchecked")
        BondType<Bonded<TStruct>> upcastBondedType = (BondType<Bonded<TStruct>>) bondedOf((StructBondType) valueType);
        return upcastBondedType;
    }

    /**
     * Gets a type descriptor for the Bond "vector" container type.
     *
     * @param elementType a type descriptor for the element value class
     * @param <TElement>  the class of the element values
     * @return a type descriptor instance
     */
    public static <TElement> VectorBondType<TElement> vectorOf(
            BondType<TElement> elementType) {
        ArgumentHelper.ensureNotNull(elementType, "elementType");
        return (VectorBondType<TElement>) getCachedType(new VectorBondType<TElement>(elementType));
    }

    /**
     * Gets a type descriptor for the Bond "list" container type.
     *
     * @param elementType a type descriptor for the element value class
     * @param <TElement>  the class of the element values
     * @return a type descriptor instance
     */
    public static <TElement> ListBondType<TElement> listOf(
            BondType<TElement> elementType) {
        ArgumentHelper.ensureNotNull(elementType, "elementType");
        return (ListBondType<TElement>) getCachedType(new ListBondType<TElement>(elementType));
    }

    /**
     * Gets a type descriptor for the Bond "set" container type.
     *
     * @param elementType a type descriptor for the element value class
     * @param <TElement>  the class of the element values
     * @return a type descriptor instance
     */
    public static <TElement> SetBondType<TElement> setOf(
            PrimitiveBondType<TElement> elementType) {
        ArgumentHelper.ensureNotNull(elementType, "elementType");
        return (SetBondType<TElement>) getCachedType(new SetBondType<TElement>(elementType));
    }

    /**
     * Gets a type descriptor for the Bond "set" container type. Throws an exception
     * if the element type is not a primitive Bond type. This method is intended for generated code.
     *
     * @param elementType a type descriptor for the element value class
     * @param <TElement>  the class of the element values
     * @return a type descriptor instance
     * @exception IllegalArgumentException if the argument is not a primitive Bond type
     */
    protected static <TElement> SetBondType<TElement> setOf(
            BondType<TElement> elementType) {
        ArgumentHelper.ensureNotNull(elementType, "elementType");
        if (!(elementType instanceof PrimitiveBondType)) {
            Throw.raiseInvalidSetElementTypeError(elementType);
        }
        return setOf((PrimitiveBondType<TElement>) elementType);
    }

    /**
     * Gets a type descriptor for the Bond "map" container type.
     *
     * @param keyType   a type descriptor for the map key class
     * @param valueType a type descriptor for the mapped values class
     * @param <TKey>    the class of the map keys
     * @param <TValue>  the class of the mapped values
     * @return a type descriptor instance
     */
    public static <TKey, TValue> MapBondType<TKey, TValue> mapOf(
            PrimitiveBondType<TKey> keyType, BondType<TValue> valueType) {
        ArgumentHelper.ensureNotNull(keyType, "keyType");
        ArgumentHelper.ensureNotNull(valueType, "valueType");
        return (MapBondType<TKey, TValue>) getCachedType(new MapBondType<TKey, TValue>(keyType, valueType));
    }

    /**
     * Gets a type descriptor for the Bond "map" container type. Throws an exception
     * if the key type is not a primitive Bond type. This method is intended for generated code.
     *
     * @param keyType   a type descriptor for the map key class
     * @param valueType a type descriptor for the mapped values class
     * @param <TKey>    the class of the map keys
     * @param <TValue>  the class of the mapped values
     * @return a type descriptor instance
     * @exception IllegalArgumentException if the key type argument is not a primitive Bond type
     */
    public static <TKey, TValue> MapBondType<TKey, TValue> mapOf(
            BondType<TKey> keyType, BondType<TValue> valueType) {
        ArgumentHelper.ensureNotNull(keyType, "keyType");
        ArgumentHelper.ensureNotNull(valueType, "valueType");
        if (!(keyType instanceof PrimitiveBondType)) {
            Throw.raiseInvalidMapKeyTypeError(keyType);
        }
        return mapOf((PrimitiveBondType<TKey>) keyType, valueType);
    }

    /**
     * Returns a cached type descriptor that is equal to the argument, or caches the argument otherwise.
     * The method returns a non-null reference to the type descriptor that is equal to the argument
     * (or the same as the argument if called for the first time with that class).
     *
     * @param type the type descriptor
     * @param <T>  the Bond value class
     * @return a cached type descriptor, never null
     */
    protected static <T> BondType<T> getCachedType(BondType<T> type) {
        // putIfAbsent returns the previously-cached descriptor, or null if this call cached the argument;
        // in either case exactly one descriptor instance per distinct type survives in the cache.
        @SuppressWarnings("unchecked")
        BondType<T> cachedValue = (BondType<T>) typeCache.putIfAbsent(type, type);
        if (cachedValue == null) {
            cachedValue = type;
        }
        return cachedValue;
    }

    /**
     * Contains runtime state of serialization.
     */
    protected static final class SerializationContext {
        public final ProtocolWriter writer;

        public SerializationContext(ProtocolWriter writer) {
            this.writer = writer;
        }
    }

    /**
     * Contains runtime state of tagged deserialization.
     */
    protected static final class TaggedDeserializationContext {
        public final TaggedProtocolReader reader;
        public final TaggedProtocolReader.ReadFieldResult readFieldResult =
                new TaggedProtocolReader.ReadFieldResult();
        public final TaggedProtocolReader.ReadContainerResult readContainerResult =
                new TaggedProtocolReader.ReadContainerResult();

        public TaggedDeserializationContext(TaggedProtocolReader reader) {
            this.reader = reader;
        }
    }

    /**
     * Contains runtime state of untagged deserialization.
     */
    protected static final class UntaggedDeserializationContext {
        public final UntaggedProtocolReader reader;
        public final SchemaDef schema;

        public UntaggedDeserializationContext(UntaggedProtocolReader reader, SchemaDef schema) {
            this.reader = reader;
            this.schema = schema;
        }
    }
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CdnConfiguration.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
* A {@link CdnConfiguration} encapsulates information about where
* and how to ingest and deliver
* content enabled for DAI (Dynamic Ad Insertion).
*/
public class CdnConfiguration implements java.io.Serializable {
/* The unique ID of the {@link CdnConfiguration}. This value is read-only and is
 * assigned by Google. */
private java.lang.Long id;

/* The name of the {@link CdnConfiguration}. This value is required to create a
 * CDN configuration and has a maximum length of 255 characters. */
private java.lang.String name;

/* The type of CDN configuration represented by this {@link CdnConfiguration}.
 * This value is required to create a CDN configuration. */
private com.google.api.ads.admanager.axis.v202111.CdnConfigurationType cdnConfigurationType;

/* Parameters about this CDN configuration as a source of content. This facilitates
 * fetching the original content for conditioning and delivering the original content
 * as part of a modified stream. */
private com.google.api.ads.admanager.axis.v202111.SourceContentConfiguration sourceContentConfiguration;

/* The status of the CDN configuration. */
private com.google.api.ads.admanager.axis.v202111.CdnConfigurationStatus cdnConfigurationStatus;
/** Creates an empty {@link CdnConfiguration}; all fields remain null until set. */
public CdnConfiguration() {
}
/**
 * Creates a fully-populated {@link CdnConfiguration}.
 *
 * @param id unique ID (read-only, assigned by Google)
 * @param name configuration name (required; max 255 characters)
 * @param cdnConfigurationType type of CDN configuration (required)
 * @param sourceContentConfiguration source-content parameters for ingest/delivery
 * @param cdnConfigurationStatus status of the CDN configuration
 */
public CdnConfiguration(
        java.lang.Long id,
        java.lang.String name,
        com.google.api.ads.admanager.axis.v202111.CdnConfigurationType cdnConfigurationType,
        com.google.api.ads.admanager.axis.v202111.SourceContentConfiguration sourceContentConfiguration,
        com.google.api.ads.admanager.axis.v202111.CdnConfigurationStatus cdnConfigurationStatus) {
    // Field assignments are independent; order is irrelevant.
    this.cdnConfigurationStatus = cdnConfigurationStatus;
    this.cdnConfigurationType = cdnConfigurationType;
    this.id = id;
    this.name = name;
    this.sourceContentConfiguration = sourceContentConfiguration;
}
/**
 * Returns a debug representation listing every non-null field of this configuration.
 */
@Override
public String toString() {
    com.google.common.base.MoreObjects.ToStringHelper helper =
            com.google.common.base.MoreObjects.toStringHelper(this.getClass()).omitNullValues();
    // Field add-order is preserved from the generated code since it determines the output text.
    helper.add("cdnConfigurationStatus", getCdnConfigurationStatus());
    helper.add("cdnConfigurationType", getCdnConfigurationType());
    helper.add("id", getId());
    helper.add("name", getName());
    helper.add("sourceContentConfiguration", getSourceContentConfiguration());
    return helper.toString();
}
/**
 * Gets the unique ID of this {@link CdnConfiguration}.
 * The ID is read-only and is assigned by Google.
 *
 * @return the unique ID, or null if not yet assigned
 */
public java.lang.Long getId() {
    return id;
}
/**
 * Sets the unique ID of this {@link CdnConfiguration}.
 * The ID is read-only on the server side and is assigned by Google.
 *
 * @param id the unique ID
 */
public void setId(java.lang.Long id) {
    this.id = id;
}
/**
 * Gets the name of this {@link CdnConfiguration}.
 * The name is required to create a CDN configuration and has a maximum length of 255 characters.
 *
 * @return the configuration name
 */
public java.lang.String getName() {
    return name;
}
/**
 * Sets the name of this {@link CdnConfiguration}.
 * The name is required to create a CDN configuration and has a maximum length of 255 characters.
 *
 * @param name the configuration name
 */
public void setName(java.lang.String name) {
    this.name = name;
}
/**
 * Gets the type of CDN configuration represented by this {@link CdnConfiguration}.
 * This value is required to create a CDN configuration.
 *
 * @return the CDN configuration type
 */
public com.google.api.ads.admanager.axis.v202111.CdnConfigurationType getCdnConfigurationType() {
    return cdnConfigurationType;
}
/**
 * Sets the type of CDN configuration represented by this {@link CdnConfiguration}.
 * This value is required to create a CDN configuration.
 *
 * @param cdnConfigurationType the CDN configuration type
 */
public void setCdnConfigurationType(com.google.api.ads.admanager.axis.v202111.CdnConfigurationType cdnConfigurationType) {
    this.cdnConfigurationType = cdnConfigurationType;
}
/**
 * Gets the parameters about this CDN configuration as a source of content.
 * This facilitates fetching the original content for conditioning and delivering
 * the original content as part of a modified stream.
 *
 * @return the source-content configuration
 */
public com.google.api.ads.admanager.axis.v202111.SourceContentConfiguration getSourceContentConfiguration() {
    return sourceContentConfiguration;
}
/**
 * Sets the parameters about this CDN configuration as a source of content.
 * This facilitates fetching the original content for conditioning and delivering
 * the original content as part of a modified stream.
 *
 * @param sourceContentConfiguration the source-content configuration
 */
public void setSourceContentConfiguration(com.google.api.ads.admanager.axis.v202111.SourceContentConfiguration sourceContentConfiguration) {
    this.sourceContentConfiguration = sourceContentConfiguration;
}
/**
 * Returns the status of this CDN configuration.
 *
 * @return the current {@code CdnConfigurationStatus}
 */
public com.google.api.ads.admanager.axis.v202111.CdnConfigurationStatus getCdnConfigurationStatus() {
    return this.cdnConfigurationStatus;
}
/**
 * Sets the status of this CDN configuration.
 *
 * @param cdnConfigurationStatus the status value to store
 */
public void setCdnConfigurationStatus(com.google.api.ads.admanager.axis.v202111.CdnConfigurationStatus cdnConfigurationStatus) {
    this.cdnConfigurationStatus = cdnConfigurationStatus;
}
/**
 * Re-entrancy guard used by {@link #equals(java.lang.Object)} to break cycles
 * when comparing object graphs that reference each other.
 */
private java.lang.Object __equalsCalc = null;

/**
 * Field-by-field equality comparison, cycle-safe via {@link #__equalsCalc}.
 * <p>
 * Fix: the generated code tested {@code obj == null} <em>after</em> the
 * {@code instanceof} check (dead code, since {@code instanceof} is already
 * {@code false} for {@code null}) and performed the identity test after the
 * cast. The identity fast-path now runs first; behavior is unchanged.
 *
 * @param obj the object to compare against
 * @return {@code true} if all five fields compare equal
 */
public synchronized boolean equals(java.lang.Object obj) {
    if (this == obj) return true;
    // instanceof is false for null, so this also rejects null arguments.
    if (!(obj instanceof CdnConfiguration)) return false;
    CdnConfiguration other = (CdnConfiguration) obj;
    if (__equalsCalc != null) {
        // Already comparing against some object on this thread: treat only the
        // identical reference as equal to terminate the recursion.
        return (__equalsCalc == obj);
    }
    __equalsCalc = obj;
    boolean _equals;
    _equals = true &&
        ((this.id==null && other.getId()==null) ||
         (this.id!=null &&
          this.id.equals(other.getId()))) &&
        ((this.name==null && other.getName()==null) ||
         (this.name!=null &&
          this.name.equals(other.getName()))) &&
        ((this.cdnConfigurationType==null && other.getCdnConfigurationType()==null) ||
         (this.cdnConfigurationType!=null &&
          this.cdnConfigurationType.equals(other.getCdnConfigurationType()))) &&
        ((this.sourceContentConfiguration==null && other.getSourceContentConfiguration()==null) ||
         (this.sourceContentConfiguration!=null &&
          this.sourceContentConfiguration.equals(other.getSourceContentConfiguration()))) &&
        ((this.cdnConfigurationStatus==null && other.getCdnConfigurationStatus()==null) ||
         (this.cdnConfigurationStatus!=null &&
          this.cdnConfigurationStatus.equals(other.getCdnConfigurationStatus())));
    __equalsCalc = null;
    return _equals;
}
/**
 * Re-entrancy guard used by {@link #hashCode()} to break cycles when hashing
 * object graphs that reference each other.
 */
private boolean __hashCodeCalc = false;

/**
 * Hash code consistent with {@link #equals(java.lang.Object)}: one plus the
 * sum of the hash codes of all non-null fields. Returns 0 if re-entered.
 */
public synchronized int hashCode() {
    if (__hashCodeCalc) {
        return 0;
    }
    __hashCodeCalc = true;
    int _hashCode = 1;
    java.lang.Object[] parts = {
        getId(),
        getName(),
        getCdnConfigurationType(),
        getSourceContentConfiguration(),
        getCdnConfigurationStatus()
    };
    for (java.lang.Object part : parts) {
        if (part != null) {
            _hashCode += part.hashCode();
        }
    }
    __hashCodeCalc = false;
    return _hashCode;
}
// Type metadata: maps this bean's fields onto the WSDL-defined XML schema type.
private static org.apache.axis.description.TypeDesc typeDesc =
    new org.apache.axis.description.TypeDesc(CdnConfiguration.class, true);

/** Target namespace shared by this type and every one of its element fields. */
private static final java.lang.String WSDL_NS =
    "https://www.google.com/apis/ads/publisher/v202111";

/**
 * Registers one element field on {@link #typeDesc}. Every field of this bean
 * uses the same settings (XML name equal to the Java field name in
 * {@link #WSDL_NS}, minOccurs 0, not nillable), so the five previously
 * duplicated registration stanzas are collapsed into this helper.
 *
 * @param fieldName Java field name, also used as the XML local name
 * @param xmlType   qualified name of the field's XML schema type
 */
private static void registerField(java.lang.String fieldName, javax.xml.namespace.QName xmlType) {
    org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
    elemField.setFieldName(fieldName);
    elemField.setXmlName(new javax.xml.namespace.QName(WSDL_NS, fieldName));
    elemField.setXmlType(xmlType);
    elemField.setMinOccurs(0);
    elemField.setNillable(false);
    typeDesc.addFieldDesc(elemField);
}

static {
    typeDesc.setXmlType(new javax.xml.namespace.QName(WSDL_NS, "CdnConfiguration"));
    // Registration order matters to Axis serialization; keep it identical to the WSDL.
    registerField("id", new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
    registerField("name", new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
    registerField("cdnConfigurationType", new javax.xml.namespace.QName(WSDL_NS, "CdnConfigurationType"));
    registerField("sourceContentConfiguration", new javax.xml.namespace.QName(WSDL_NS, "SourceContentConfiguration"));
    registerField("cdnConfigurationStatus", new javax.xml.namespace.QName(WSDL_NS, "CdnConfigurationStatus"));
}
/**
 * Return type metadata object.
 *
 * @return the Axis {@code TypeDesc} describing this bean's XML mapping
 */
public static org.apache.axis.description.TypeDesc getTypeDesc() {
    return typeDesc;
}
/**
 * Get Custom Serializer.
 *
 * @param mechType  serialization mechanism (unused; required by the Axis factory contract)
 * @param _javaType Java class being serialized
 * @param _xmlType  XML schema type to serialize as
 * @return a bean serializer bound to this type's metadata
 */
public static org.apache.axis.encoding.Serializer getSerializer(
        java.lang.String mechType,
        java.lang.Class _javaType,
        javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.BeanSerializer(_javaType, _xmlType, typeDesc);
}
/**
 * Get Custom Deserializer.
 *
 * @param mechType  serialization mechanism (unused; required by the Axis factory contract)
 * @param _javaType Java class being deserialized into
 * @param _xmlType  XML schema type to deserialize from
 * @return a bean deserializer bound to this type's metadata
 */
public static org.apache.axis.encoding.Deserializer getDeserializer(
        java.lang.String mechType,
        java.lang.Class _javaType,
        javax.xml.namespace.QName _xmlType) {
    return new org.apache.axis.encoding.ser.BeanDeserializer(_javaType, _xmlType, typeDesc);
}
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.ShapeNodeEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.graphics.Color;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AdditionalOutputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EastPointerShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.SendMediatorEndpointOutputConnectorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
/**
 * Edit part for the endpoint output connector of a Send mediator. Renders a
 * small east-pointing arrow figure and declares which input connectors an
 * {@code EsbLink_4001} drawn from it may legally target.
 *
 * @generated NOT
 */
public class SendMediatorEndpointOutputConnectorEditPart extends AdditionalOutputConnector {

    /**
     * @generated
     */
    public static final int VISUAL_ID = 3539;

    /**
     * @generated
     */
    protected IFigure contentPane;

    /**
     * @generated
     */
    protected IFigure primaryShape;

    /**
     * Input-connector edit-part classes that are valid link targets for this
     * output connector. Replaces the previous 43 identical
     * {@code instanceof ... types.add(EsbLink_4001)} branches; order matches
     * the original chain, and each match still contributes one entry.
     */
    private static final Class<?>[] LINK_TARGET_TYPES = {
            ProxyInputConnectorEditPart.class,
            ProxyFaultInputConnectorEditPart.class,
            DropMediatorInputConnectorEditPart.class,
            PropertyMediatorInputConnectorEditPart.class,
            PropertyGroupMediatorInputConnectorEditPart.class,
            ThrottleMediatorInputConnectorEditPart.class,
            FilterMediatorInputConnectorEditPart.class,
            LogMediatorInputConnectorEditPart.class,
            EnrichMediatorInputConnectorEditPart.class,
            XSLTMediatorInputConnectorEditPart.class,
            SwitchMediatorInputConnectorEditPart.class,
            SequenceInputConnectorEditPart.class,
            EventMediatorInputConnectorEditPart.class,
            EntitlementMediatorInputConnectorEditPart.class,
            ClassMediatorInputConnectorEditPart.class,
            SpringMediatorInputConnectorEditPart.class,
            ScriptMediatorInputConnectorEditPart.class,
            FaultMediatorInputConnectorEditPart.class,
            XQueryMediatorInputConnectorEditPart.class,
            CommandMediatorInputConnectorEditPart.class,
            DBLookupMediatorInputConnectorEditPart.class,
            DBReportMediatorInputConnectorEditPart.class,
            SmooksMediatorInputConnectorEditPart.class,
            SendMediatorInputConnectorEditPart.class,
            DefaultEndPointInputConnectorEditPart.class,
            AddressEndPointInputConnectorEditPart.class,
            FailoverEndPointInputConnectorEditPart.class,
            WSDLEndPointInputConnectorEditPart.class,
            LoadBalanceEndPointInputConnectorEditPart.class,
            HeaderMediatorInputConnectorEditPart.class,
            CloneMediatorInputConnectorEditPart.class,
            CacheMediatorInputConnectorEditPart.class,
            IterateMediatorInputConnectorEditPart.class,
            CalloutMediatorInputConnectorEditPart.class,
            TransactionMediatorInputConnectorEditPart.class,
            RMSequenceMediatorInputConnectorEditPart.class,
            RuleMediatorInputConnectorEditPart.class,
            OAuthMediatorInputConnectorEditPart.class,
            AggregateMediatorInputConnectorEditPart.class,
            MessageInputConnectorEditPart.class,
            MergeNodeFirstInputConnectorEditPart.class,
            MergeNodeSecondInputConnectorEditPart.class,
            JsonTransformMediatorInputConnectorEditPart.class
    };

    /**
     * @generated
     */
    public SendMediatorEndpointOutputConnectorEditPart(View view) {
        super(view);
    }

    /**
     * Installs the semantic and layout policies and removes the connection
     * handles so links can only be started explicitly.
     *
     * @generated NOT
     */
    protected void createDefaultEditPolicies() {
        super.createDefaultEditPolicies();
        installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE,
                new SendMediatorEndpointOutputConnectorItemSemanticEditPolicy());
        installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
        // XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable
        // editpolicies
        removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
    }

    /**
     * Layout policy: children are non-resizable and neither move nor create
     * requests produce commands.
     *
     * @generated
     */
    protected LayoutEditPolicy createLayoutEditPolicy() {
        org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
            protected EditPolicy createChildEditPolicy(EditPart child) {
                EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
                if (result == null) {
                    result = new NonResizableEditPolicy();
                }
                return result;
            }

            protected Command getMoveChildrenCommand(Request request) {
                return null;
            }

            protected Command getCreateCommand(CreateRequest request) {
                return null;
            }
        };
        return lep;
    }

    /**
     * @generated
     */
    protected IFigure createNodeShape() {
        return primaryShape = new EastPointerFigure();
    }

    /**
     * @generated
     */
    public EastPointerFigure getPrimaryShape() {
        return (EastPointerFigure) primaryShape;
    }

    /**
     * @generated
     */
    protected NodeFigure createNodePlate() {
        DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(12, 10);
        return result;
    }

    /**
     * Creates figure for this edit part.
     *
     * Body of this method does not depend on settings in generation model
     * so you may safely remove <i>generated</i> tag and modify it.
     *
     * @generated NOT
     */
    protected NodeFigure createNodeFigure() {
        NodeFigure figure = createNodePlate();
        figure.setLayoutManager(new StackLayout());
        IFigure shape = createNodeShapeForward();
        figure.add(shape);
        contentPane = setupContentPane(shape);
        // figure_ is inherited from AdditionalOutputConnector; presumably it
        // caches the node figure for the forward/reverse shape switch — confirm.
        figure_ = figure;
        createNodeShapeReverse();
        return figure;
    }

    /**
     * Default implementation treats passed figure as content pane.
     * Respects layout one may have set for generated figure.
     *
     * @param nodeShape instance of generated figure class
     * @generated
     */
    protected IFigure setupContentPane(IFigure nodeShape) {
        return nodeShape; // use nodeShape itself as contentPane
    }

    /**
     * @generated
     */
    public IFigure getContentPane() {
        if (contentPane != null) {
            return contentPane;
        }
        return super.getContentPane();
    }

    /**
     * @generated
     */
    protected void setForegroundColor(Color color) {
        if (primaryShape != null) {
            primaryShape.setForegroundColor(color);
        }
    }

    /**
     * @generated
     */
    protected void setBackgroundColor(Color color) {
        if (primaryShape != null) {
            primaryShape.setBackgroundColor(color);
        }
    }

    /**
     * @generated
     */
    protected void setLineWidth(int width) {
        if (primaryShape instanceof Shape) {
            ((Shape) primaryShape).setLineWidth(width);
        }
    }

    /**
     * @generated
     */
    protected void setLineType(int style) {
        if (primaryShape instanceof Shape) {
            ((Shape) primaryShape).setLineStyle(style);
        }
    }

    /**
     * @generated
     */
    public List<IElementType> getMARelTypesOnSource() {
        ArrayList<IElementType> types = new ArrayList<IElementType>(1);
        types.add(EsbElementTypes.EsbLink_4001);
        return types;
    }

    /**
     * Returns {@code EsbLink_4001} once for every entry of
     * {@link #LINK_TARGET_TYPES} the target edit part is an instance of —
     * identical behavior to the previous unrolled instanceof chain.
     *
     * @generated NOT
     */
    public List<IElementType> getMARelTypesOnSourceAndTarget(IGraphicalEditPart targetEditPart) {
        LinkedList<IElementType> types = new LinkedList<IElementType>();
        for (Class<?> targetType : LINK_TARGET_TYPES) {
            if (targetType.isInstance(targetEditPart)) {
                types.add(EsbElementTypes.EsbLink_4001);
            }
        }
        return types;
    }

    /**
     * @generated
     */
    public List<IElementType> getMATypesForTarget(IElementType relationshipType) {
        LinkedList<IElementType> types = new LinkedList<IElementType>();
        if (relationshipType == EsbElementTypes.EsbLink_4001) {
            types.add(EsbElementTypes.ProxyInputConnector_3003);
            types.add(EsbElementTypes.ProxyFaultInputConnector_3489);
            types.add(EsbElementTypes.DropMediatorInputConnector_3008);
            types.add(EsbElementTypes.PropertyMediatorInputConnector_3033);
            types.add(EsbElementTypes.PropertyGroupMediatorInputConnector_3789);
            types.add(EsbElementTypes.ThrottleMediatorInputConnector_3121);
            types.add(EsbElementTypes.FilterMediatorInputConnector_3010);
            types.add(EsbElementTypes.LogMediatorInputConnector_3018);
            types.add(EsbElementTypes.EnrichMediatorInputConnector_3036);
            types.add(EsbElementTypes.XSLTMediatorInputConnector_3039);
            types.add(EsbElementTypes.SwitchMediatorInputConnector_3042);
            types.add(EsbElementTypes.SequenceInputConnector_3049);
            types.add(EsbElementTypes.EventMediatorInputConnector_3052);
            types.add(EsbElementTypes.EntitlementMediatorInputConnector_3055);
            types.add(EsbElementTypes.ClassMediatorInputConnector_3058);
            types.add(EsbElementTypes.SpringMediatorInputConnector_3061);
            types.add(EsbElementTypes.ScriptMediatorInputConnector_3064);
            types.add(EsbElementTypes.FaultMediatorInputConnector_3067);
            types.add(EsbElementTypes.XQueryMediatorInputConnector_3070);
            types.add(EsbElementTypes.CommandMediatorInputConnector_3073);
            types.add(EsbElementTypes.DBLookupMediatorInputConnector_3076);
            types.add(EsbElementTypes.DBReportMediatorInputConnector_3079);
            types.add(EsbElementTypes.SmooksMediatorInputConnector_3082);
            types.add(EsbElementTypes.SendMediatorInputConnector_3085);
            types.add(EsbElementTypes.DefaultEndPointInputConnector_3021);
            types.add(EsbElementTypes.AddressEndPointInputConnector_3030);
            types.add(EsbElementTypes.FailoverEndPointInputConnector_3088);
            types.add(EsbElementTypes.WSDLEndPointInputConnector_3092);
            types.add(EsbElementTypes.LoadBalanceEndPointInputConnector_3095);
            types.add(EsbElementTypes.HeaderMediatorInputConnector_3100);
            types.add(EsbElementTypes.CloneMediatorInputConnector_3103);
            types.add(EsbElementTypes.CacheMediatorInputConnector_3106);
            types.add(EsbElementTypes.IterateMediatorInputConnector_3109);
            types.add(EsbElementTypes.CalloutMediatorInputConnector_3115);
            types.add(EsbElementTypes.TransactionMediatorInputConnector_3118);
            types.add(EsbElementTypes.RMSequenceMediatorInputConnector_3124);
            types.add(EsbElementTypes.RuleMediatorInputConnector_3127);
            types.add(EsbElementTypes.OAuthMediatorInputConnector_3130);
            types.add(EsbElementTypes.AggregateMediatorInputConnector_3112);
            types.add(EsbElementTypes.MessageInputConnector_3046);
            types.add(EsbElementTypes.MergeNodeFirstInputConnector_3014);
            types.add(EsbElementTypes.MergeNodeSecondInputConnector_3015);
            types.add(EsbElementTypes.JsonTransformMediatorInputConnector_3792);
        }
        return types;
    }

    /**
     * @generated
     */
    public class EastPointerFigure extends EastPointerShape {
        /**
         * @generated
         */
        public EastPointerFigure() {
            this.setBackgroundColor(THIS_BACK);
            this.setPreferredSize(new Dimension(getMapMode().DPtoLP(12), getMapMode().DPtoLP(10)));
        }
    }

    /**
     * @generated
     */
    static final Color THIS_BACK = new Color(null, 50, 50, 50);
}
| |
// "Therefore those skilled at the unorthodox
// are infinite as heaven and earth,
// inexhaustible as the great rivers.
// When they come to an end,
// they begin again,
// like the days and months;
// they die and are reborn,
// like the four seasons."
//
// - Sun Tzu,
// "The Art of War"
package com.theartofdev.fastimageloader;
import android.app.Application;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Point;
import android.view.Display;
import android.view.WindowManager;
import com.theartofdev.fastimageloader.impl.util.FILUtils;
/**
 * Builder for creating {@link com.theartofdev.fastimageloader.ImageLoadSpec} instances.
 * <br><br>
 * Defaults:<br>
 * Format - JPEG<br>
 * Max Density - 1.5<br>
 * Pixel Config - ARGB_8888<br>
 */
public final class ImageLoadSpecBuilder {
    //region: Fields and Consts
    /**
     * the unique key of the spec used for identification and debug
     */
    private String mKey;
    /**
     * The application object
     */
    private Application mApplication;
    /**
     * the width of the image in pixels (-1 means "not set"; 0 means unbound)
     */
    private int mWidth = -1;
    /**
     * the height of the image in pixels (-1 means "not set"; 0 means unbound)
     */
    private int mHeight = -1;
    /**
     * the max pixel per inch deviceDensity to load the image in
     */
    private float mMaxDensity = 1.5f;
    /**
     * The format of the image.
     */
    private ImageLoadSpec.Format mFormat = ImageLoadSpec.Format.JPEG;
    /**
     * the pixel configuration to load the image in (4 bytes per image pixel, 2 bytes, etc.)
     */
    private Bitmap.Config mPixelConfig = Bitmap.Config.ARGB_8888;
    /**
     * The URI enhancer to use for this spec image loading
     */
    private ImageServiceAdapter mImageServiceAdapter;
    //endregion
    /**
     * Package-private: instances are expected to be handed out by the library,
     * not constructed by client code directly.
     *
     * @param key the unique key of the spec used for identification and debug
     * @param application The application object
     * @param imageServiceAdapter default URI enhancer to use for this spec image loading
     * @throws IllegalArgumentException if any argument is null (or key is empty)
     */
    ImageLoadSpecBuilder(String key, Application application, ImageServiceAdapter imageServiceAdapter) {
        FILUtils.notNullOrEmpty(key, "key");
        FILUtils.notNull(application, "application");
        FILUtils.notNull(imageServiceAdapter, "imageServiceAdapter");
        mKey = key;
        mApplication = application;
        mImageServiceAdapter = imageServiceAdapter;
    }
    /**
     * Get the display size of the device (in pixels, from the default display).
     */
    public Point getDisplaySize() {
        Point p = new Point();
        Display display = ((WindowManager) mApplication.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        display.getSize(p);
        return p;
    }
    /**
     * The format of the image to download.
     */
    public ImageLoadSpecBuilder setFormat(ImageLoadSpec.Format format) {
        mFormat = format;
        return this;
    }
    /**
     * the pixel configuration to load the image in (4 bytes per image pixel, 2 bytes, etc.)
     */
    public ImageLoadSpecBuilder setPixelConfig(Bitmap.Config pixelConfig) {
        mPixelConfig = pixelConfig;
        return this;
    }
    /**
     * Set the width and height of the image in pixels to the size of the screen.
     */
    public ImageLoadSpecBuilder setDimensionByDisplay() {
        Point size = getDisplaySize();
        mWidth = size.x;
        mHeight = size.y;
        return this;
    }
    /**
     * Set the width and height of the image to unbound (0), i.e. it will be the
     * size of the downloaded image.
     */
    public ImageLoadSpecBuilder setUnboundDimension() {
        mWidth = 0;
        mHeight = 0;
        return this;
    }
    /**
     * Set the width and height of the image in pixels to the same value (square).
     */
    public ImageLoadSpecBuilder setDimension(int size) {
        mWidth = size;
        mHeight = size;
        return this;
    }
    /**
     * Set the width and height of the image in pixels.<br>
     * to set one dimension and the second to scale set the second to 0.
     */
    public ImageLoadSpecBuilder setDimension(int width, int height) {
        mWidth = width;
        mHeight = height;
        return this;
    }
    /**
     * Set the width of the image in pixels.<br>
     * to set the height to scale set it to 0.
     */
    public ImageLoadSpecBuilder setWidth(int width) {
        mWidth = width;
        return this;
    }
    /**
     * Set the height of the image in pixels.<br>
     * to set the width to scale set it to 0.
     */
    public ImageLoadSpecBuilder setHeight(int height) {
        mHeight = height;
        return this;
    }
    /**
     * Set the width and height of the image in pixels to the same value (square),
     * read from the given dimension resource.
     */
    public ImageLoadSpecBuilder setDimensionByResource(int resId) {
        mWidth = mHeight = mApplication.getResources().getDimensionPixelSize(resId);
        return this;
    }
    /**
     * Set the width and height of the image in pixels from the given dimension resources.<br>
     * to set one dimension and the second to scale set the second to 0.
     */
    public ImageLoadSpecBuilder setDimensionByResource(int widthResId, int heightResId) {
        mWidth = mApplication.getResources().getDimensionPixelSize(widthResId);
        mHeight = mApplication.getResources().getDimensionPixelSize(heightResId);
        return this;
    }
    /**
     * Set the width of the image by reading the dimension resource with the given id.<br>
     * to set the height to scale set it to 0.
     */
    public ImageLoadSpecBuilder setWidthByResource(int resId) {
        mWidth = mApplication.getResources().getDimensionPixelSize(resId);
        return this;
    }
    /**
     * Set the height of the image by reading the dimension resource with the given id.<br>
     * to set the width to scale set it to 0.
     */
    public ImageLoadSpecBuilder setHeightByResource(int resId) {
        mHeight = mApplication.getResources().getDimensionPixelSize(resId);
        return this;
    }
    /**
     * Effectively disable the density cap by setting the max density to a very
     * large sentinel (9999), so the device's own density is always used.
     */
    public ImageLoadSpecBuilder setMaxDensity() {
        mMaxDensity = 9999;
        return this;
    }
    /**
     * Set the max pixel per inch density to load the image in.
     *
     * @throws IllegalArgumentException if {@code maxDensity} is not greater than 0.5
     *                                  (note: exactly 0.5 is also rejected)
     */
    public ImageLoadSpecBuilder setMaxDensity(float maxDensity) {
        if (maxDensity <= 0.5)
            throw new IllegalArgumentException("max density must be > .5");
        mMaxDensity = maxDensity;
        return this;
    }
    /**
     * The URI enhancer to use for this spec image loading
     */
    public ImageLoadSpecBuilder setImageServiceAdapter(ImageServiceAdapter imageServiceAdapter) {
        mImageServiceAdapter = imageServiceAdapter;
        return this;
    }
    /**
     * Create a spec from the set parameters, scale the requested dimensions down
     * by the density cap if the device density exceeds it, and register the spec
     * with {@link FastImageLoader}.
     *
     * @throws IllegalArgumentException if width or height was never set
     */
    public ImageLoadSpec build() {
        if (mWidth < 0 || mHeight < 0)
            throw new IllegalArgumentException("width and height must be set");
        float deviceDensity = mApplication.getResources().getDisplayMetrics().density;
        // Scale down only when the device is denser than the cap; never scale up.
        float densityAdj = deviceDensity >= mMaxDensity ? mMaxDensity / deviceDensity : 1f;
        ImageLoadSpec spec = new ImageLoadSpec(mKey, (int) (mWidth * densityAdj), (int) (mHeight * densityAdj), mFormat, mPixelConfig, mImageServiceAdapter);
        FastImageLoader.addSpec(spec);
        return spec;
    }
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2002,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dawn.bgSys.common.password;
/**
* This class provides encode/decode for RFC 2045 Base64 as
* defined by RFC 2045, N. Freed and N. Borenstein.
* RFC 2045: Multipurpose Internet Mail Extensions (MIME)
* Part One: Format of Internet Message Bodies. Reference
* 1996 Available at: http://www.ietf.org/rfc/rfc2045.txt
* This class is used by XML Schema binary format validation
* <p/>
* This implementation does not encode/decode streaming
* data. You need the data that you will encode/decode
 * already in a byte array.
*
* @author Jeffrey Rodriguez
* @author Sandy Gao
* @xerces.internal
*/
public final class Base64Utils {
// Size of the reverse-lookup table: covers the 7-bit ASCII range.
static private final int BASELENGTH = 128;
// Size of the forward-lookup table: one entry per 6-bit Base64 value.
static private final int LOOKUPLENGTH = 64;
// A Base64 quantum: three 8-bit bytes encode to four 6-bit characters.
static private final int TWENTYFOURBITGROUP = 24;
static private final int EIGHTBIT = 8;
static private final int SIXTEENBIT = 16;
static private final int SIXBIT = 6;
// Encoded quanta are always four characters long.
static private final int FOURBYTE = 4;
// Mask for the sign bit of a byte, used to emulate an unsigned right shift.
static private final int SIGN = -128;
// Padding character for incomplete trailing quanta.
static private final char PAD = '=';
// Compile-time switch for the verbose trace output in encode().
static private final boolean fDebug = false;
// Maps an ASCII character to its 6-bit Base64 value, or -1 for non-alphabet characters.
static final private byte[] base64Alphabet = new byte[BASELENGTH];
// Maps a 6-bit Base64 value to its ASCII character.
static final private char[] lookUpBase64Alphabet = new char[LOOKUPLENGTH];
static {
    // Reverse table: everything outside the alphabet decodes to -1.
    for (int i = 0; i < BASELENGTH; i++) {
        base64Alphabet[i] = -1;
    }
    // Alphabet values per RFC 2045: A-Z -> 0..25, a-z -> 26..51, 0-9 -> 52..61.
    for (int i = 'A'; i <= 'Z'; i++) {
        base64Alphabet[i] = (byte) (i - 'A');
    }
    for (int i = 'a'; i <= 'z'; i++) {
        base64Alphabet[i] = (byte) (i - 'a' + 26);
    }
    for (int i = '0'; i <= '9'; i++) {
        base64Alphabet[i] = (byte) (i - '0' + 52);
    }
    base64Alphabet['+'] = 62;
    base64Alphabet['/'] = 63;
    // Forward table is the exact inverse of the mapping above.
    for (int value = 0; value < 26; value++) {
        lookUpBase64Alphabet[value] = (char) ('A' + value);
    }
    for (int value = 26; value < 52; value++) {
        lookUpBase64Alphabet[value] = (char) ('a' + value - 26);
    }
    for (int value = 52; value < 62; value++) {
        lookUpBase64Alphabet[value] = (char) ('0' + value - 52);
    }
    lookUpBase64Alphabet[62] = '+';
    lookUpBase64Alphabet[63] = '/';
}
/**
 * Returns true for the whitespace characters MIME permits inside Base64
 * content: space, carriage return, line feed and tab.
 */
protected static boolean isWhiteSpace(char octect) {
    return octect == ' ' || octect == '\r' || octect == '\n' || octect == '\t';
}
/** Returns true if the character is the Base64 padding character {@code '='}. */
protected static boolean isPad(char octect) {
    return (octect == PAD);
}
/**
 * Returns true if the character is a member of the Base64 alphabet
 * (i.e. it has a 6-bit value in the reverse-lookup table).
 */
protected static boolean isData(char octect) {
    // Table entries are either -1 (non-alphabet) or a value in 0..63.
    return octect < BASELENGTH && base64Alphabet[octect] >= 0;
}
/** Returns true if the character may legally appear in Base64 text: alphabet, pad, or whitespace. */
protected static boolean isBase64(char octect) {
    return (isWhiteSpace(octect) || isPad(octect) || isData(octect));
}
/**
 * Encodes binary octets into Base64 (standard alphabet, with '=' padding,
 * no line breaks).
 *
 * @param binaryData Array containing binaryData
 * @return Encoded Base64 string; {@code null} if the input is {@code null},
 *         the empty string for empty input
 */
public static String encode(byte[] binaryData) {
    if (binaryData == null)
        return null;
    int lengthDataBits = binaryData.length * EIGHTBIT;
    if (lengthDataBits == 0) {
        return "";
    }
    // Split the input into complete 24-bit groups plus an 8- or 16-bit remainder.
    int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
    int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
    // A partial trailing group still produces one (padded) 4-character quartet.
    int numberQuartet = fewerThan24bits != 0 ? numberTriplets + 1 : numberTriplets;
    char encodedData[] = null;
    encodedData = new char[numberQuartet * 4];
    byte k = 0, l = 0, b1 = 0, b2 = 0, b3 = 0;
    int encodedIndex = 0;
    int dataIndex = 0;
    if (fDebug) {
        System.out.println("number of triplets = " + numberTriplets);
    }
    for (int i = 0; i < numberTriplets; i++) {
        b1 = binaryData[dataIndex++];
        b2 = binaryData[dataIndex++];
        b3 = binaryData[dataIndex++];
        if (fDebug) {
            System.out.println("b1= " + b1 + ", b2= " + b2 + ", b3= " + b3);
        }
        // k = low 2 bits of b1, l = low 4 bits of b2 — carried into the next character.
        l = (byte) (b2 & 0x0f);
        k = (byte) (b1 & 0x03);
        // valN = top bits of each byte; the XOR masks repair the sign-extension
        // that >> performs on negative bytes (Java has no unsigned byte shift).
        byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
        byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
        byte val3 = ((b3 & SIGN) == 0) ? (byte) (b3 >> 6) : (byte) ((b3) >> 6 ^ 0xfc);
        if (fDebug) {
            System.out.println("val2 = " + val2);
            System.out.println("k4 = " + (k << 4));
            System.out.println("vak = " + (val2 | (k << 4)));
        }
        encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[(l << 2) | val3];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[b3 & 0x3f];
    }
    // form integral number of 6-bit groups
    if (fewerThan24bits == EIGHTBIT) {
        // One trailing byte: two data characters plus two pad characters.
        b1 = binaryData[dataIndex];
        k = (byte) (b1 & 0x03);
        if (fDebug) {
            System.out.println("b1=" + b1);
            System.out.println("b1<<2 = " + (b1 >> 2));
        }
        byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
        encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[k << 4];
        encodedData[encodedIndex++] = PAD;
        encodedData[encodedIndex++] = PAD;
    } else if (fewerThan24bits == SIXTEENBIT) {
        // Two trailing bytes: three data characters plus one pad character.
        b1 = binaryData[dataIndex];
        b2 = binaryData[dataIndex + 1];
        l = (byte) (b2 & 0x0f);
        k = (byte) (b1 & 0x03);
        byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2) : (byte) ((b1) >> 2 ^ 0xc0);
        byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4) : (byte) ((b2) >> 4 ^ 0xf0);
        encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
        encodedData[encodedIndex++] = lookUpBase64Alphabet[l << 2];
        encodedData[encodedIndex++] = PAD;
    }
    return new String(encodedData);
}
/**
 * Decodes Base64 data into octets.
 * <p>
 * Accepts both the standard alphabet ('+', '/') and the RFC 4648 URL-safe
 * alphabet ('-', '_'); URL-safe characters are normalized to the standard
 * alphabet before decoding. Whitespace is stripped.
 *
 * @param encoded string containing Base64 data
 * @return array containing decoded data, or {@code null} if the input is
 *         {@code null} or not valid Base64
 */
public static byte[] decode(String encoded) {
    if (encoded == null)
        return null;
    // Normalize the RFC 4648 section 5 URL-safe alphabet to the standard one:
    // '-' stands for '+' (value 62) and '_' stands for '/' (value 63).
    // Fix: the previous mapping was swapped ('-' -> '/', '_' -> '+'), which
    // decoded URL-safe input to the wrong bytes — note encode() in this class
    // emits the standard alphabet, so a round trip through a URL-safe
    // substitution could never survive the old mapping.
    encoded = encoded.replace("-", "+").replace("_", "/");
    char[] base64Data = encoded.toCharArray();
    // remove white spaces
    int len = removeWhiteSpace(base64Data);
    if (len % FOURBYTE != 0) {
        return null;//should be divisible by four
    }
    int numberQuadruple = (len / FOURBYTE);
    if (numberQuadruple == 0)
        return new byte[0];
    byte decodedData[] = null;
    byte b1 = 0, b2 = 0, b3 = 0, b4 = 0;
    char d1 = 0, d2 = 0, d3 = 0, d4 = 0;
    int i = 0;
    int encodedIndex = 0;
    int dataIndex = 0;
    // Worst case: no padding, three output bytes per quartet. Padded inputs
    // are copied into a right-sized array before returning (see below).
    decodedData = new byte[(numberQuadruple) * 3];
    // All quartets except the last are guaranteed pad-free.
    for (; i < numberQuadruple - 1; i++) {
        if (!isData((d1 = base64Data[dataIndex++])) ||
            !isData((d2 = base64Data[dataIndex++])) ||
            !isData((d3 = base64Data[dataIndex++])) ||
            !isData((d4 = base64Data[dataIndex++])))
            return null;//if found "no data" just return null
        b1 = base64Alphabet[d1];
        b2 = base64Alphabet[d2];
        b3 = base64Alphabet[d3];
        b4 = base64Alphabet[d4];
        decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
        decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
        decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
    }
    // Last quartet: the first two characters must always be data.
    if (!isData((d1 = base64Data[dataIndex++])) ||
        !isData((d2 = base64Data[dataIndex++]))) {
        return null;//if found "no data" just return null
    }
    b1 = base64Alphabet[d1];
    b2 = base64Alphabet[d2];
    d3 = base64Data[dataIndex++];
    d4 = base64Data[dataIndex++];
    if (!isData((d3)) ||
        !isData((d4))) {//Check if they are PAD characters
        if (isPad(d3) && isPad(d4)) { //Two PAD e.g. 3c[Pad][Pad]
            if ((b2 & 0xf) != 0)//last 4 bits should be zero
                return null;
            byte[] tmp = new byte[i * 3 + 1];
            System.arraycopy(decodedData, 0, tmp, 0, i * 3);
            tmp[encodedIndex] = (byte) (b1 << 2 | b2 >> 4);
            return tmp;
        } else if (!isPad(d3) && isPad(d4)) { //One PAD e.g. 3cQ[Pad]
            b3 = base64Alphabet[d3];
            if ((b3 & 0x3) != 0)//last 2 bits should be zero
                return null;
            byte[] tmp = new byte[i * 3 + 2];
            System.arraycopy(decodedData, 0, tmp, 0, i * 3);
            tmp[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
            tmp[encodedIndex] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
            return tmp;
        } else {
            return null;//an error like "3c[Pad]r", "3cdX", "3cXd", "3cXX" where X is non data
        }
    } else { //No PAD e.g 3cQl
        b3 = base64Alphabet[d3];
        b4 = base64Alphabet[d4];
        decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
        decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
        decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
    }
    return decodedData;
}
/**
 * remove WhiteSpace from MIME containing encoded Base64 data.
 *
 * Compacts the array in place by shifting every non-whitespace character to
 * the front; content beyond the returned length is left untouched.
 *
 * @param data the char array of base64 data (possibly containing whitespace)
 * @return the number of non-whitespace characters now at the front of the array
 */
protected static int removeWhiteSpace(char[] data) {
    if (data == null) {
        return 0;
    }
    int writeIndex = 0;
    for (char current : data) {
        if (!isWhiteSpace(current)) {
            data[writeIndex++] = current;
        }
    }
    return writeIndex;
}
}
| |
package net.smartcosmos.pojo.context;
/*
* *#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*
* SMART COSMOS Platform Core SDK
* ===============================================================================
* Copyright (C) 2013 - 2015 SMARTRAC Technology Fletcher, Inc.
* ===============================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#*#
*/
import net.smartcosmos.model.base.EntityReferenceType;
import net.smartcosmos.model.context.IAccount;
import net.smartcosmos.model.context.IMetadata;
import net.smartcosmos.model.context.MetadataDataType;
import net.smartcosmos.util.UuidUtil;
import net.smartcosmos.util.mapper.BooleanMapper;
import net.smartcosmos.util.mapper.DateMapper;
import net.smartcosmos.util.mapper.DoubleMapper;
import net.smartcosmos.util.mapper.FloatMapper;
import net.smartcosmos.util.mapper.IMetadataValueMapper;
import net.smartcosmos.util.mapper.IntegerMapper;
import net.smartcosmos.util.mapper.JsonMapper;
import net.smartcosmos.util.mapper.LongMapper;
import net.smartcosmos.util.mapper.NoopMapper;
import net.smartcosmos.util.mapper.StringMapper;
import org.json.JSONException;
import java.util.UUID;
/**
 * Type-safe decorator around an {@link IMetadata} instance. All IMetadata
 * operations are delegated to the wrapped object; in addition,
 * {@link #getValue()} and {@link #setValue(Object)} convert the raw string
 * value to and from the Java type matching the metadata's declared
 * {@link MetadataDataType}.
 *
 * @param <T> Java type corresponding to the wrapped metadata's data type
 */
public class TypeSafeMetadata<T> implements IMetadata
{
    private final IMetadata metadataObject;

    // Chosen once from the wrapped object's data type; the data type of a
    // type-safe wrapper can therefore never be reassigned (see setDataType).
    private final IMetadataValueMapper<T> mapper;

    public TypeSafeMetadata(IMetadata metadataObject)
    {
        this.metadataObject = metadataObject;
        this.mapper = createMapper(metadataObject.getDataType());
    }

    /**
     * Selects the value mapper for the given data type.
     *
     * @param dataType declared data type of the metadata
     * @return mapper converting between the raw string value and {@code T}
     * @throws IllegalStateException if the data type is not recognized
     */
    @SuppressWarnings("unchecked")
    private static <T> IMetadataValueMapper<T> createMapper(MetadataDataType dataType)
    {
        switch (dataType)
        {
            case Custom:
                return (IMetadataValueMapper<T>) new NoopMapper();
            case StringType:
            case XMLType:
                // XML content is stored and retrieved as a plain string
                return (IMetadataValueMapper<T>) new StringMapper();
            case DateType:
                return (IMetadataValueMapper<T>) new DateMapper();
            case JSONType:
                return (IMetadataValueMapper<T>) new JsonMapper();
            case IntegerType:
                return (IMetadataValueMapper<T>) new IntegerMapper();
            case LongType:
                return (IMetadataValueMapper<T>) new LongMapper();
            case FloatType:
                return (IMetadataValueMapper<T>) new FloatMapper();
            case DoubleType:
                return (IMetadataValueMapper<T>) new DoubleMapper();
            case BooleanType:
                return (IMetadataValueMapper<T>) new BooleanMapper();
            default:
                throw new IllegalStateException("Unrecognized type");
        }
    }

    @Override
    public IAccount getAccount()
    {
        return metadataObject.getAccount();
    }

    @Override
    public void setAccount(IAccount account)
    {
        metadataObject.setAccount(account);
    }

    @Override
    public EntityReferenceType getEntityReferenceType()
    {
        return metadataObject.getEntityReferenceType();
    }

    @Override
    public void setEntityReferenceType(EntityReferenceType entityReferenceType)
    {
        metadataObject.setEntityReferenceType(entityReferenceType);
    }

    @Override
    public String getReferenceUrn()
    {
        return metadataObject.getReferenceUrn();
    }

    @Override
    public void setReferenceUrn(String urn)
    {
        metadataObject.setReferenceUrn(urn);
    }

    @Override
    public MetadataDataType getDataType()
    {
        return metadataObject.getDataType();
    }

    /**
     * Always throws: the data type is fixed at construction time because the
     * value mapper is bound to it.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public void setDataType(MetadataDataType type)
    {
        throw new UnsupportedOperationException("Type safe metadata object cannot reassign metadata data type");
    }

    @Override
    public String getKey()
    {
        return metadataObject.getKey();
    }

    @Override
    public void setKey(String key)
    {
        metadataObject.setKey(key);
    }

    @Override
    public String getRawValue()
    {
        return metadataObject.getRawValue();
    }

    /**
     * @return the raw string value converted to the type matching the
     *         metadata's declared data type
     */
    public T getValue()
    {
        return mapper.fromString(metadataObject.getRawValue());
    }

    /**
     * Stores the given value as its raw string representation.
     *
     * @param value typed value to store
     */
    public void setValue(T value)
    {
        metadataObject.setRawValue(mapper.toString(value));
    }

    @Override
    public void setRawValue(String value)
    {
        metadataObject.setRawValue(value);
    }

    @Override
    public long getLastModifiedTimestamp()
    {
        return metadataObject.getLastModifiedTimestamp();
    }

    /**
     * Not supported by this decorator.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public void copy(IMetadata object)
    {
        throw new UnsupportedOperationException("POJO doesn't support copy operation");
    }

    @Override
    public String getDecodedValue() throws JSONException
    {
        return metadataObject.getDecodedValue();
    }

    @Override
    public String getMoniker()
    {
        return metadataObject.getMoniker();
    }

    @Override
    public void setMoniker(String moniker)
    {
        metadataObject.setMoniker(moniker);
    }

    @Override
    public String getUrn()
    {
        return metadataObject.getUrn();
    }

    @Override
    public void setUrn(String urn)
    {
        metadataObject.setUrn(urn);
    }

    @Override
    public UUID getSystemUuid()
    {
        return UuidUtil.getUuidFromUrn(metadataObject.getUrn());
    }
}
| |
/*
* Copyright (c) 2002-2022, City of Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.portal.service.portal;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Properties;
import java.util.Random;
import javax.servlet.http.HttpServletRequest;
import org.springframework.mock.web.MockHttpServletRequest;
import fr.paris.lutece.portal.business.page.Page;
import fr.paris.lutece.portal.service.init.LuteceInitException;
import fr.paris.lutece.portal.service.page.IPageService;
import fr.paris.lutece.portal.service.security.LuteceUser;
import fr.paris.lutece.portal.service.security.MokeLuteceAuthentication;
import fr.paris.lutece.portal.service.security.SecurityService;
import fr.paris.lutece.portal.service.spring.SpringContextService;
import fr.paris.lutece.portal.service.util.AppPropertiesService;
import fr.paris.lutece.test.LuteceTestCase;
/**
 * Integration tests for {@link PortalMenuService}: menu content lifecycle
 * (page create/rename/remove) and role-based page visibility, including
 * interaction with the menu cache.
 */
public class PortalMenuServiceTest extends LuteceTestCase
{
    private static final String ROLE1 = "ROLE1";
    private static final String ROLE2 = "ROLE2";

    /**
     * Checks that the main portal menu reflects page creation, renaming and
     * removal.
     */
    public void testGetMenuContent( )
    {
        HttpServletRequest request = new MockHttpServletRequest( );
        // determine a random page name
        String randomPageName = "page" + new SecureRandom( ).nextLong( );
        // get the menu
        String menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
        assertFalse( "Portal menu should not contain not yet created page with name " + randomPageName, menu.contains( randomPageName ) );
        // create the page
        Page page = new Page( );
        page.setParentPageId( PortalService.getRootPageId( ) );
        page.setName( randomPageName );
        IPageService pageService = (IPageService) SpringContextService.getBean( "pageService" );
        pageService.createPage( page );
        // get the menu
        menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
        assertTrue( "Portal menu should contain page with name " + randomPageName, menu.contains( randomPageName ) );
        // change the page name
        randomPageName = randomPageName + "_mod";
        page.setName( randomPageName );
        pageService.updatePage( page );
        // get the menu
        menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
        assertTrue( "Portal menu should contain page with the modified name " + randomPageName, menu.contains( randomPageName ) );
        // remove the page
        pageService.removePage( page.getId( ) );
        // get the menu
        menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
        assertFalse( "Portal menu should not contain page with name " + randomPageName + " anymore", menu.contains( randomPageName ) );
    }

    /**
     * Checks that pages associated with a role are only visible in the menu
     * to users having that role, for every role combination, and that the
     * menu cache key is insensitive to role ordering.
     *
     * @throws IOException if the lutece.properties file cannot be rewritten
     * @throws LuteceInitException if the security service fails to re-init
     */
    public void testPageVisibility( ) throws IOException, LuteceInitException
    {
        // create pages
        final Random rand = new SecureRandom( );
        Page pageNoRole = createPage( "page." + rand.nextLong( ) );
        Page pageRole1 = createPage( "page.role1." + rand.nextLong( ), ROLE1 );
        Page pageRole2 = createPage( "page.role2." + rand.nextLong( ), ROLE2 );
        boolean authStatus = enableAuthentication( );
        boolean cacheStatus = enablePortalMenuServiceCache( );
        try
        {
            // test twice to test the cache
            for ( int i = 0; i < 2; i++ )
            {
                // test menu content with no user
                HttpServletRequest request = new MockHttpServletRequest( );
                String menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Portal menu should contain page not associated with a role named " + pageNoRole.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageNoRole.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE1 + " named " + pageRole1.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole1.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE2 + " named " + pageRole2.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole2.getName( ) ) );
                // test menu content with no role
                @SuppressWarnings( "serial" )
                LuteceUser user = new LuteceUser( "junit", SecurityService.getInstance( ).getAuthenticationService( ) )
                {
                    @Override
                    public String getName( )
                    {
                        // user name is different on each call
                        return "user" + rand.nextLong( );
                    }
                };
                request.getSession( ).setAttribute( "lutece_user", user );
                menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Portal menu should contain page not associated with a role named " + pageNoRole.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageNoRole.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE1 + " named " + pageRole1.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole1.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE2 + " named " + pageRole2.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole2.getName( ) ) );
                // test menu content with ROLE1
                user.setRoles( Arrays.asList( ROLE1 ) );
                menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Portal menu should contain page not associated with a role named " + pageNoRole.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageNoRole.getName( ) ) );
                assertTrue( "Portal menu should contain page associated with role " + ROLE1 + " named " + pageRole1.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole1.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE2 + " named " + pageRole2.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole2.getName( ) ) );
                // test menu content with ROLE2
                user.setRoles( Arrays.asList( ROLE2 ) );
                menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Portal menu should contain page not associated with a role named " + pageNoRole.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageNoRole.getName( ) ) );
                assertFalse(
                        "Portal menu should not contain page associated with role " + ROLE1 + " named " + pageRole1.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole1.getName( ) ) );
                assertTrue( "Portal menu should contain page associated with role " + ROLE2 + " named " + pageRole2.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole2.getName( ) ) );
                // test menu content with ROLE1 and ROLE2
                user.setRoles( Arrays.asList( ROLE1, ROLE2 ) );
                menu = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Portal menu should contain page not associated with a role named " + pageNoRole.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageNoRole.getName( ) ) );
                assertTrue( "Portal menu should contain page associated with role " + ROLE1 + " named " + pageRole1.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole1.getName( ) ) );
                assertTrue( "Portal menu should contain page associated with role " + ROLE2 + " named " + pageRole2.getName( ) + " (call " + ( i + 1 ) + ")",
                        menu.contains( pageRole2.getName( ) ) );
                // test menu content with ROLE2 and ROLE1
                user.setRoles( Arrays.asList( ROLE2, ROLE1 ) );
                // reference equality is intentional here: an identical cached
                // String instance proves the cache key ignores role order
                String menu2 = PortalMenuService.getInstance( ).getMenuContent( 0, PortalMenuService.MODE_NORMAL, PortalMenuService.MENU_MAIN, request );
                assertTrue( "Role order should not matter to the cache (call " + ( i + 1 ) + ")", menu == menu2 );
            }
        }
        finally
        {
            // cleanup
            restoreAuthentication( authStatus );
            restorePortalMenuServiceCache( cacheStatus );
            IPageService pageService = (IPageService) SpringContextService.getBean( "pageService" );
            pageService.removePage( pageNoRole.getId( ) );
            pageService.removePage( pageRole1.getId( ) );
            pageService.removePage( pageRole2.getId( ) );
        }
    }

    /**
     * Restores the menu cache to the given enabled state.
     *
     * @param status previous cache state returned by {@link #enablePortalMenuServiceCache()}
     */
    private void restorePortalMenuServiceCache( boolean status )
    {
        PortalMenuService.getInstance( ).enableCache( status );
    }

    /**
     * Enables the menu cache and returns its previous state so it can be
     * restored after the test.
     *
     * @return the cache state before this call
     */
    private boolean enablePortalMenuServiceCache( )
    {
        boolean status = PortalMenuService.getInstance( ).isCacheEnable( );
        PortalMenuService.getInstance( ).enableCache( true );
        return status;
    }

    /**
     * Removes the mylutece authentication settings from lutece.properties if
     * authentication was not enabled before the test, then re-inits security.
     *
     * @param status previous state returned by {@link #enableAuthentication()}
     * @throws IOException if the properties file cannot be read or written
     * @throws LuteceInitException if the security service fails to re-init
     */
    private void restoreAuthentication( boolean status ) throws IOException, LuteceInitException
    {
        if ( !status )
        {
            File luteceProperties = new File( getResourcesDir( ), "WEB-INF/conf/lutece.properties" );
            Properties props = new Properties( );
            // try-with-resources guarantees the streams are closed even when
            // load/store throws (the previous code leaked them on exception)
            try ( InputStream is = new FileInputStream( luteceProperties ) )
            {
                props.load( is );
            }
            props.remove( "mylutece.authentication.enable" );
            props.remove( "mylutece.authentication.class" );
            try ( OutputStream os = new FileOutputStream( luteceProperties ) )
            {
                props.store( os, "saved for junit " + this.getClass( ).getCanonicalName( ) );
            }
            AppPropertiesService.reloadAll( );
            SecurityService.init( );
        }
    }

    /**
     * Enables mock authentication in lutece.properties if not already on,
     * re-inits security, and returns the previous state for later restore.
     *
     * @return whether authentication was already enabled before this call
     * @throws IOException if the properties file cannot be read or written
     * @throws LuteceInitException if the security service fails to re-init
     */
    private boolean enableAuthentication( ) throws IOException, LuteceInitException
    {
        boolean status = SecurityService.isAuthenticationEnable( );
        if ( !status )
        {
            File luteceProperties = new File( getResourcesDir( ), "WEB-INF/conf/lutece.properties" );
            Properties props = new Properties( );
            // try-with-resources guarantees the streams are closed even when
            // load/store throws (the previous code leaked them on exception)
            try ( InputStream is = new FileInputStream( luteceProperties ) )
            {
                props.load( is );
            }
            props.setProperty( "mylutece.authentication.enable", "true" );
            props.setProperty( "mylutece.authentication.class", MokeLuteceAuthentication.class.getName( ) );
            try ( OutputStream os = new FileOutputStream( luteceProperties ) )
            {
                props.store( os, "saved for junit " + this.getClass( ).getCanonicalName( ) );
            }
            AppPropertiesService.reloadAll( );
            SecurityService.init( );
        }
        return status;
    }

    /**
     * Creates and persists a page under the portal root, optionally bound to
     * a role.
     *
     * @param pageName name of the page to create
     * @param role role restricting visibility, or {@code null} for none
     * @return the created page
     */
    private Page createPage( String pageName, String role )
    {
        Page page = new Page( );
        page.setParentPageId( PortalService.getRootPageId( ) );
        page.setName( pageName );
        if ( role != null )
        {
            page.setRole( role );
        }
        IPageService pageService = (IPageService) SpringContextService.getBean( "pageService" );
        pageService.createPage( page );
        return page;
    }

    /**
     * Creates and persists a role-less page under the portal root.
     *
     * @param pageName name of the page to create
     * @return the created page
     */
    private Page createPage( String pageName )
    {
        return createPage( pageName, null );
    }
}
| |
/*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.integration.kernel.modeshape;
import static java.net.URI.create;
import static java.util.Collections.emptySet;
import static javax.jcr.PropertyType.BINARY;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.charset.Charset;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.TimeZone;
import java.util.function.BiFunction;
import java.util.function.Function;
import javax.inject.Inject;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.nodetype.NodeTypeDefinition;
import javax.jcr.nodetype.NodeTypeManager;
import javax.jcr.nodetype.NodeTypeTemplate;
import javax.jcr.security.AccessControlList;
import javax.jcr.security.AccessControlManager;
import javax.jcr.security.Privilege;
import javax.jcr.version.Version;
import org.fcrepo.kernel.api.FedoraRepository;
import org.fcrepo.kernel.api.FedoraSession;
import org.fcrepo.kernel.api.RdfStream;
import org.fcrepo.kernel.api.exception.AccessDeniedException;
import org.fcrepo.kernel.api.exception.ConstraintViolationException;
import org.fcrepo.kernel.api.exception.InvalidChecksumException;
import org.fcrepo.kernel.api.exception.InvalidPrefixException;
import org.fcrepo.kernel.api.exception.MalformedRdfException;
import org.fcrepo.kernel.api.exception.RepositoryRuntimeException;
import org.fcrepo.kernel.api.models.Container;
import org.fcrepo.kernel.api.models.FedoraResource;
import org.fcrepo.kernel.api.models.FedoraTimeMap;
import org.fcrepo.kernel.api.models.FedoraWebacAcl;
import org.fcrepo.kernel.api.rdf.DefaultRdfStream;
import org.fcrepo.kernel.api.services.BinaryService;
import org.fcrepo.kernel.api.services.ContainerService;
import org.fcrepo.kernel.api.services.NodeService;
import org.fcrepo.kernel.api.services.VersionService;
import org.fcrepo.kernel.modeshape.FedoraResourceImpl;
import org.fcrepo.kernel.modeshape.rdf.impl.DefaultIdentifierTranslator;
import org.apache.commons.io.IOUtils;
import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.Triple;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.util.iterator.ExtendedIterator;
import org.apache.jena.vocabulary.RDF;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.modeshape.jcr.security.SimplePrincipal;
import org.springframework.test.context.ContextConfiguration;
import com.google.common.collect.Iterators;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_CONTAINER;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_CREATEDBY;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_LASTMODIFIEDBY;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_NON_RDF_SOURCE_DESCRIPTION;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_REPOSITORY_ROOT;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_RESOURCE;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_TIME_MAP;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_TOMBSTONE;
import static org.fcrepo.kernel.api.FedoraTypes.FEDORA_WEBAC_ACL;
import static org.fcrepo.kernel.api.RdfCollectors.toModel;
import static org.fcrepo.kernel.api.RdfLexicon.LAST_MODIFIED_DATE;
import static org.fcrepo.kernel.api.RdfLexicon.REPOSITORY_NAMESPACE;
import static org.fcrepo.kernel.api.RequiredRdfContext.INBOUND_REFERENCES;
import static org.fcrepo.kernel.api.RequiredRdfContext.PROPERTIES;
import static org.fcrepo.kernel.api.RequiredRdfContext.SERVER_MANAGED;
import static org.fcrepo.kernel.api.RequiredRdfContext.VERSIONS;
import static org.fcrepo.kernel.modeshape.FedoraJcrConstants.FIELD_DELIMITER;
import static org.fcrepo.kernel.modeshape.FedoraJcrConstants.ROOT;
import static org.fcrepo.kernel.modeshape.FedoraResourceImpl.LDPCV_TIME_MAP;
import static org.fcrepo.kernel.modeshape.FedoraResourceImpl.CONTAINER_WEBAC_ACL;
import static org.fcrepo.kernel.modeshape.FedoraSessionImpl.getJcrSession;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.HAS_MIXIN_TYPE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.HAS_NODE_TYPE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.HAS_PRIMARY_IDENTIFIER;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.HAS_PRIMARY_TYPE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.JCR_NAMESPACE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.JCR_NT_NAMESPACE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.MIX_NAMESPACE;
import static org.fcrepo.kernel.modeshape.RdfJcrLexicon.MODE_NAMESPACE;
import static org.fcrepo.kernel.modeshape.utils.FedoraTypesUtils.getJcrNode;
import static org.fcrepo.kernel.modeshape.utils.UncheckedPredicate.uncheck;
import static org.apache.jena.datatypes.xsd.XSDDatatype.XSDstring;
import static org.apache.jena.graph.Node.ANY;
import static org.apache.jena.graph.NodeFactory.createLiteral;
import static org.apache.jena.graph.NodeFactory.createURI;
import static org.apache.jena.rdf.model.ResourceFactory.createPlainLiteral;
import static org.apache.jena.rdf.model.ResourceFactory.createProperty;
import static org.apache.jena.rdf.model.ResourceFactory.createResource;
import static org.apache.jena.vocabulary.DC_11.title;
import static org.apache.jena.vocabulary.RDF.type;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* <p>FedoraResourceImplIT class.</p>
*
* @author ajs6f
*/
@ContextConfiguration({"/spring-test/repo.xml"})
public class FedoraResourceImplIT extends AbstractIT {
@Inject
FedoraRepository repo;
@Inject
NodeService nodeService;
@Inject
ContainerService containerService;
@Inject
BinaryService binaryService;
@Inject
VersionService versionService;
private FedoraSession session;
private DefaultIdentifierTranslator subjects;
/**
 * Opens a fresh Fedora session and a default identifier translator for it
 * before each test.
 *
 * @throws RepositoryException if the repository login fails
 */
@Before
public void setUp() throws RepositoryException {
    session = repo.login();
    subjects = new DefaultIdentifierTranslator(getJcrSession(session));
}
/** Expires the per-test session opened in {@link #setUp()}. */
@After
public void tearDown() {
    session.expire();
}
/**
 * Verifies that the repository root is reachable at path "/" and carries
 * both the JCR root type and the Fedora repository-root type.
 */
@Test
public void testGetRootNode() throws RepositoryException {
    // use a dedicated session so the shared per-test session stays untouched
    final FedoraSession rootSession = repo.login();
    final FedoraResource root = nodeService.find(rootSession, "/");
    assertEquals("/", root.getPath());
    assertTrue(root.hasType(ROOT));
    assertTrue(root.hasType(FEDORA_REPOSITORY_ROOT));
    rootSession.expire();
}
/**
 * Translates a Fedora resource into the RDF node that represents it as a
 * triple subject, via the session's identifier translator.
 *
 * @param obj resource to translate
 * @return subject node for the resource
 */
private Node createGraphSubjectNode(final FedoraResource obj) {
    return subjects.reverse().convert(obj).asNode();
}
/**
 * Verifies that internal JCR properties (primary identifier, primary type,
 * node type, mixin types) are not exposed in a container's RDF.
 */
@Test
public void testRandomNodeGraph() {
    final FedoraResource object = containerService.findOrCreate(session, "/testNodeGraph");
    // use the shared helper rather than duplicating the translation inline,
    // consistent with the other tests in this class
    final Node s = createGraphSubjectNode(object);
    final Model rdf = object.getTriples(subjects, PROPERTIES).collect(toModel());
    assertFalse(rdf.getGraph().contains(s, HAS_PRIMARY_IDENTIFIER.asNode(), ANY));
    assertFalse(rdf.getGraph().contains(s, HAS_PRIMARY_TYPE.asNode(), ANY));
    assertFalse(rdf.getGraph().contains(s, HAS_NODE_TYPE.asNode(), ANY));
    assertFalse(rdf.getGraph().contains(s, HAS_MIXIN_TYPE.asNode(), ANY));
}
/**
 * Verifies that a committed container's lastModified date is not before its
 * creation date and that exactly one lastModified triple is exposed.
 */
@Test
public void testLastModified() throws RepositoryException {
    final String pid = getRandomPid();
    containerService.findOrCreate(session, "/" + pid);
    try {
        session.commit();
    } finally {
        session.expire();
    }
    // reload in a fresh session so persisted timestamps are observed
    session = repo.login();
    final Container container = containerService.findOrCreate(session, "/" + pid);
    final Instant created = roundDate(container.getCreatedDate());
    final Instant modified = roundDate(container.getLastModifiedDate());
    assertFalse(modified + " should not be before " + created, modified.isBefore(created));
    final Graph graph = container.getTriples(subjects, PROPERTIES).collect(toModel()).getGraph();
    final Node subject = createGraphSubjectNode(container);
    final ExtendedIterator<Triple> lastModifiedTriples =
            graph.find(subject, LAST_MODIFIED_DATE.asNode(), ANY);
    assertEquals("Should have one lastModified triple", 1, lastModifiedTriples.toList().size());
}
/**
 * Verifies that touch() with no explicit dates advances the resource's
 * lastModified date past its previous value.
 */
@Test
public void testImplicitTouch() throws RepositoryException {
    final String pid = getRandomPid();
    containerService.findOrCreate(session, "/" + pid);
    try {
        session.commit();
    } finally {
        session.expire();
    }
    session = repo.login();
    final Container container = containerService.findOrCreate(session, "/" + pid);
    final FedoraResourceImpl resource = new FedoraResourceImpl(getJcrNode(container));
    final Instant before = resource.getLastModifiedDate();
    resource.touch(false, null, null, null, null);
    assertTrue(before.isBefore(container.getLastModifiedDate()));
}
/**
 * Verifies that touch() with explicit dates and user names overwrites the
 * created/lastModified metadata with exactly the supplied values.
 */
@Test
public void testTouch() throws RepositoryException {
    final Calendar specified = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    specified.add(Calendar.YEAR, 1);
    final String pid = getRandomPid();
    containerService.findOrCreate(session, "/" + pid);
    try {
        session.commit();
    } finally {
        session.expire();
    }
    session = repo.login();
    final Container obj2 = containerService.findOrCreate(session, "/" + pid);
    final FedoraResourceImpl impl = new FedoraResourceImpl(getJcrNode(obj2));
    final String specifiedUser = "me";
    // NOTE(review): the year is advanced a second time here, putting the touch
    // date two years out — presumably just "any future date", but confirm the
    // duplication is intentional.
    specified.add(Calendar.YEAR, 1);
    impl.touch(false, specified, specifiedUser, specified, specifiedUser);
    assertEquals(specifiedUser, impl.getNode().getProperty(FEDORA_LASTMODIFIEDBY).getString());
    assertEquals(specifiedUser, impl.getNode().getProperty(FEDORA_CREATEDBY).getString());
    assertEquals(specified.getTime(), Date.from(obj2.getLastModifiedDate()));
    assertEquals(specified.getTime(), Date.from(obj2.getCreatedDate()));
}
/**
 * Verifies that the root's server-managed RDF hides ModeShape/JCR internals
 * while still exposing the expected repository type triples.
 */
@Test
public void testRepositoryRootGraph() {
    final FedoraResource object = nodeService.find(session, "/");
    final Graph graph = object.getTriples(subjects, SERVER_MANAGED).collect(toModel()).getGraph();
    final Node s = createGraphSubjectNode(object);
    // ModeShape vendor metadata must not leak through
    final Node vendorUrlPredicate = createURI(REPOSITORY_NAMESPACE
            + "repositoryJcrRepositoryVendorUrl");
    assertFalse(graph.contains(s, vendorUrlPredicate, createLiteral("http://www.modeshape.org")));
    // nor the raw JCR node-type property
    assertFalse(graph.contains(s, HAS_NODE_TYPE.asNode(), createLiteral(FEDORA_RESOURCE)));
    // the repository-namespace types must be present
    assertTrue(graph.contains(s, type.asNode(), createURI(REPOSITORY_NAMESPACE + "Resource")));
    assertTrue(graph.contains(s, type.asNode(), createURI(REPOSITORY_NAMESPACE + "RepositoryRoot")));
    assertTrue(graph.contains(s, type.asNode(), createURI(REPOSITORY_NAMESPACE + "Container")));
}
/**
 * Verifies that a new container's server-managed RDF does not leak the JCR
 * primary identifier or mixin-type properties.
 */
@Test
public void testObjectGraph() {
    final String pid = "/" + getRandomPid();
    final FedoraResource object = containerService.findOrCreate(session, pid);
    final Graph graph = object.getTriples(subjects, SERVER_MANAGED).collect(toModel()).getGraph();
    final Node subject = createGraphSubjectNode(object);
    // jcr property
    assertFalse(graph.contains(subject, HAS_PRIMARY_IDENTIFIER.asNode(), ANY));
    // multivalued property
    final Node mixinType = HAS_MIXIN_TYPE.asNode();
    assertFalse(graph.contains(subject, mixinType, createLiteral(FEDORA_RESOURCE)));
    assertFalse(graph.contains(subject, mixinType, createLiteral(FEDORA_CONTAINER)));
}
/**
 * Verifies that user-set JCR properties surface in the RDF (including
 * binary-typed ones) while jcr:data is filtered out entirely.
 */
@Test
public void testObjectGraphWithCustomProperty() throws RepositoryException {
    FedoraResource object =
            containerService.findOrCreate(session, "/testObjectGraph");
    final javax.jcr.Node node = getJcrNode(object);
    node.setProperty("dc:title", "this-is-some-title");
    node.setProperty("dc:subject", "this-is-some-subject-stored-as-a-binary", BINARY);
    node.setProperty("jcr:data", "jcr-data-should-be-ignored", BINARY);
    session.commit();
    session.expire();
    // reload from a fresh session so only persisted state is inspected
    session = repo.login();
    object = containerService.findOrCreate(session, "/testObjectGraph");
    final Graph graph = object.getTriples(subjects, PROPERTIES).collect(toModel()).getGraph();
    final Node subject = createGraphSubjectNode(object);
    // plain string property appears as a literal
    assertTrue(graph.contains(subject, title.asNode(), createLiteral("this-is-some-title")));
    // binary-typed property is exposed under the same dc namespace
    assertTrue(graph.contains(subject,
            createURI("http://purl.org/dc/elements/1.1/subject"),
            createLiteral("this-is-some-subject-stored-as-a-binary")));
    // jcr:data must never be exposed, under any predicate
    assertFalse(graph.contains(subject, ANY, createLiteral("jcr-data-should-be-ignored")));
}
/**
 * A mixin's declared supertype must be exposed as an inherited rdf:type:
 * register test:aSupertype, register test:testInher extending it, mix the
 * subtype into a node, and expect the supertype among the node's types.
 */
@Test
public void testRdfTypeInheritance() throws RepositoryException {
final Session jcrSession = getJcrSession(session);
final NodeTypeManager mgr = jcrSession.getWorkspace().getNodeTypeManager();
//create supertype mixin
final NodeTypeTemplate type1 = mgr.createNodeTypeTemplate();
type1.setName("test:aSupertype");
type1.setMixin(true);
final NodeTypeDefinition[] nodeTypes = new NodeTypeDefinition[]{type1};
mgr.registerNodeTypes(nodeTypes, true);
//create a type inheriting above supertype
final NodeTypeTemplate type2 = mgr.createNodeTypeTemplate();
type2.setName("test:testInher");
type2.setMixin(true);
type2.setDeclaredSuperTypeNames(new String[]{"test:aSupertype"});
final NodeTypeDefinition[] nodeTypes2 = new NodeTypeDefinition[]{type2};
mgr.registerNodeTypes(nodeTypes2, true);
//create object with inheriting type
FedoraResource object = containerService.findOrCreate(session, "/testNTTnheritanceObject");
final javax.jcr.Node node = getJcrNode(object);
node.addMixin("test:testInher");
session.commit();
// re-open the session so the mixin assignment is read from persisted state
session.expire();
session = repo.login();
object = containerService.findOrCreate(session, "/testNTTnheritanceObject");
//test that supertype has been inherited as rdf:type
final Node s = createGraphSubjectNode(object);
final Node p = type.asNode();
final Node o = createProperty("info:fedora/test/aSupertype").asNode();
assertTrue("supertype test:aSupertype not found inherited in test:testInher!",
object.getTriples(subjects, PROPERTIES).collect(toModel()).getGraph().contains(s, p, o));
}
/**
 * A binary's description graph must hide JCR mixin-type triples but expose
 * user-set relations such as fedora:isPartOf pointing at another container.
 */
@Test
public void testDatastreamGraph() throws RepositoryException, InvalidChecksumException {
final Container parentObject = containerService.findOrCreate(session, "/testDatastreamGraphParent");
final Session jcrSession = getJcrSession(session);
binaryService.findOrCreate(session, "/testDatastreamGraph").setContent(
new ByteArrayInputStream("123456789test123456789".getBytes()),
"text/plain",
null,
null,
null
);
final FedoraResource object = binaryService.findOrCreate(session, "/testDatastreamGraph").getDescription();
// relate the datastream to its parent via a reference-valued JCR property
getJcrNode(object).setProperty("fedora:isPartOf",
jcrSession.getNode("/testDatastreamGraphParent"));
final Graph graph = object.getTriples(subjects, PROPERTIES).collect(toModel()).getGraph();
// multivalued property
final Node s = createGraphSubjectNode(object.getDescribedResource());
Node p = HAS_MIXIN_TYPE.asNode();
Node o = createLiteral(FEDORA_RESOURCE);
assertFalse(graph.contains(s, p, o));
o = createLiteral(FEDORA_NON_RDF_SOURCE_DESCRIPTION);
assertFalse(graph.contains(s, p, o));
// structure
//TODO: re-enable number of children reporting, if practical
//assertTrue(datasetGraph.contains(ANY, s, p, o));
// relations
p = createURI(REPOSITORY_NAMESPACE + "isPartOf");
o = createGraphSubjectNode(parentObject);
assertTrue(graph.contains(s, p, o));
}
/**
 * SPARQL-Update round trip: an INSERT adds a property value; a combined
 * DELETE/INSERT/WHERE then replaces that value in place.
 */
@Test
public void testUpdatingObjectGraph() {
final Node subject = createURI("info:fedora/testObjectGraphUpdates");
final FedoraResource object =
containerService.findOrCreate(session, "/testObjectGraphUpdates");
object.updateProperties(subjects, "INSERT { " + "<"
+ createGraphSubjectNode(object).getURI() + "> "
+ "<info:fcrepo/zyx> \"a\" } WHERE {} ", object.getTriples(subjects, emptySet()));
// jcr property
final Resource s = createResource(createGraphSubjectNode(object).getURI());
final Property p = createProperty("info:fcrepo/zyx");
Literal o = createPlainLiteral("a");
Model model = object.getTriples(subjects, PROPERTIES).collect(toModel());
assertTrue(model.contains(s, p, o));
// replace "a" with "b" via DELETE/INSERT/WHERE over the existing triples
object.updateProperties(subjects, "DELETE { " + "<"
+ createGraphSubjectNode(object).getURI() + "> "
+ "<info:fcrepo/zyx> ?o }\n" + "INSERT { " + "<"
+ createGraphSubjectNode(object).getURI() + "> "
+ "<info:fcrepo/zyx> \"b\" } " + "WHERE { " + "<"
+ createGraphSubjectNode(object).getURI() + "> "
+ "<info:fcrepo/zyx> ?o } ", DefaultRdfStream.fromModel(subject, model));
model = object.getTriples(subjects, PROPERTIES).collect(toModel());
assertFalse("found value we should have removed", model.contains(s, p, o));
o = createPlainLiteral("b");
assertTrue("could not find new value", model.contains(s, p, o));
}
/**
 * Test technically correct DELETE/WHERE with multiple patterns on the same subject and predicate.
 * See also FCREPO-2391
 */
@Test
public void testUpdatesWithMultiplePredicateMatches() {
    final Node subjectNode = createURI("info:fedora/testUpdatesWithMultiplePredicateMatches");
    final FedoraResource resource =
            containerService.findOrCreate(session, "/testUpdatesWithMultiplePredicateMatches");
    // Reads a classpath resource into a string, rethrowing I/O failures unchecked.
    final Function<String, String> read = path -> {
        try {
            return IOUtils.toString(getClass().getResource(path), Charset.forName("UTF8"));
        } catch (final IOException ex) {
            throw new UncheckedIOException(ex);
        }
    };
    // Snapshot of a resource's user-visible triples as a Jena Model.
    final Function<FedoraResource, Model> modelOf =
            r -> r.getTriples(subjects, PROPERTIES).collect(toModel());
    // Applies the SPARQL patch found at the given classpath location.
    final BiFunction<FedoraResource, String, FedoraResource> update = (r, patchFile) -> {
        r.updateProperties(subjects, read.apply(patchFile),
                DefaultRdfStream.fromModel(subjectNode, modelOf.apply(r)));
        return r;
    };
    update.apply(update.apply(resource, "/patch-test/insert-data.txt"), "/patch-test/delete-where.txt");
    final Resource s = createResource(createGraphSubjectNode(resource).getURI());
    final Property pid = createProperty("info:fedora/fedora-system:def/model#PID");
    final Model model = modelOf.apply(resource);
    // The inserted PID must survive the DELETE/WHERE patch ...
    assertTrue(model.contains(s, pid, createPlainLiteral("cdc:17256")));
    // ... while every dc:subject statement must have been removed.
    final Property dcSubject = createProperty("http://purl.org/dc/elements/1.1/subject");
    assertFalse(model.contains(s, dcSubject, (RDFNode) null));
}
/**
 * The root resource's rdf:types must not expose internal JCR, mix, ModeShape,
 * or nt namespaces.
 */
@Test
public void testGetRootObjectTypes() {
    final FedoraResource object = nodeService.find(session, "/");
    final List<URI> types = object.getTypes();
    // method reference is the idiomatic form of x -> x.toString()
    assertFalse(types.stream()
            .map(URI::toString)
            .anyMatch(x -> x.startsWith(JCR_NAMESPACE) || x.startsWith(MIX_NAMESPACE) ||
                    x.startsWith(MODE_NAMESPACE) || x.startsWith(JCR_NT_NAMESPACE)));
}
/**
 * A container must carry fedora:Container and fedora:Resource types while
 * never exposing internal JCR/mix/ModeShape/nt namespaces.
 */
@Test
public void testGetObjectTypes() {
    final FedoraResource object =
            containerService.findOrCreate(session, "/testObjectVersionGraph");
    final List<URI> types = object.getTypes();
    assertTrue(types.contains(create(REPOSITORY_NAMESPACE + "Container")));
    assertTrue(types.contains(create(REPOSITORY_NAMESPACE + "Resource")));
    // method reference is the idiomatic form of x -> x.toString()
    assertFalse(types.stream()
            .map(URI::toString)
            .anyMatch(x -> x.startsWith(JCR_NAMESPACE) || x.startsWith(MIX_NAMESPACE) ||
                    x.startsWith(MODE_NAMESPACE) || x.startsWith(JCR_NT_NAMESPACE)));
}
/**
 * Creates a version of a versionable object and inspects the VERSIONS graph.
 * Ignored until versioning is reimplemented with Memento; the commented-out
 * assertions record the expected version-URI/label behavior.
 */
@Test
@Ignore("Until implemented with Memento")
public void testGetObjectVersionGraph() throws RepositoryException {
final FedoraResource object =
containerService.findOrCreate(session, "/testObjectVersionGraph");
getJcrNode(object).addMixin("mix:versionable");
session.commit();
final Instant theDate = Instant.now();
// create a version and make sure there are 2 versions (root + created)
versionService.createVersion(session, object, subjects, theDate);
session.commit();
final Model graphStore = object.getTriples(subjects, VERSIONS).collect(toModel());
logger.debug(graphStore.toString());
// go querying for the version URI
final Resource s = createResource(createGraphSubjectNode(object).getURI());
// final ExtendedIterator<Statement> triples = graphStore.listStatements(s,HAS_VERSION, (RDFNode)null);
// final List<Statement> list = triples.toList();
// assertEquals(1, list.size());
// make sure the URI is derived from the label
// s = list.get(0).getObject().asResource();
// assertEquals("URI should be derived from label.", s.getURI(), createGraphSubjectNode(object).getURI()
// + "/" + FCR_VERSIONS + "/v0.0.1");
// make sure the label is listed
// assertTrue(graphStore.contains(s, HAS_VERSION_LABEL, createPlainLiteral("v0.0.1")));
}
/**
 * Inserting fedora:isPartOf pointing at a path that does not exist must be
 * rejected with MalformedRdfException.
 */
@Test(expected = MalformedRdfException.class)
public void testAddMissingReference() throws MalformedRdfException {
final FedoraResource object =
containerService.findOrCreate(session, "/testRefObject");
object.updateProperties(
subjects,
"PREFIX example: <http://example.org/>\n"
+ "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
+ "PREFIX fedora: <" + REPOSITORY_NAMESPACE + ">\n"
+ "INSERT { <> fedora:isPartOf <" + subjects.toDomain("/some-path") + ">}"
+ "WHERE { }", object.getTriples(subjects, emptySet()));
}
/**
 * After restricting the session principal to read-only via an ACL, a SPARQL
 * update must fail with AccessDeniedException. The initial update (before the
 * ACL is applied) must succeed, proving the failure comes from the policy.
 */
@Test(expected = AccessDeniedException.class)
public void testUpdateDenied() throws RepositoryException {
final FedoraResource object =
containerService.findOrCreate(session, "/testRefObject");
try {
object.updateProperties(
subjects,
"INSERT { <> <http://purl.org/dc/elements/1.1/title> \"test-original\". }"
+ " WHERE { }", object.getTriples(subjects, emptySet()));
} catch (final AccessDeniedException e) {
// a denial here would mean the object setup itself was broken
fail("Should fail at update, not create property");
}
// grant only jcr:read to "anonymous", then commit the policy
final Session jcrSession = getJcrSession(session);
final AccessControlManager acm = jcrSession.getAccessControlManager();
final Privilege[] permissions = new Privilege[] {acm.privilegeFromName(Privilege.JCR_READ)};
final AccessControlList acl = (AccessControlList) acm.getApplicablePolicies("/testRefObject").next();
acl.addAccessControlEntry(SimplePrincipal.newInstance("anonymous"), permissions);
acm.setPolicy("/testRefObject", acl);
session.commit();
// this update is now expected to throw AccessDeniedException
object.updateProperties(
subjects,
"INSERT { <> <http://purl.org/dc/elements/1.1/title> \"test-update\". }"
+ " WHERE { }", object.getTriples(subjects, emptySet()));
}
/**
 * Predicate URIs ending in a slash are invalid and must be rejected with
 * IllegalArgumentException.
 */
@Test (expected = IllegalArgumentException.class)
public void testInvalidSparqlUpdateValidation() {
final String pid = getRandomPid();
final FedoraResource object =
containerService.findOrCreate(session, pid);
object.updateProperties(
subjects,
"INSERT { <> <http://myurl.org/title/> \"fancy title\" . \n" +
" <> <http://myurl.org/title/> \"fancy title 2\" . } WHERE { }",
object.getTriples(subjects, emptySet()));
}
/**
 * Re-binding an already-used prefix ("pcdm") to a different namespace in a
 * later update must be rejected with InvalidPrefixException.
 */
@Test (expected = InvalidPrefixException.class)
public void testInvalidPrefixSparqlUpdateValidation() {
final String pid = getRandomPid();
final FedoraResource object =
containerService.findOrCreate(session, pid);
// first update binds pcdm: to its canonical namespace — must succeed
object.updateProperties(
subjects,
"PREFIX pcdm: <http://pcdm.org/models#>\n"
+ "INSERT { <> a pcdm:Object}\n"
+ "WHERE { }", object.getTriples(subjects, emptySet()));
// second update re-binds pcdm: to a conflicting namespace — must throw
object.updateProperties(
subjects,
"PREFIX pcdm: <http://garbage.org/models#>\n"
+ "INSERT { <> a pcdm:Garbage}\n"
+ "WHERE { }", object.getTriples(subjects, emptySet()));
}
/**
 * Trailing slashes inside literal VALUES are legal — only predicate URIs are
 * subject to the trailing-slash validation; this update must succeed.
 */
@Test
public void testValidSparqlUpdateWithLiteralTrailingSlash() {
final String pid = getRandomPid();
final FedoraResource object = containerService.findOrCreate(session, pid);
object.updateProperties(
subjects,
"INSERT { <> <http://myurl.org/title> \"fancy title/\" . \n" +
" <> <http://myurl.org/title> \"fancy title 2<br/>\" . } WHERE { }",
object.getTriples(subjects, emptySet()));
}
/**
 * The alternate "DELETE WHERE {...}; INSERT DATA {...}" syntax must pass
 * validation when all predicate URIs are well-formed.
 */
@Test
public void testValidSparqlUpdateValidationAltSyntax() {
final String pid = getRandomPid();
final FedoraResource object = containerService.findOrCreate(session, pid);
object.updateProperties(subjects,
"DELETE WHERE {" +
"<> <http://www.loc.gov/premis/rdf/v1#hasDateCreatedByApplication> ?o0 ." +
"}; INSERT DATA {" +
"<> <http://purl.org/dc/elements/1.1/title> \"Example Managed binary datastream\" ." +
"}",
object.getTriples(subjects, emptySet()));
}
/**
 * The alternate DELETE WHERE / INSERT DATA syntax must still reject a
 * predicate URI with a trailing slash (title/) with IllegalArgumentException.
 */
@Test (expected = IllegalArgumentException.class)
public void testInvalidSparqlUpdateValidationAltSyntax() {
final String pid = getRandomPid();
final FedoraResource object = containerService.findOrCreate(session, pid);
object.updateProperties(subjects,
"DELETE WHERE {" +
"<> <http://www.loc.gov/premis/rdf/v1#hasDateCreatedByApplication> ?o0 ." +
"}; INSERT DATA {" +
"<> <http://purl.org/dc/elements/1.1/title/> \"Example Managed binary datastream\" ." +
"}",
object.getTriples(subjects, emptySet()));
}
@Test
public void testValidSparqlUpdateValidation1() {
    // A simple literal insert with a well-formed predicate must validate.
    final FedoraResource resource =
            containerService.findOrCreate(session, getRandomPid());
    resource.updateProperties(
            subjects,
            "INSERT { <> <http://myurl.org/title> \"5\" . } WHERE { }",
            resource.getTriples(subjects, emptySet()));
}
/**
 * A PREFIX-declared predicate (dsc:p) must pass SPARQL-update validation.
 */
@Test
public void testValidSparqlUpdateValidation2() {
final String pid = getRandomPid();
final FedoraResource object =
containerService.findOrCreate(session, pid);
object.updateProperties(
subjects,
"PREFIX dsc:<http://myurl.org/title> \n" +
"INSERT { <> dsc:p \"ccc\" } WHERE { }",
object.getTriples(subjects, emptySet()));
}
/**
 * Inserting an rdf:type triple must be reflected as the corresponding JCR
 * node type ({namespace}localname) on the backing node.
 */
@Test
public void testUpdatingRdfType() throws RepositoryException {
final FedoraResource object =
containerService.findOrCreate(session, "/testObjectRdfType");
object.updateProperties(subjects, "INSERT { <"
+ createGraphSubjectNode(object).getURI() + "> <" + RDF.type
+ "> <http://some/uri> } WHERE { }", object.getTriples(subjects, emptySet() ));
assertTrue(getJcrNode(object).isNodeType("{http://some/}uri"));
}
/**
 * Inserting then deleting an rdf:type triple must add and then remove the
 * corresponding JCR node type on the backing node.
 */
@Test
public void testRemoveRdfType() throws RepositoryException {
final FedoraResource object =
containerService.findOrCreate(session, "/testRemoveObjectRdfType");
object.updateProperties(subjects, "INSERT { <"
+ createGraphSubjectNode(object).getURI() + "> <" + RDF.type
+ "> <http://some/uri> } WHERE { }", object.getTriples(subjects, PROPERTIES));
assertTrue(getJcrNode(object).isNodeType("{http://some/}uri"));
object.updateProperties(subjects, "DELETE { <"
+ createGraphSubjectNode(object).getURI() + "> <" + RDF.type
+ "> <http://some/uri> } WHERE { }", object.getTriples(subjects, PROPERTIES));
assertFalse(getJcrNode(object).isNodeType("{http://some/}uri"));
}
@Test
public void testEtagValue() throws RepositoryException {
    // After commit, a resource must report a non-null, non-empty ETag.
    final FedoraResource resource =
            containerService.findOrCreate(session, "/testEtagObject");
    session.commit();
    final String etag = resource.getEtagValue();
    assertNotNull(etag);
    assertNotEquals("", etag);
}
/**
 * The INBOUND_REFERENCES graph of a resource must include a triple for each
 * reference-valued property pointing at it (here fedora:isPartOf).
 */
@Test
public void testGetReferences() throws RepositoryException {
final String pid = getRandomPid();
final Session jcrSession = getJcrSession(session);
containerService.findOrCreate(session, pid);
final Container subject = containerService.findOrCreate(session, pid + "/a");
final Container object = containerService.findOrCreate(session, pid + "/b");
// create a JCR reference from /a to /b
final Value value = jcrSession.getValueFactory().createValue(getJcrNode(object));
getJcrNode(subject).setProperty("fedora:isPartOf", new Value[] { value });
session.commit();
final Model model = object.getTriples(subjects, INBOUND_REFERENCES).collect(toModel());
assertTrue(
model.contains(subjects.reverse().convert(subject),
ResourceFactory.createProperty(REPOSITORY_NAMESPACE + "isPartOf"),
subjects.reverse().convert(object))
);
}
/**
 * replaceProperties must skolemize a blank node: the triple pointing at the
 * blank node becomes a *_ref reference property, and the blank node's own
 * statements land on a generated node under the "#" hash container.
 */
@Test
public void testReplaceProperties() throws RepositoryException {
final String pid = getRandomPid();
final Container object = containerService.findOrCreate(session, pid);
final Session jcrSession = getJcrSession(session);
try (final RdfStream triples = object.getTriples(subjects, PROPERTIES)) {
final Model model = triples.collect(toModel());
// blank node target: <object> info:xyz _:b ; _:b dc:title "xyz"
final Resource resource = model.createResource();
final Resource subject = subjects.reverse().convert(object);
final Property predicate = model.createProperty("info:xyz");
model.add(subject, predicate, resource);
model.add(resource, model.createProperty("http://purl.org/dc/elements/1.1/title"), "xyz");
object.replaceProperties(subjects, model, object.getTriples(subjects, PROPERTIES));
@SuppressWarnings("unchecked")
final Iterator<javax.jcr.Property> properties = getJcrNode(object).getProperties();
// the blank-node link is persisted as a *_ref reference property
final Iterator<javax.jcr.Property> relation = Iterators.filter(properties, uncheck(
(final javax.jcr.Property p) -> p.getName().contains("xyz_ref"))::test);
assertTrue(relation.hasNext());
final javax.jcr.Property next = relation.next();
final Value[] values = next.getValues();
assertEquals(1, values.length);
// the referenced node is the skolemized blank node under "#"
final javax.jcr.Node skolemizedNode = jcrSession.getNodeByIdentifier(values[0].getString());
assertTrue(skolemizedNode.getPath().contains("/#/"));
assertEquals("xyz" + FIELD_DELIMITER + XSDstring.getURI(),
skolemizedNode.getProperty("dc:title").getValues()[0].getString());
}
}
/**
 * Hash-URI subjects (<resource>#creator) are stored as child nodes of the
 * "#" container. The hash child must persist while any statement still
 * mentions it and be removed only after the last mention is gone.
 */
@Test
public void testReplacePropertiesHashURIs() throws RepositoryException {
final String pid = getRandomPid();
final Container object = containerService.findOrCreate(session, pid);
final Model model = object.getTriples(subjects, PROPERTIES).collect(toModel());
final Resource hashResource = createResource(createGraphSubjectNode(object).getURI() + "#creator");
final Property foafName = model.createProperty("http://xmlns.com/foaf/0.1/name");
final Literal nameValue = model.createLiteral("xyz");
final Resource foafPerson = createResource("http://xmlns.com/foaf/0.1/Person");
model.add(hashResource, foafName, nameValue);
model.add(hashResource, type, foafPerson);
final Resource subject = subjects.reverse().convert(object);
final Property dcCreator = model.createProperty("http://purl.org/dc/elements/1.1/creator");
model.add(subject, dcCreator, hashResource);
// hash subject now has two statements plus one inbound link
object.replaceProperties(subjects, model, object.getTriples(subjects, PROPERTIES));
assertEquals(1, getJcrNode(object).getNode("#").getNodes().getSize());
// removing foaf:name still leaves rdf:type and the dc:creator link
final Model updatedModel = object.getTriples(subjects, PROPERTIES).collect(toModel());
updatedModel.remove(hashResource, foafName, nameValue);
object.replaceProperties(subjects, updatedModel, object.getTriples(subjects, PROPERTIES));
assertEquals(1, getJcrNode(object).getNode("#").getNodes().getSize());
// removing rdf:type still leaves the dc:creator link
final Model updatedModel2 = object.getTriples(subjects, PROPERTIES).collect(toModel());
updatedModel2.remove(hashResource, type, foafPerson);
object.replaceProperties(subjects, updatedModel2, object.getTriples(subjects, PROPERTIES));
assertEquals(1, getJcrNode(object).getNode("#").getNodes().getSize());
// removing the last mention deletes the hash child node
final Model updatedModel3 = object.getTriples(subjects, PROPERTIES).collect(toModel());
updatedModel3.remove(subject, dcCreator, hashResource);
object.replaceProperties(subjects, updatedModel3, object.getTriples(subjects, PROPERTIES));
assertEquals(0, getJcrNode(object).getNode("#").getNodes().getSize());
}
/**
 * replaceProperties must reject a malformed ebucore:hasMimeType value with
 * ConstraintViolationException.
 */
@Test (expected = ConstraintViolationException.class)
public void testReplacePropertyBadMimeType() {
final String pid = getRandomPid();
final Container object = containerService.findOrCreate(session, pid);
try (final RdfStream triples = object.getTriples(subjects, PROPERTIES)) {
final Model model = triples.collect(toModel());
final Resource resource = model.createResource();
final Resource subject = subjects.reverse().convert(object);
final Property predicate = model.createProperty(
"http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#hasMimeType");
model.add(subject, predicate, "--Total Junk Mime Type--");
model.add(resource, model.createProperty("http://purl.org/dc/elements/1.1/title"), "xyz");
object.replaceProperties(subjects, model, object.getTriples(subjects, PROPERTIES));
}
}
@Test (expected = IllegalArgumentException.class)
public void testUpdatePropertyBadMimeType() {
    // A SPARQL update writing a malformed ebucore:hasMimeType must be rejected.
    final FedoraResource resource = containerService.findOrCreate(session, getRandomPid());
    resource.updateProperties(subjects,
            "PREFIX ebucore: <http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#>\n" +
                    "INSERT { <> ebucore:hasMimeType \"-- Complete Junk --\"" + " . } WHERE { }",
            resource.getTriples(subjects, emptySet()));
}
@Test
public void testDeleteObject() throws RepositoryException {
    // Deleting a committed container must leave a tombstone node at its path.
    final String path = "/" + getRandomPid();
    containerService.findOrCreate(session, path);
    session.commit();
    containerService.findOrCreate(session, path).delete();
    session.commit();
    assertTrue(getJcrSession(session).getNode(path).isNodeType(FEDORA_TOMBSTONE));
}
/**
 * A resource with inbound references can be deleted once the referring
 * resource is removed first; the target ends up as a tombstone.
 */
@Test
public void testDeleteObjectWithInboundReferences() throws RepositoryException {
final String pid = getRandomPid();
final FedoraResource resourceA = containerService.findOrCreate(session, "/" + pid + "/a");
final FedoraResource resourceB = containerService.findOrCreate(session, "/" + pid + "/b");
final Session jcrSession = getJcrSession(session);
// /a holds a JCR reference to /b
final Value value = jcrSession.getValueFactory().createValue(getJcrNode(resourceB));
getJcrNode(resourceA).setProperty("fedora:hasMember", new Value[] { value });
session.commit();
// delete the referrer first, then the referenced resource
containerService.findOrCreate(session, "/" + pid + "/a").delete();
session.commit();
containerService.findOrCreate(session, "/" + pid + "/b").delete();
session.commit();
assertTrue(jcrSession.getNode("/" + pid + "/b").isNodeType(FEDORA_TOMBSTONE));
}
/**
 * Deleting the parent of a weakly-referenced child must clean up the weak
 * reference: the referring resource still loads and the triple disappears.
 */
@Test
public void testDeleteObjectWithInboundReferencesToChildren() throws RepositoryException {
// Set up resources
final String pid = getRandomPid();
final FedoraResource resourceA = containerService.findOrCreate(session, "/" + pid + "/a");
containerService.findOrCreate(session, "/" + pid + "/b");
final FedoraResource resourceX = containerService.findOrCreate(session, "/" + pid + "/b/x");
final Session jcrSession = getJcrSession(session);
// Create a Weak reference
final Value value = jcrSession.getValueFactory().createValue(getJcrNode(resourceX), true);
getJcrNode(resourceA).setProperty("fedora:hasMember", new Value[] { value });
session.commit();
// Verify that relationship exists
final Node s = subjects.reverse().convert(resourceA).asNode();
final Node hasMember = createProperty(REPOSITORY_NAMESPACE, "hasMember").asNode();
final Model rdf = resourceA.getTriples(subjects, PROPERTIES).collect(toModel());
assertTrue(rdf.toString(), rdf.getGraph().contains(s, hasMember, ANY));
// Delete parent of reference target
containerService.findOrCreate(session, "/" + pid + "/b").delete();
session.commit();
// Verify that relationship does NOT exist, and that the resource successfully loads.
containerService.find(session, "/" + pid + "/a");
final Model rdfAfter = resourceA.getTriples(subjects, PROPERTIES).collect(toModel());
assertFalse(rdfAfter.getGraph().contains(s, hasMember, ANY));
}
@Test
public void testGetContainer() {
    // A direct child's getContainer() must resolve to its parent container.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    final FedoraResource child = containerService.findOrCreate(session, "/" + id + "/a");
    assertEquals(parent, child.getContainer());
}
@Test
public void testGetChildren() {
    // The single created child must be returned by getChildren().
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    final FedoraResource child = containerService.findOrCreate(session, "/" + id + "/a");
    assertEquals(child, parent.getChildren().findFirst().get());
}
@Test
public void testGetChildrenRecursively() {
    // Five descendants exist in total, but only one is a direct child.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    for (final String suffix : new String[] {"/a", "/a/b", "/a/b/c", "/a/c/d", "/a/c/e"}) {
        containerService.findOrCreate(session, "/" + id + suffix);
    }
    assertEquals(5, parent.getChildren(true).count());
    assertEquals(1, parent.getChildren(false).count());
}
@Test
public void testGetChildrenWithBinary() {
    // A binary child is listed by getChildren() just like a container child.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    final FedoraResource binary = binaryService.findOrCreate(session, "/" + id + "/a");
    assertEquals(binary, parent.getChildren().findFirst().get());
}
@Test
public void testGetContainerForBinary() {
    // A binary's getContainer() must resolve to its parent container.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    final FedoraResource binary = binaryService.findOrCreate(session, "/" + id + "/a");
    assertEquals(parent, binary.getContainer());
}
@Test
public void testGetContainerWithHierarchy() {
    // A deeply nested child reports the explicitly created ancestor as its
    // container, skipping any implicitly created intermediate nodes.
    final String id = getRandomPid();
    final Container top = containerService.findOrCreate(session, "/" + id);
    final FedoraResource leaf = containerService.findOrCreate(session, "/" + id + "/a/b/c/d");
    assertEquals(top, leaf.getContainer());
}
@Test
public void testGetChildrenWithHierarchy() {
    // The deeply nested resource is the first child yielded for the top
    // container, implicitly created intermediates notwithstanding.
    final String id = getRandomPid();
    final Container top = containerService.findOrCreate(session, "/" + id);
    final FedoraResource leaf = containerService.findOrCreate(session, "/" + id + "/a/b/c/d");
    assertEquals(leaf, top.getChildren().findFirst().get());
}
@Test
public void testGetChildrenTombstonesAreHidden() {
    // A deleted child leaves a tombstone, which getChildren() must not expose.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    containerService.findOrCreate(session, "/" + id + "/a").delete();
    assertFalse(parent.getChildren().findFirst().isPresent());
}
@Test
public void testGetChildrenHidesHashUris() {
    // Hash-URI children (stored under the "#" node) are internal and hidden.
    final String id = getRandomPid();
    final Container parent = containerService.findOrCreate(session, "/" + id);
    containerService.findOrCreate(session, "/" + id + "/#/a");
    assertFalse(parent.getChildren().findFirst().isPresent());
}
/**
 * Two objects, one versioned and linking to the other, must both be
 * deletable — the version history must not block removal of either side.
 */
@Test
// @Ignore ("Until implemented with Memento")
public void testDeleteLinkedVersionedResources() throws RepositoryException {
final Container object1 = containerService.findOrCreate(session, "/" + getRandomPid());
final Container object2 = containerService.findOrCreate(session, "/" + getRandomPid());
object2.enableVersioning();
session.commit();
// Create a link between objects 1 and 2
object2.updateProperties(subjects, "PREFIX example: <http://example.org/>\n" +
"INSERT { <> <example:link> " + "<" + createGraphSubjectNode(object1).getURI() + ">" +
" } WHERE {} ",
object2.getTriples(subjects, emptySet()));
final Instant theDate = Instant.now();
// Create version of object2
versionService.createVersion(session, object2, subjects, theDate);
// Verify that the objects exist
assertTrue("object1 should exist!", exists(object1));
assertTrue("object2 should exist!", exists(object2));
// This is the test: verify successful deletion of the objects
object2.delete();
session.commit();
object1.delete();
session.commit();
// Double-verify that the objects are gone
assertFalse("/object2 should NOT exist!", exists(object2));
assertFalse("/object1 should NOT exist!", exists(object1));
}
/**
 * Reports whether the container still resolves to a live resource:
 * getPath() throws RepositoryRuntimeException once it has been removed.
 */
private boolean exists(final Container resource) {
    try {
        resource.getPath();
    } catch (final RepositoryRuntimeException e) {
        return false;
    }
    return true;
}
@Test
@Ignore ("Until implemented with Memento")
public void testDisableVersioning() throws RepositoryException {
    // Versioning can be switched off again after it has been enabled.
    final Container resource = containerService.findOrCreate(session, "/" + getRandomPid());
    resource.enableVersioning();
    session.commit();
    assertTrue(resource.isVersioned());
    resource.disableVersioning();
    assertFalse(resource.isVersioned());
}
@Test (expected = RepositoryRuntimeException.class)
@Ignore ("Until implemented with Memento")
public void testDisableVersioningException() {
    // Disabling versioning on a never-versioned resource must fail.
    final Container resource = containerService.findOrCreate(session, "/" + getRandomPid());
    resource.disableVersioning();
}
/**
 * A FedoraResourceImpl wrapping the node of a versioned container must
 * produce a usable (non-zero) hash code.
 */
@Test
public void testHash() throws RepositoryException {
    final String pid = getRandomPid();
    final Container object = containerService.findOrCreate(session, "/" + pid);
    object.enableVersioning();
    session.commit();
    final FedoraResourceImpl frozenResource = new FedoraResourceImpl(getJcrNode(object));
    // assertNotEquals reports the offending value on failure, unlike assertFalse(x == 0)
    assertNotEquals(0, frozenResource.hashCode());
}
/**
 * Deleting one target of a multivalued reference property must remove only
 * that target's inbound-reference triple; the other target's remains.
 */
@Test
public void testDeletePartOfMultiValueProperty() throws RepositoryException {
final String pid = getRandomPid();
final String relation = "test:fakeRel";
containerService.findOrCreate(session, pid);
final Container subject = containerService.findOrCreate(session, pid + "/a");
final Container referent1 = containerService.findOrCreate(session, pid + "/b");
final Container referent2 = containerService.findOrCreate(session, pid + "/c");
final Session jcrSession = getJcrSession(session);
// /a references both /b and /c through one multivalued property
final Value[] values = new Value[2];
values[0] = jcrSession.getValueFactory().createValue(getJcrNode(referent1));
values[1] = jcrSession.getValueFactory().createValue(getJcrNode(referent2));
getJcrNode(subject).setProperty(relation, values);
session.commit();
final Model model1 = referent1.getTriples(subjects, INBOUND_REFERENCES).collect(toModel());
assertTrue(model1.contains(subjects.reverse().convert(subject),
createProperty("info:fedora/test/fakeRel"),
createResource("info:fedora/" + pid + "/b")));
assertTrue(model1.contains(subjects.reverse().convert(subject),
createProperty("info:fedora/test/fakeRel"),
createResource("info:fedora/" + pid + "/c")));
// This is the test! Ensure that only the delete resource is removed from the "subject" container.
referent2.delete();
final Model model2 = referent1.getTriples(subjects, INBOUND_REFERENCES).collect(toModel());
assertTrue(model2.contains(subjects.reverse().convert(subject),
createProperty("info:fedora/test/fakeRel"),
createResource("info:fedora/" + pid + "/b")));
assertFalse(model2.contains(subjects.reverse().convert(subject),
createProperty("info:fedora/test/fakeRel"),
createResource("info:fedora/" + pid + "/c")));
}
/**
 * findOrCreateTimeMap() must create the LDPCv child at /pid/&lt;timemap&gt;
 * with node type fedora:TimeMap, and getTimeMap() must return it.
 */
@Test
public void testFindOrCreateTimeMapLDPCv() throws RepositoryException {
final String pid = getRandomPid();
final Session jcrSession = getJcrSession(session);
final FedoraResource resource = containerService.findOrCreate(session, "/" + pid);
session.commit();
// Create TimeMap (LDPCv)
final FedoraResource ldpcvResource = resource.findOrCreateTimeMap();
assertNotNull(ldpcvResource);
assertEquals("/" + pid + "/" + LDPCV_TIME_MAP, ldpcvResource.getPath());
session.commit();
final javax.jcr.Node timeMapNode = jcrSession.getNode("/" + pid).getNode(LDPCV_TIME_MAP);
assertTrue(timeMapNode.isNodeType(FEDORA_TIME_MAP));
final FedoraResource timeMap = resource.getTimeMap();
assertTrue(timeMap instanceof FedoraTimeMap);
assertEquals(timeMapNode, ((FedoraResourceImpl)timeMap).getNode());
}
@Test
public void testGetMementoByDatetime() throws RepositoryException {
    // findMementoByDatetime() must pick the memento closest to the requested
    // instant, matching exactly when possible and falling back to the oldest
    // memento for requests that predate all of them.
    final FedoraResource versioned = containerService.findOrCreate(session, "/" + getRandomPid());
    versioned.enableVersioning();
    final DateTimeFormatter formatter = new DateTimeFormatterBuilder()
            .appendPattern("yyyy-MM-dd'T'HH:mm:ss")
            .toFormatter()
            .withZone(ZoneId.systemDefault());
    final Instant time1 = Instant.from(formatter.parse("2018-01-01T20:15:00"));
    final FedoraResource newest = versionService.createVersion(session, versioned, subjects, time1);
    final Instant time2 = Instant.from(formatter.parse("2018-01-01T10:15:00"));
    final FedoraResource middle = versionService.createVersion(session, versioned, subjects, time2);
    final Instant time3 = Instant.from(formatter.parse("2017-12-31T08:00:00"));
    final FedoraResource oldest = versionService.createVersion(session, versioned, subjects, time3);
    session.commit();
    // after the newest memento -> newest
    assertEquals("Did not get expected Memento for Datetime", newest,
            versioned.findMementoByDatetime(Instant.from(formatter.parse("2018-02-01T10:00:00"))));
    // between the newest and middle mementos -> middle
    assertEquals("Did not get expected Memento for Datetime", middle,
            versioned.findMementoByDatetime(Instant.from(formatter.parse("2018-01-01T15:00:00"))));
    // between the middle and oldest mementos -> oldest
    assertEquals("Did not get expected Memento for Datetime", oldest,
            versioned.findMementoByDatetime(Instant.from(formatter.parse("2018-01-01T08:00:00"))));
    // exact matches
    assertEquals("Did not get expected Memento for Datetime", newest,
            versioned.findMementoByDatetime(time1));
    assertEquals("Did not get expected Memento for Datetime", middle,
            versioned.findMementoByDatetime(time2));
    assertEquals("Did not get expected Memento for Datetime", oldest,
            versioned.findMementoByDatetime(time3));
    // before the oldest memento -> oldest
    assertEquals("Did not get expected Memento for Datetime", oldest,
            versioned.findMementoByDatetime(Instant.from(formatter.parse("2016-01-01T00:00:00"))));
}
@Test
public void testGetMementoByDatetimeEmpty() {
    // With zero mementos created, lookup by datetime must return null.
    final FedoraResource versioned = containerService.findOrCreate(session, "/" + getRandomPid());
    versioned.enableVersioning();
    final DateTimeFormatter formatter = new DateTimeFormatterBuilder()
            .appendPattern("yyyy-MM-dd'T'HH:mm:ss")
            .toFormatter()
            .withZone(ZoneId.systemDefault());
    final Instant time = Instant.from(formatter.parse("2016-04-21T09:43:00"));
    assertNull("Expected the null back because 0 Mementos.",
            versioned.findMementoByDatetime(time));
}
@Test
public void testGetAcl() throws RepositoryException {
    // A freshly created container starts out without any ACL.
    final String pid = getRandomPid();
    final FedoraResource container = containerService.findOrCreate(session, "/" + pid);
    session.commit();
    assertNull(container.getAcl());

    // Once an ACL has been created, getAcl() returns that same WebAC ACL resource.
    final FedoraResource createdAcl = container.findOrCreateAcl();
    session.commit();
    final FedoraResource retrievedAcl = container.getAcl();
    assertNotNull(retrievedAcl);
    assertTrue(retrievedAcl instanceof FedoraWebacAcl);
    assertEquals(createdAcl, retrievedAcl);
}
@Test
public void testFindOrCreateAcl() throws RepositoryException {
    final String pid = getRandomPid();
    final Session jcrSession = getJcrSession(session);
    final FedoraResource container = containerService.findOrCreate(session, "/" + pid);
    session.commit();

    // findOrCreateAcl() yields a WebAC ACL child at the conventional ACL path.
    final FedoraResource acl = container.findOrCreateAcl();
    assertNotNull(acl);
    assertTrue(acl instanceof FedoraWebacAcl);
    assertEquals("/" + pid + "/" + CONTAINER_WEBAC_ACL, acl.getPath());
    session.commit();

    // The backing JCR node must carry the WebAC ACL node type.
    final javax.jcr.Node aclNode = jcrSession.getNode("/" + pid).getNode(CONTAINER_WEBAC_ACL);
    assertTrue(aclNode.isNodeType(FEDORA_WEBAC_ACL));
}
@Test
public void testFindOrCreateBinaryAcl() throws RepositoryException, InvalidChecksumException {
    final String pid = getRandomPid();
    final Session jcrSession = getJcrSession(session);

    // Store a small binary so that the resource exists.
    binaryService.findOrCreate(session, "/" + pid).setContent(
            new ByteArrayInputStream("binary content".getBytes()),
            "text/plain",
            null,
            null,
            null
    );

    // Look the binary back up and attach an ACL to it.
    final FedoraResource binary = binaryService.findOrCreate(session, "/" + pid);
    final FedoraResource acl = binary.findOrCreateAcl();
    assertNotNull(acl);
    assertTrue(acl instanceof FedoraWebacAcl);
    assertEquals("/" + pid + "/" + CONTAINER_WEBAC_ACL, acl.getPath());
    session.commit();

    // The underlying JCR node must carry the WebAC ACL node type.
    final javax.jcr.Node aclNode = jcrSession.getNode("/" + pid).getNode(CONTAINER_WEBAC_ACL);
    assertTrue(aclNode.isNodeType(FEDORA_WEBAC_ACL));
}
// Labels the current base version of the given resource.
private void addVersionLabel(final String label, final FedoraResource r) throws RepositoryException {
    final Session jcrSession = getJcrSession(session);
    final Version baseVersion =
            jcrSession.getWorkspace().getVersionManager().getBaseVersion(r.getPath());
    addVersionLabel(label, baseVersion);
}
// Attaches the label to version v in its containing history. The final argument is
// moveLabel=false: per JCR, a label already assigned to another version is not moved.
private static void addVersionLabel(final String label, final Version v) throws RepositoryException {
    v.getContainingHistory().addVersionLabel(v.getName(), label, false);
}
/**
 * Rounds an instant down to whole-second precision by removing its millisecond-of-second
 * component.
 *
 * <p>Uses {@link Math#floorMod} rather than {@code %} so that instants before the epoch
 * (negative epoch millis) are also floored to the start of their second; a plain remainder
 * would round those toward zero (i.e. up) instead.
 *
 * @param date the instant to round
 * @return the instant with its millisecond-of-second set to zero
 */
private static Instant roundDate(final Instant date) {
    return date.minusMillis(Math.floorMod(date.toEpochMilli(), 1000));
}
}
| |
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.javascript.jscomp.AstFactory.type;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_CLOSURE_CALL_SCOPE_ERROR;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_DESTRUCTURING_FORWARD_DECLARE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_FORWARD_DECLARE_NAMESPACE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_GET_CALL_SCOPE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_GET_NAMESPACE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_REQUIRE_NAMESPACE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.INVALID_REQUIRE_TYPE_NAMESPACE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.MISSING_MODULE_OR_PROVIDE;
import static com.google.javascript.jscomp.ClosurePrimitiveErrors.MODULE_USES_GOOG_MODULE_GET;
import static com.google.javascript.jscomp.ClosureRewriteModule.ILLEGAL_MODULE_RENAMING_CONFLICT;
import static com.google.javascript.jscomp.parsing.parser.FeatureSet.Feature.MODULES;
import com.google.common.base.Splitter;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.javascript.jscomp.ModuleRenaming.GlobalizedModuleName;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.deps.ModuleLoader;
import com.google.javascript.jscomp.modules.Binding;
import com.google.javascript.jscomp.modules.Module;
import com.google.javascript.jscomp.modules.ModuleMap;
import com.google.javascript.jscomp.modules.ModuleMetadataMap;
import com.google.javascript.jscomp.modules.ModuleMetadataMap.ModuleMetadata;
import com.google.javascript.jscomp.parsing.parser.FeatureSet.Feature;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.QualifiedName;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * Rewrites an ES6 module into a form that can be safely concatenated. Note that we treat a file
 * as an ES6 module if it has at least one import or export statement.
 *
 * <p>Also rewrites any goog.{require,requireType,forwardDeclare,module.get} calls that either
 * appear inside an ES module or reference an ES module that uses goog.declareModuleId.
 */
public final class Es6RewriteModules implements CompilerPass, NodeTraversal.Callback {
// The result of goog.require()/goog.requireType() must be stored in a `const`.
static final DiagnosticType LHS_OF_GOOG_REQUIRE_MUST_BE_CONST =
    DiagnosticType.error(
        "JSC_LHS_OF_GOOG_REQUIRE_MUST_BE_CONST",
        "The left side of a goog.require() or goog.requireType() "
            + "must use ''const'' (not ''let'' or ''var'')");

// A goog.requireType alias of an ES6 module must be declared with `const`.
static final DiagnosticType REQUIRE_TYPE_FOR_ES6_SHOULD_BE_CONST =
    DiagnosticType.error(
        "JSC_REQUIRE_TYPE_FOR_ES6_SHOULD_BE_CONST",
        "goog.requireType alias for ES6 module should be const.");

// A goog.forwardDeclare alias of an ES6 module must be declared with `const`.
static final DiagnosticType FORWARD_DECLARE_FOR_ES6_SHOULD_BE_CONST =
    DiagnosticType.error(
        "JSC_FORWARD_DECLARE_FOR_ES6_SHOULD_BE_CONST",
        "goog.forwardDeclare alias for ES6 module should be const.");

// Warning only: ES6 modules should depend on other ES6 modules with `import`,
// not goog.require.
static final DiagnosticType SHOULD_IMPORT_ES6_MODULE =
    DiagnosticType.warning(
        "JSC_SHOULD_IMPORT_ES6_MODULE",
        "ES6 modules should import other ES6 modules rather than goog.require them.");
private final AbstractCompiler compiler;
// Factory used to build all synthetic AST nodes created by this pass.
private final AstFactory astFactory;
// The registry's native UNKNOWN type, cached in the constructor.
private final JSType unknownType;
// Splits a dotted qualified name (used to separate an alias root from its property path).
private static final Splitter DOT_SPLITTER = Splitter.on(".");
// Symbol table to record import/alias references into; null when none was requested.
@Nullable private final PreprocessorSymbolTable preprocessorSymbolTable;
/**
 * Local variable names that were goog.require'd to qualified name we need to inline.
 *
 * <p>We need to inline all required names since there are certain well-known Closure symbols
 * (like goog.asserts) that later stages of the compiler check for and cannot handle aliases.
 *
 * <p>We use this to rewrite something like:
 *
 * <pre>
 * import {x} from '';
 * const {assert} = goog.require('goog.asserts');
 * assert(x);
 * </pre>
 *
 * To:
 *
 * <pre>
 * import {x} from '';
 * goog.asserts.assert(x);
 * </pre>
 *
 * Because if we used an alias like below the assertion would not be recognized:
 *
 * <pre>
 * import {x} from '';
 * const {assert} = goog.asserts;
 * assert(x);
 * </pre>
 */
// TODO(johnplaisted): This is actually incorrect if the require'd thing is mutated. But we need
// it so that things like goog.asserts work. Mutated closure symbols are a lot rarer than needing
// to use asserts and the like. Until there's a better solution to finding aliases of well known
// symbols we have to inline anything that is require'd.
private Map<String, GlobalizedModuleName> namesToInlineByAlias;
// Exported names in the current module whose declarations carry @typedef JSDoc.
private Set<String> typedefs;
private final ModuleMetadataMap moduleMetadataMap;
private final ModuleMap moduleMap;
// Global typed scope when this pass runs after type checking; may be null.
private final TypedScope globalTypedScope;
/**
 * Creates a new Es6RewriteModules instance which can be used to rewrite ES6 modules to a
 * concatenable form.
 *
 * @param compiler the active compiler; also supplies the AstFactory and type registry
 * @param moduleMetadataMap metadata for all known modules; must not be null
 * @param moduleMap resolved module and binding information
 * @param preprocessorSymbolTable optional symbol table populated for tooling; may be null
 * @param globalTypedScope global typed scope when running after type checking; may be null
 */
Es6RewriteModules(
    AbstractCompiler compiler,
    ModuleMetadataMap moduleMetadataMap,
    ModuleMap moduleMap,
    @Nullable PreprocessorSymbolTable preprocessorSymbolTable,
    @Nullable TypedScope globalTypedScope) {
  // Fail fast: the metadata map drives nearly every rewrite decision in this pass.
  checkNotNull(moduleMetadataMap);
  this.compiler = compiler;
  this.astFactory = compiler.createAstFactory();
  this.moduleMetadataMap = moduleMetadataMap;
  this.moduleMap = moduleMap;
  this.preprocessorSymbolTable = preprocessorSymbolTable;
  this.globalTypedScope = globalTypedScope;
  this.unknownType = compiler.getTypeRegistry().getNativeType(JSTypeNative.UNKNOWN_TYPE);
}
/** Returns whether the given script node represents an ES6 module file. */
public static boolean isEs6ModuleRoot(Node scriptNode) {
  checkArgument(scriptNode.isScript(), scriptNode);
  // A goog.module never counts as an ES6 module; otherwise the script is a module
  // exactly when its first child is a MODULE_BODY.
  boolean isGoogModule = scriptNode.getBooleanProp(Node.GOOG_MODULE);
  return !isGoogModule
      && scriptNode.hasChildren()
      && scriptNode.getFirstChild().isModuleBody();
}
@Override
public void process(Node externs, Node root) {
  checkArgument(externs.isRoot(), externs);
  checkArgument(root.isRoot(), root);
  // Rewrite every script under both roots.
  NodeTraversal.traverseRoots(compiler, this, externs, root);
  // After this pass no ES6 module syntax remains in the AST.
  compiler.setFeatureSet(compiler.getFeatureSet().without(MODULES));
  // This pass may add getters properties on module objects.
  GatherGetterAndSetterProperties.update(compiler, externs, root);
}
/** Resets the state this pass tracks per ES6 module file. */
private void clearPerFileState() {
  typedefs = new HashSet<>();
  namesToInlineByAlias = new HashMap<>();
}
/**
 * Checks for goog.require, goog.requireType, goog.module.get and goog.forwardDeclare calls that
 * are meant to import ES6 modules and rewrites them.
 */
private class RewriteRequiresForEs6Modules extends AbstractPostOrderCallback {
  // Set to true once at least one call in the script has been rewritten.
  private boolean transpiled = false;
  // An (s, old, new) entry indicates that occurrences of `old` in scope `s` should be rewritten
  // as `new`. This is used to rewrite namespaces that appear in calls to goog.requireType and
  // goog.forwardDeclare.
  private Table<Node, String, String> renameTable;

  /** Rewrites the recognized Closure calls in the given script, then applies renames. */
  void rewrite(Node scriptNode) {
    transpiled = false;
    renameTable = HashBasedTable.create();
    NodeTraversal.traverse(compiler, scriptNode, this);
    if (transpiled) {
      scriptNode.putBooleanProp(Node.TRANSPILED, true);
    }
    if (!renameTable.isEmpty()) {
      // Only type references are renamed: the requireType/forwardDeclare declarations that
      // fed the rename table were detached during the traversal above.
      NodeTraversal.traverse(
          compiler, scriptNode, new Es6RenameReferences(renameTable, /* typesOnly= */ true));
    }
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (!n.isCall()) {
      return;
    }
    // Identify which Closure primitive (if any) this call invokes.
    boolean isRequire = n.getFirstChild().matchesQualifiedName("goog.require");
    boolean isRequireType = n.getFirstChild().matchesQualifiedName("goog.requireType");
    boolean isGet = n.getFirstChild().matchesQualifiedName("goog.module.get");
    boolean isForwardDeclare = n.getFirstChild().matchesQualifiedName("goog.forwardDeclare");
    if (!isRequire && !isRequireType && !isGet && !isForwardDeclare) {
      return;
    }
    // Each primitive takes exactly one string-literal namespace argument.
    if (!n.hasTwoChildren() || !n.getLastChild().isStringLit()) {
      if (isRequire) {
        t.report(n, INVALID_REQUIRE_NAMESPACE);
      } else if (isRequireType) {
        t.report(n, INVALID_REQUIRE_TYPE_NAMESPACE);
      } else if (isGet) {
        t.report(n, INVALID_GET_NAMESPACE);
      } else {
        t.report(n, INVALID_FORWARD_DECLARE_NAMESPACE);
      }
      return;
    }
    String name = n.getLastChild().getString();
    ModuleMetadata moduleMetadata = moduleMetadataMap.getModulesByGoogNamespace().get(name);
    // Only calls that reference an ES6 module are rewritten here.
    if (moduleMetadata == null || !moduleMetadata.isEs6Module()) {
      return;
    }
    // TODO(johnplaisted): Once we have an alternative to forwardDeclare / requireType that
    // doesn't require Closure Library warn about those too.
    // TODO(johnplaisted): Once we have import() support warn about goog.module.get.
    if (isRequire) {
      // Warn when an ES6 module goog.require's another ES6 module instead of importing it.
      ModuleMetadata currentModuleMetadata =
          moduleMetadataMap.getModulesByPath().get(t.getInput().getPath().toString());
      if (currentModuleMetadata != null && currentModuleMetadata.isEs6Module()) {
        t.report(n, SHOULD_IMPORT_ES6_MODULE);
      }
    }
    if (isGet && t.inGlobalHoistScope()) {
      t.report(n, INVALID_GET_CALL_SCOPE);
      return;
    }
    Node statementNode = NodeUtil.getEnclosingStatement(n);
    // e.g. `const x = goog.require('ns');` as opposed to a bare `goog.require('ns');`.
    boolean importHasAlias = NodeUtil.isNameDeclaration(statementNode);
    if (importHasAlias) {
      if (statementNode.getFirstChild().isDestructuringLhs()) {
        if (isForwardDeclare) {
          // const {a, c:b} = goog.forwardDeclare('an.es6.namespace');
          t.report(n, INVALID_DESTRUCTURING_FORWARD_DECLARE);
          return;
        }
        if (isRequireType) {
          if (!statementNode.isConst()) {
            t.report(statementNode, REQUIRE_TYPE_FOR_ES6_SHOULD_BE_CONST);
            return;
          }
          // const {a, c:b} = goog.requireType('an.es6.namespace');
          // Record a type-only rename for each destructured key.
          for (Node child = statementNode.getFirstFirstChild().getFirstChild();
              child != null;
              child = child.getNext()) {
            checkState(child.isStringKey());
            checkState(child.getFirstChild().isName());
            renameTable.put(
                t.getScopeRoot(),
                child.getFirstChild().getString(),
                ModuleRenaming.getGlobalName(moduleMetadata, name)
                    .getprop(child.getString())
                    .join());
          }
        } else {
          // Work around a bug in the type checker where destructuring can create
          // too many layers of aliases and confuse the type checker. b/112061124.
          // const {a, c:b} = goog.require('an.es6.namespace');
          // const a = module$es6.a;
          // const b = module$es6.c;
          for (Node child = statementNode.getFirstFirstChild().getFirstChild();
              child != null;
              child = child.getNext()) {
            checkState(child.isStringKey());
            checkState(child.getFirstChild().isName());
            GlobalizedModuleName globalName =
                getGlobalNameAndType(
                        moduleMetadata, name, /* isFromMissingModuleOrProvide= */ false)
                    .getprop(child.getString());
            Node constNode =
                astFactory.createSingleConstNameDeclaration(
                    child.getFirstChild().getString(), globalName.toQname(astFactory));
            constNode.srcrefTree(child);
            constNode.insertBefore(statementNode);
          }
        }
        statementNode.detach();
        t.reportCodeChange();
      } else {
        if (isForwardDeclare || isRequireType) {
          if (!statementNode.isConst()) {
            DiagnosticType diagnostic =
                isForwardDeclare
                    ? FORWARD_DECLARE_FOR_ES6_SHOULD_BE_CONST
                    : REQUIRE_TYPE_FOR_ES6_SHOULD_BE_CONST;
            t.report(statementNode, diagnostic);
            return;
          }
          // const namespace = goog.forwardDeclare('an.es6.namespace');
          // const namespace = goog.requireType('an.es6.namespace');
          renameTable.put(
              t.getScopeRoot(),
              statementNode.getFirstChild().getString(),
              ModuleRenaming.getGlobalName(moduleMetadata, name).join());
          statementNode.detach();
          t.reportCodeChange();
        } else {
          // const module = goog.require('an.es6.namespace');
          // const module = module$es6;
          n.replaceWith(
              astFactory
                  .createName(
                      ModuleRenaming.getGlobalName(moduleMetadata, name).getRoot(), type(n))
                  .srcrefTree(n));
          t.reportCodeChange();
        }
      }
    } else {
      if (isForwardDeclare || isRequireType) {
        // goog.forwardDeclare('an.es6.namespace')
        // goog.requireType('an.es6.namespace')
        renameTable.put(
            t.getScopeRoot(), name, ModuleRenaming.getGlobalName(moduleMetadata, name).join());
        statementNode.detach();
      } else {
        // goog.require('an.es6.namespace')
        if (statementNode.isExprResult() && statementNode.getFirstChild() == n) {
          // The call is the whole statement; drop it entirely.
          statementNode.detach();
        } else {
          // The call is a subexpression; replace it with the module's global name.
          n.replaceWith(
              astFactory
                  .createName(
                      ModuleRenaming.getGlobalName(moduleMetadata, name).getRoot(), type(n))
                  .srcrefTree(n));
        }
      }
      t.reportCodeChange();
    }
    transpiled = true;
  }
}
@Override
public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
  if (n.isScript()) {
    // Trigger creation of the global scope before inserting any synthetic code.
    nodeTraversal.getScope();
    // Rewrite Closure calls that reference ES6 modules in every script, whether or not
    // the script is itself an ES6 module.
    new RewriteRequiresForEs6Modules().rewrite(n);
    if (isEs6ModuleRoot(n)) {
      clearPerFileState();
      n.putBooleanProp(Node.TRANSPILED, true);
    } else {
      // Non-module scripts need no further rewriting; skip their contents.
      return false;
    }
  }
  return true;
}
/** Dispatches on the node kinds this pass rewrites: imports, exports, scripts, calls. */
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
  if (n.isImport()) {
    maybeWarnExternModule(t, n, parent);
    visitImport(t, n, parent);
  } else if (n.isExport()) {
    maybeWarnExternModule(t, n, parent);
    visitExport(t, n, parent);
  } else if (n.isScript()) {
    visitScript(t, n);
  } else if (n.isCall()) {
    // TODO(johnplaisted): Consolidate on declareModuleId.
    if (n.getFirstChild().matchesQualifiedName("goog.declareModuleId")) {
      // Drop the whole goog.declareModuleId(...) statement.
      n.getParent().detach();
    }
  } else if (n.isImportMeta()) {
    // We're choosing to not "support" import.meta because currently all the outputs from the
    // compiler are scripts and support for import.meta (only works in modules) would be
    // meaningless
    t.report(n, Es6ToEs3Util.CANNOT_CONVERT, "import.meta");
  }
}
/** Reports module syntax found in (non-type-summary) externs, which is unsupported. */
private void maybeWarnExternModule(NodeTraversal t, Node n, Node parent) {
  checkState(parent.isModuleBody());
  boolean inPlainExterns =
      parent.isFromExterns() && !NodeUtil.isFromTypeSummary(parent.getParent());
  if (inPlainExterns) {
    t.report(n, Es6ToEs3Util.CANNOT_CONVERT_YET, "ES6 modules in externs");
  }
}
/**
 * Validates a single import statement, records symbol-table entries for it, and removes it
 * from the module body.
 */
private void visitImport(NodeTraversal t, Node importDecl, Node parent) {
  checkArgument(parent.isModuleBody(), parent);
  String importName = importDecl.getLastChild().getString();
  boolean isNamespaceImport = importName.startsWith("goog:");
  if (isNamespaceImport) {
    // Allow importing Closure namespace objects (e.g. from goog.provide or goog.module) as
    // import ... from 'goog:my.ns.Object'.
    String namespace = importName.substring("goog:".length());
    ModuleMetadata m = moduleMetadataMap.getModulesByGoogNamespace().get(namespace);
    if (m == null) {
      t.report(importDecl, MISSING_MODULE_OR_PROVIDE, namespace);
    } else {
      checkState(m.isEs6Module() || m.isGoogModule() || m.isGoogProvide());
    }
  } else {
    // Ordinary path import: resolve it against the module loader.
    ModuleLoader.ModulePath modulePath =
        t.getInput()
            .getPath()
            .resolveJsModule(
                importName,
                importDecl.getSourceFileName(),
                importDecl.getLineno(),
                importDecl.getCharno());
    if (modulePath == null) {
      // The module loader issues an error
      // Fall back to assuming the module is a file path
      modulePath = t.getInput().getPath().resolveModuleAsPath(importName);
    }
    maybeAddImportedFileReferenceToSymbolTable(importDecl.getLastChild(), modulePath.toString());
    // TODO(johnplaisted): Use ModuleMetadata to ensure the path required is CommonJs or ES6 and
    // if not give a better error.
  }
  // Record each imported alias (named specs and `import * as ns`) in the symbol table.
  for (Node child = importDecl.getFirstChild(); child != null; child = child.getNext()) {
    if (child.isImportSpecs()) {
      for (Node grandChild = child.getFirstChild();
          grandChild != null;
          grandChild = grandChild.getNext()) {
        maybeAddAliasToSymbolTable(grandChild.getFirstChild(), t.getSourceName());
        checkState(grandChild.hasTwoChildren());
      }
    } else if (child.isImportStar()) {
      // import * as ns from "mod"
      maybeAddAliasToSymbolTable(child, t.getSourceName());
    }
  }
  // The import statement itself is removed from the script.
  importDecl.detach();
  t.reportCodeChange();
}
/** Rewrites or removes a single export statement inside a module body. */
private void visitExport(NodeTraversal t, Node export, Node parent) {
  checkArgument(parent.isModuleBody(), parent);
  if (export.getBooleanProp(Node.EXPORT_DEFAULT)) {
    // export default
    // If the thing being exported is a class or function that has a name,
    // extract it from the export statement, so that it can be referenced
    // from within the module.
    //
    // export default class X {} -> class X {}; ... moduleName.default = X;
    // export default function X() {} -> function X() {}; ... moduleName.default = X;
    //
    // Otherwise, create a local variable for it and export that.
    //
    // export default 'someExpression'
    // ->
    // var $jscompDefaultExport = 'someExpression';
    // ...
    // moduleName.default = $jscompDefaultExport;
    Node child = export.getFirstChild();
    String name = null;
    if (child.isFunction() || child.isClass()) {
      name = NodeUtil.getName(child);
    }
    if (name != null) {
      Node decl = child.detach();
      export.replaceWith(decl);
    } else {
      // Anonymous default export: wrap the expression in a synthetic var, moving any
      // JSDoc from the expression onto the declaration.
      Node var =
          astFactory.createSingleVarNameDeclaration(
              ModuleRenaming.DEFAULT_EXPORT_VAR_PREFIX, export.removeFirstChild());
      var.setJSDocInfo(child.getJSDocInfo());
      child.setJSDocInfo(null);
      var.srcrefTreeIfMissing(export);
      export.replaceWith(var);
    }
    t.reportCodeChange();
  } else if (export.getBooleanProp(Node.EXPORT_ALL_FROM)
      || export.hasTwoChildren()
      || export.getFirstChild().getToken() == Token.EXPORT_SPECS) {
    // export * from 'moduleIdentifier';
    // export {x, y as z} from 'moduleIdentifier';
    // export {Foo};
    // These bindings are emitted later from the module's namespace (createExportsObject),
    // so the statements themselves can simply be dropped.
    export.detach();
    t.reportCodeChange();
  } else {
    visitExportDeclaration(t, export);
  }
}
/**
 * Records every name bound by an exported declaration as a typedef when the declaration
 * carries {@code @typedef} JSDoc.
 *
 * <p>Handles forms such as:
 *
 * <pre>
 * export var Foo;
 * export let {a, b:[c,d]} = {};
 * </pre>
 */
private void visitExportNameDeclaration(Node declaration) {
  // The JSDoc lives on the declaration statement, so typedef-ness is identical for every
  // name the statement binds; compute it once instead of re-checking inside the loop.
  final JSDocInfo info = declaration.getJSDocInfo();
  final boolean isTypedef = info != null && info.hasTypedefType();
  for (Node lhs : NodeUtil.findLhsNodesInNode(declaration)) {
    checkState(lhs.isName());
    if (isTypedef) {
      typedefs.add(lhs.getString());
    }
  }
}
/** Rewrites {@code export <declaration>} by splicing the declaration in place of the export. */
private void visitExportDeclaration(NodeTraversal t, Node export) {
  // export var Foo;
  // export function Foo() {}
  // etc.
  Node declaration = export.getFirstChild();
  if (NodeUtil.isNameDeclaration(declaration)) {
    // Record which of the declared names are typedefs before unwrapping the export.
    visitExportNameDeclaration(declaration);
  }
  export.replaceWith(declaration.detach());
  t.reportCodeChange();
}
/** Moves the children of the MODULE_BODY directly into its parent SCRIPT node. */
private void inlineModuleToGlobalScope(Node moduleNode) {
  checkState(moduleNode.isModuleBody());
  Node scriptNode = moduleNode.getParent();
  moduleNode.detach();
  scriptNode.addChildrenToFront(moduleNode.removeChildren());
}
/** Rewrites the body of an ES6 module script into concatenable global code. */
private void visitScript(NodeTraversal t, Node script) {
  final Node moduleBody = script.getFirstChild();
  // TypedScopeCreator sets the module object type on the MODULE_BODY during type checking.
  final AstFactory.Type moduleObjectType = type(moduleBody);
  inlineModuleToGlobalScope(moduleBody);
  ClosureRewriteModule.checkAndSetStrictModeDirective(t, script);
  // Derive the module's deterministic global name from its path.
  Module thisModule = moduleMap.getModule(t.getInput().getPath());
  QualifiedName qualifiedName =
      ModuleRenaming.getGlobalName(thisModule.metadata(), /* googNamespace= */ null);
  checkState(qualifiedName.isSimple(), "Unexpected qualified name %s", qualifiedName);
  String moduleName = qualifiedName.getRoot();
  Node moduleVar = createExportsObject(moduleName, t, script, moduleObjectType);
  // Rename vars to not conflict in global scope.
  NodeTraversal.traverse(compiler, script, new RenameGlobalVars(thisModule));
  // Rename the exports object to something we can reference later.
  moduleVar.getFirstChild().setString(moduleName);
  moduleVar.makeNonIndexableRecursive();
  declareGlobalVariable(moduleVar.getFirstChild(), t);
  // rewriteRequires is here (rather than being part of the main visit() method, because we only
  // want to rewrite the requires if this is an ES6 module. Note that we also want to do this
  // AFTER renaming all module scoped vars in the event that something that is goog.require'd is
  // a global, unqualified name (e.g. if "goog.provide('foo')" exists, we don't want to rewrite
  // "const foo = goog.require('foo')" to "const foo = foo". If we rewrite our module scoped names
  // first then we'll rewrite to "const foo$module$fudge = goog.require('foo')", then to
  // "const foo$module$fudge = foo".
  rewriteRequires(script);
  t.reportCodeChange();
}
/**
 * Appends the module's exports object (`var <tmp> = {...}`) plus one assignment, getter, or
 * typedef stub per export to the end of the script, returning the VAR node for the object.
 */
private Node createExportsObject(
    String moduleName, NodeTraversal t, Node script, AstFactory.Type moduleObjectType) {
  Node moduleObject = astFactory.createObjectLit(moduleObjectType);
  // Going to get renamed by RenameGlobalVars, so the name we choose here doesn't matter as long
  // as it doesn't collide with an existing variable. (We can't use `moduleName` since then
  // RenameGlobalVars will rename all references to `moduleName` incorrectly). We'll fix the name
  // in visitScript after the global renaming to ensure it has a name that is deterministic from
  // the path.
  //
  // So after this method we'll have:
  // var $jscomp$tmp$exports$module$name = {};
  // module$name.exportName = localName;
  //
  // After RenameGlobalVars:
  // var $jscomp$tmp$exports$module$nameglobalized = {};
  // module$name.exportName = localName$globalized;
  //
  // After visitScript:
  // var module$name = {};
  // module$name.exportName = localName$globalized;
  Node moduleVar =
      astFactory.createSingleVarNameDeclaration("$jscomp$tmp$exports$module$name", moduleObject);
  moduleVar.getFirstChild().putBooleanProp(Node.MODULE_EXPORT, true);
  // TODO(b/144593112): Stop adding JSDoc when this pass moves to always be after typechecking.
  JSDocInfo.Builder infoBuilder = JSDocInfo.builder();
  infoBuilder.recordConstancy();
  moduleVar.setJSDocInfo(infoBuilder.build());
  moduleVar.getFirstChild().setDeclaredConstantVar(true);
  script.addChildToBack(moduleVar.srcrefTreeIfMissing(script));
  Module thisModule = moduleMap.getModule(t.getInput().getPath());
  // Emit one member per entry in the module's namespace (its exports).
  for (Map.Entry<String, Binding> entry : thisModule.namespace().entrySet()) {
    String exportedName = entry.getKey();
    Binding binding = entry.getValue();
    Node nodeForSourceInfo = binding.sourceNode();
    boolean mutated = binding.isMutated();
    QualifiedName boundVariableQualifiedName = ModuleRenaming.getGlobalName(binding);
    checkState(
        boundVariableQualifiedName.isSimple(),
        "unexpected qualified name: %s",
        boundVariableQualifiedName);
    String boundVariableName = boundVariableQualifiedName.getRoot();
    Node getProp =
        astFactory.createGetPropWithoutColor(
            astFactory.createName(moduleName, moduleObjectType), exportedName);
    getProp.putBooleanProp(Node.MODULE_EXPORT, true);
    if (typedefs.contains(exportedName)) {
      // /** @typedef {foo} */
      // moduleName.foo;
      JSDocInfo.Builder builder = JSDocInfo.builder().parseDocumentation();
      JSTypeExpression typeExpr =
          new JSTypeExpression(
              astFactory.createString(exportedName).srcref(nodeForSourceInfo),
              script.getSourceFileName());
      builder.recordTypedef(typeExpr);
      JSDocInfo info = builder.build();
      getProp.setJSDocInfo(info);
      Node exprResult = astFactory.exprResult(getProp).srcrefTreeIfMissing(nodeForSourceInfo);
      script.addChildToBack(exprResult);
    } else if (mutated) {
      // Mutated exports need a live view of the variable, so export through a getter.
      final Node globalExportName = astFactory.createName(boundVariableName, type(getProp));
      addGetterExport(script, nodeForSourceInfo, moduleObject, exportedName, globalExportName);
      NodeUtil.addFeatureToScript(t.getCurrentScript(), Feature.GETTER, compiler);
    } else {
      // Avoid the extra complexity of using getters when the property isn't mutated.
      // exports.foo = foo;
      Node assign =
          astFactory.createAssign(
              getProp, astFactory.createName(boundVariableName, type(getProp)));
      // TODO(b/144593112): Stop adding JSDoc when this pass moves to always be after typechecking
      JSDocInfo.Builder builder = JSDocInfo.builder().parseDocumentation();
      builder.recordConstancy();
      JSDocInfo info = builder.build();
      assign.setJSDocInfo(info);
      script.addChildToBack(astFactory.exprResult(assign).srcrefTreeIfMissing(nodeForSourceInfo));
    }
  }
  return moduleVar;
}
/**
 * Adds a getter for {@code exportedName} to the module object literal so that reads of the
 * export observe the current value of the underlying (mutated) variable.
 */
private void addGetterExport(
    Node script, Node forSourceInfo, Node objLit, String exportedName, Node value) {
  Node getter = astFactory.createGetterDef(exportedName, value);
  getter.putBooleanProp(Node.MODULE_EXPORT, true);
  objLit.addChildToBack(getter);
  if (!astFactory.isAddingTypes()) {
    // TODO(b/143904518): Remove this code when this pass is permanently moved after type checking
    // Type checker doesn't infer getters so mark the return as unknown.
    // { /** @return {?} */ get foo() { return foo; } }
    JSDocInfo.Builder builder = JSDocInfo.builder().parseDocumentation();
    builder.recordReturnType(
        new JSTypeExpression(
            new Node(Token.QMARK).srcref(forSourceInfo), script.getSourceFileName()));
    getter.setJSDocInfo(builder.build());
  } else {
    // For a property typed as number, synthesize a type `function(): number`.
    getter.setJSType(compiler.getTypeRegistry().createFunctionType(value.getJSType()));
  }
  getter.srcrefTreeIfMissing(forSourceInfo);
  compiler.reportChangeToEnclosingScope(getter.getFirstChild().getLastChild());
  compiler.reportChangeToEnclosingScope(getter);
}
/**
 * Rewrites goog.require/requireType/module.get calls in a rewritten module script, then
 * inlines the aliases those rewrites recorded (in both code and JSDoc type references).
 */
private void rewriteRequires(Node script) {
  // First traversal: rewrite/detach the Closure calls, populating namesToInlineByAlias.
  NodeTraversal.builder()
      .setCompiler(compiler)
      .setCallback(
          (NodeTraversal t, Node n, Node parent) -> {
            if (n.isCall()) {
              Node fn = n.getFirstChild();
              if (fn.matchesQualifiedName("goog.require")
                  || fn.matchesQualifiedName("goog.requireType")) {
                // TODO(tjgq): This will rewrite both type references and code references. For
                // goog.requireType, the latter are potentially broken because the symbols aren't
                // guaranteed to be available at run time. A separate pass needs to be added to
                // detect these incorrect uses of goog.requireType.
                visitRequireOrGet(t, n, parent, /* isRequire= */ true);
              } else if (fn.matchesQualifiedName("goog.module.get")) {
                visitGoogModuleGet(t, n, parent);
              }
            }
          })
      .traverse(script);
  // Second traversal: replace uses of the recorded aliases with their global names.
  NodeTraversal.builder()
      .setCompiler(compiler)
      .setCallback(
          (NodeTraversal t, Node n, Node parent) -> {
            JSDocInfo info = n.getJSDocInfo();
            if (info != null) {
              for (Node typeNode : info.getTypeNodes()) {
                inlineAliasedTypes(t, typeNode);
              }
            }
            if (n.isName() && namesToInlineByAlias.containsKey(n.getString())) {
              Var v = t.getScope().getVar(n.getString());
              // Don't rewrite the name at its own declaration site.
              if (v == null || v.getNameNode() != n) {
                GlobalizedModuleName replacementName = namesToInlineByAlias.get(n.getString());
                Node replacement = replacementName.toQname(astFactory).srcrefTree(n);
                n.replaceWith(replacement);
              }
            }
          })
      .traverse(script);
}
/**
 * Recursively rewrites JSDoc type nodes whose root segment is a require'd alias so they
 * reference the module's globalized name instead.
 */
private void inlineAliasedTypes(NodeTraversal t, Node typeNode) {
  if (typeNode.isStringLit()) {
    String name = typeNode.getString();
    // Split "alias.rest.of.path" into the alias root and the remainder.
    List<String> split = DOT_SPLITTER.limit(2).splitToList(name);
    // We've already removed the alias.
    if (t.getScope().getVar(split.get(0)) == null) {
      GlobalizedModuleName replacement = namesToInlineByAlias.get(split.get(0));
      if (replacement != null) {
        String rest = "";
        if (split.size() == 2) {
          rest = "." + split.get(1);
        }
        // Preserve the original name on the node before rewriting it.
        typeNode.setOriginalName(name);
        typeNode.setString(replacement.aliasName().join() + rest);
        t.reportCodeChange();
      }
    }
  }
  for (Node child = typeNode.getFirstChild(); child != null; child = child.getNext()) {
    inlineAliasedTypes(t, child);
  }
}
/** Validates a goog.module.get call and delegates its rewriting to visitRequireOrGet. */
private void visitGoogModuleGet(NodeTraversal t, Node getCall, Node parent) {
  boolean wellFormed = getCall.hasTwoChildren() && getCall.getLastChild().isStringLit();
  if (!wellFormed) {
    t.report(getCall, INVALID_GET_NAMESPACE);
    return;
  }
  // Module has already been turned into a script at this point.
  if (t.inGlobalHoistScope()) {
    t.report(getCall, MODULE_USES_GOOG_MODULE_GET);
    return;
  }
  visitRequireOrGet(t, getCall, parent, /* isRequire= */ false);
}
/**
 * Builds made-up metadata for a Closure namespace that is not part of the input.
 *
 * <p>Keeps this pass fault tolerant so it can still rewrite something. Some tools don't care
 * about rewriting correctly and just want the type information of this module (e.g. clutz).
 */
private ModuleMetadata getFallbackMetadataForNamespace(String namespace) {
  // Assume a provide'd file to be consistent with goog.module rewriting.
  ModuleMetadata.Builder fallback =
      ModuleMetadata.builder()
          .moduleType(ModuleMetadataMap.ModuleType.GOOG_PROVIDE)
          .usesClosure(true)
          .isTestOnly(false);
  fallback.googNamespacesBuilder().add(namespace);
  return fallback.build();
}
/**
 * Rewrites a single goog.require / goog.requireType / goog.module.get call.
 *
 * <p>Aliased results are recorded in {@code namesToInlineByAlias} and their declarations
 * removed; unaliased calls are detached or replaced with the module's global name.
 */
private void visitRequireOrGet(
    NodeTraversal t, Node requireCall, Node parent, boolean isRequire) {
  if (!requireCall.hasTwoChildren() || !requireCall.getLastChild().isStringLit()) {
    t.report(requireCall, INVALID_REQUIRE_NAMESPACE);
    return;
  }
  // Module has already been turned into a script at this point.
  if (isRequire && !t.getScope().isGlobal()) {
    t.report(requireCall, INVALID_CLOSURE_CALL_SCOPE_ERROR);
    return;
  }
  String namespace = requireCall.getLastChild().getString();
  boolean isStoredInDeclaration = NodeUtil.isDeclaration(parent.getParent());
  if (isStoredInDeclaration && !parent.getParent().isConst()) {
    compiler.report(JSError.make(parent.getParent(), LHS_OF_GOOG_REQUIRE_MUST_BE_CONST));
  }
  ModuleMetadata m = moduleMetadataMap.getModulesByGoogNamespace().get(namespace);
  boolean isFromFallbackMetadata = m == null;
  if (isFromFallbackMetadata) {
    // Unknown namespace: report it, but keep rewriting with synthetic metadata so the
    // output still gets a deterministic name.
    t.report(requireCall, MISSING_MODULE_OR_PROVIDE, namespace);
    m = getFallbackMetadataForNamespace(namespace);
  }
  if (isStoredInDeclaration) {
    if (isRequire) {
      Node toDetach;
      if (parent.isDestructuringLhs()) {
        // const {a, b} = goog.require(...): record each destructured key as an alias.
        checkState(parent.getFirstChild().isObjectPattern());
        toDetach = parent.getParent();
        for (Node child = parent.getFirstFirstChild(); child != null; child = child.getNext()) {
          checkState(child.isStringKey() && child.getFirstChild().isName(), child);
          GlobalizedModuleName rep =
              getGlobalNameAndType(m, namespace, isFromFallbackMetadata)
                  .getprop(child.getString());
          namesToInlineByAlias.put(child.getFirstChild().getString(), rep);
        }
      } else if (parent.isName()) {
        // const alias = goog.require(...): record the whole-module alias.
        GlobalizedModuleName alias = getGlobalNameAndType(m, namespace, isFromFallbackMetadata);
        namesToInlineByAlias.put(parent.getString(), alias);
        toDetach = parent.getParent();
      } else {
        checkState(parent.isExprResult());
        toDetach = parent;
      }
      toDetach.detach();
    } else {
      // Stored goog.module.get: replace the call with the module's global name.
      GlobalizedModuleName name = getGlobalNameAndType(m, namespace, isFromFallbackMetadata);
      Node replacement = name.toQname(astFactory).srcrefTree(requireCall);
      requireCall.replaceWith(replacement);
    }
  } else {
    // Bare `goog.require('ns');` statement: drop it entirely.
    checkState(requireCall.getParent().isExprResult());
    requireCall.getParent().detach();
  }
}
/**
 * Looks up information about the globalized name and type of a given module.
 *
 * @param metadata Required. The metadata for the module or provide being imported.
 * @param googNamespace Optional.
 * @param isFromMissingModuleOrProvide Whether the metadata is synthesized fallback metadata
 */
private GlobalizedModuleName getGlobalNameAndType(
    ModuleMetadata metadata,
    @Nullable String googNamespace,
    boolean isFromMissingModuleOrProvide) {
  if (!isFromMissingModuleOrProvide) {
    return GlobalizedModuleName.create(metadata, googNamespace, globalTypedScope);
  }
  // Synthesized metadata has no corresponding type defined in the scope, so pair the
  // global name with unknownType instead of asking ModuleRenaming to look one up.
  QualifiedName globalName = ModuleRenaming.getGlobalName(metadata, googNamespace);
  return GlobalizedModuleName.create(globalName, unknownType);
}
/**
* Traverses a node tree and
*
* <ol>
* <li>Appends a suffix to all global variable names defined in this module.
* <li>Changes references to imported values to access the exported variable.
* </ol>
*/
private class RenameGlobalVars extends AbstractPostOrderCallback {
// The module whose globals are being renamed by this traversal.
private final Module thisModule;
RenameGlobalVars(Module thisModule) {
this.thisModule = thisModule;
}
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
// First rewrite any type names appearing in JSDoc attached to this node.
JSDocInfo info = n.getJSDocInfo();
if (info != null) {
for (Node typeNode : info.getTypeNodes()) {
fixTypeNode(t, typeNode);
}
}
if (n.isName()) {
String name = n.getString();
Var var = t.getScope().getVar(name);
if (var != null && var.isGlobal()) {
// Avoid polluting the global namespace.
String newName =
ModuleRenaming.getGlobalNameOfEsModuleLocalVariable(thisModule.metadata(), name)
.join();
n.setString(newName);
n.setOriginalName(name);
t.reportCodeChange(n);
if (NodeUtil.isDeclarationLValue(n)) {
declareGlobalVariable(n, t);
}
} else if (var == null && thisModule.boundNames().containsKey(name)) {
// Imports have been detached, so they won't show up in scope. Thus if we have a variable
// not in scope that shares the name of an import it is the import.
maybeAddAliasToSymbolTable(n, t.getSourceName());
Binding binding = thisModule.boundNames().get(name);
Node replacement = replace(n, binding);
// `n.x()` may become `foo()`
// NOTE(review): replace() has already swapped `n` out of the tree, so
// `parent.getFirstChild() == n` looks like it can never be true here, and the
// guard already requires FREE_CALL to be set before re-setting it to true.
// Confirm whether this was meant to test `replacement` and/or clear the prop.
if (replacement.isName()
&& parent.isCall()
&& parent.getFirstChild() == n
&& parent.getBooleanProp(Node.FREE_CALL)) {
parent.putBooleanProp(Node.FREE_CALL, true);
}
if (NodeUtil.isDeclarationLValue(n)) {
declareGlobalVariable(n, t);
}
t.reportCodeChange();
}
}
}
/**
 * Replaces the reference to a given binding. See {@link
 * ModuleRenaming#getGlobalNameForJsDoc(ModuleMap, Binding, List)} for a JS Doc version.
 *
 * <p>For example:
 *
 * <pre>
 * // bar
 * export let baz = {qux: 0};
 * </pre>
 *
 * <pre>
 * // foo
 * import * as bar from 'bar';
 * export {bar};
 * </pre>
 *
 * <pre>
 * import * as foo from 'foo';
 * use(foo.bar.baz.qux);
 * </pre>
 *
 * <p>Should call this method with the binding and node for {@code foo}. In this example any of
 * these properties could also be modules. This method will replace as much of the GETPROP chain
 * as it can with module exported variables. Meaning in the above example this would return
 * something like "baz$$module$bar.qux", whereas if this method were called for just "foo.bar" it
 * would return "module$bar", as it refers to a module object itself.
 *
 * @param n the node to replace
 * @param binding the binding nameNode is a reference to
 * @return the node that replaced {@code n} in the tree
 */
private Node replace(Node n, Binding binding) {
checkState(n.isName());
// Walk up the GETPROP chain as long as each property resolves to an export of the
// referenced ES6 module, narrowing the binding to the deepest resolvable export.
while (binding.isModuleNamespace()
&& binding.metadata().isEs6Module()
&& n.getParent().isGetProp()) {
String propertyName = n.getParent().getString();
Module m = moduleMap.getModule(binding.metadata().path());
if (m.namespace().containsKey(propertyName)) {
binding = m.namespace().get(propertyName);
n = n.getParent();
} else {
// This means someone referenced an invalid export on a module object. This should be an
// error, so just rewrite and let the type checker complain later. It isn't a super clear
// error, but we're working on type checking modules soon.
break;
}
}
QualifiedName globalName = ModuleRenaming.getGlobalName(binding);
final Node newNode;
if (!globalName.isSimple()) {
String root = globalName.getRoot();
newNode =
// we might encounter a name not in the global scope when requiring a missing symbol.
globalTypedScope != null && globalTypedScope.hasSlot(root)
? astFactory.createQName(globalTypedScope, globalName.join())
: astFactory.createQNameWithUnknownType(globalName.join());
} else {
// Because this pass does not update the global scope with injected names, t.getScope()
// will not contain a declaration for this global name. Fortunately, we already have the
// JSType on the existing node to pass to AstFactory.
newNode = astFactory.createName(globalName.getRoot(), type(n));
}
// For kythe: the new node only represents the last name it replaced, not all the names.
// e.g. if we rewrite `a.b.c.d.e` to `x.d.e`, then `x` should map to `c`, not `a.b.c`.
n.replaceWith(newNode);
newNode.srcrefTree(n);
newNode.setOriginalName(n.getString());
return newNode;
}
/**
 * Replace type name references. Change short names to fully qualified names with namespace
 * prefixes. Eg: {Foo} becomes {module$test.Foo}.
 */
private void fixTypeNode(NodeTraversal t, Node typeNode) {
if (typeNode.isStringLit()) {
// Note: intentionally shadows the enclosing `thisModule` field with a fresh lookup
// for the module currently being traversed.
Module thisModule = moduleMap.getModule(t.getInput().getPath());
String name = typeNode.getString();
List<String> splitted = DOT_SPLITTER.splitToList(name);
String baseName = splitted.get(0);
// `rest` keeps the original "." separators and everything after the base name.
String rest = "";
if (splitted.size() > 1) {
rest = name.substring(baseName.length());
}
Var var = t.getScope().getVar(baseName);
if (var != null && var.isGlobal()) {
// A module-local global: rename the base and reattach the property path.
maybeSetNewName(
t,
typeNode,
name,
ModuleRenaming.getGlobalNameOfEsModuleLocalVariable(thisModule.metadata(), baseName)
.join()
+ rest);
} else if (var == null && thisModule.boundNames().containsKey(baseName)) {
// Imports have been detached, so they won't show up in scope. Thus if we have a variable
// not in scope that shares the name of an import it is the import.
Binding binding = thisModule.boundNames().get(baseName);
String globalName =
ModuleRenaming.getGlobalNameForJsDoc(
moduleMap, binding, splitted.subList(1, splitted.size()));
maybeSetNewName(t, typeNode, name, globalName);
if (preprocessorSymbolTable != null) {
// Jsdoc type node is a single STRING node that spans the whole type. For example
// STRING node "bar.Foo". ES6 import rewrite replaces only "module"
// part of the type: "bar.Foo" => "module$full$path$bar$Foo". We have to record
// "bar" as alias.
Node onlyBaseName = Node.newString(baseName).srcref(typeNode);
onlyBaseName.setLength(baseName.length());
maybeAddAliasToSymbolTable(onlyBaseName, t.getSourceName());
}
}
// Recorded even when no rename happened (maybeSetNewName also sets it on change).
typeNode.setOriginalName(name);
}
for (Node child = typeNode.getFirstChild(); child != null; child = child.getNext()) {
fixTypeNode(t, child);
}
}
/** Applies the rename only when it actually differs, recording a code change. */
private void maybeSetNewName(NodeTraversal t, Node node, String name, String newName) {
if (!name.equals(newName)) {
node.setString(newName);
node.setOriginalName(name);
t.reportCodeChange();
}
}
}
/**
 * Add alias nodes to the symbol table as they are going to be removed by the rewriter.
 * Example aliases:
 *
 * <pre>
 * import * as foo from './foo';
 * import {doBar} from './bar';
 *
 * console.log(doBar);
 * </pre>
 *
 * @param n Alias node. In the example above alias nodes are foo, doBar, and doBar.
 * @param module Name of the module currently being processed.
 */
private void maybeAddAliasToSymbolTable(Node n, String module) {
// No-op unless a preprocessor symbol table was requested.
if (preprocessorSymbolTable == null) {
return;
}
n.putBooleanProp(Node.MODULE_ALIAS, true);
// Alias can be used in js types. Types have node type STRING and not NAME so we have to
// use their name as string.
String nodeName =
n.isStringLit() || n.isImportStar()
? n.getString()
: preprocessorSymbolTable.getQualifiedName(n);
// We need to include module as part of the name because aliases are local to current module.
// Aliases with the same name from different module should be completely different entities.
String name = "alias_" + module + "_" + nodeName;
preprocessorSymbolTable.addReference(n, name);
}
/**
 * Add reference to a file that current module imports. Example:
 *
 * <pre>
 * import * as qux from '../some/file.js';
 * </pre>
 *
 * <p>Will add a reference to file.js on the string node `'../some/file.js'`.
 *
 * @param importStringNode String node from the import statement that references imported file. In
 *     the example above it is the '../some/file.js' STRING node.
 * @param importedFilePath Absolute path to the imported file. In the example above it can be
 *     myproject/folder/some/file.js
 */
private void maybeAddImportedFileReferenceToSymbolTable(
Node importStringNode, String importedFilePath) {
// No-op unless a preprocessor symbol table was requested.
if (preprocessorSymbolTable == null) {
return;
}
// If this if the first import that mentions importedFilePath then we need to create a SCRIPT
// node for the imported file.
if (preprocessorSymbolTable.getSlot(importedFilePath) == null) {
Node scriptNode = compiler.getScriptNode(importedFilePath);
if (scriptNode != null) {
preprocessorSymbolTable.addReference(scriptNode, importedFilePath);
}
}
preprocessorSymbolTable.addReference(importStringNode, importedFilePath);
}
/**
 * Declares the (renamed) global NAME node in the global typed scope so later passes can
 * resolve it. Only runs when the AstFactory is adding types; reports an error when the
 * chosen global name already has its own slot (a renaming collision).
 */
private void declareGlobalVariable(Node n, NodeTraversal t) {
checkState(n.isName());
if (!astFactory.isAddingTypes()) {
return;
}
checkNotNull(this.globalTypedScope);
String name = n.getString();
if (this.globalTypedScope.hasOwnSlot(name)) {
t.report(t.getCurrentScript(), ILLEGAL_MODULE_RENAMING_CONFLICT, name);
} else {
JSType type = checkNotNull(n.getJSType());
this.globalTypedScope.declare(name, n, type, t.getInput(), false);
}
}
}
| |
package com.flushoutsolutions.foheart.models;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import com.flushoutsolutions.foheart.appDataBase.DatabaseContract;
import com.flushoutsolutions.foheart.appDataBase.DatabaseHelper;
import com.flushoutsolutions.foheart.application.FoHeart;
import com.flushoutsolutions.foheart.data.ApplicationData;
import java.util.ArrayList;
import java.util.List;
/**
 * SQLite-backed data-access model for the application table.
 *
 * <p>All queries use {@code ?} placeholders with selection arguments instead of
 * string-concatenated SQL, so values containing quotes cannot break the statement
 * or inject SQL. Cursors and the database handle are released in {@code finally}
 * blocks.
 *
 * <p>Created by Manuel on 10/08/2014.
 */
public class ApplicationModel {

    private DatabaseHelper dbHelper = DatabaseHelper.getHelper(FoHeart.getAppContext());
    private static ApplicationModel instance = null;
    public SQLiteDatabase db = null;

    /**
     * Returns the process-wide singleton. Synchronized so two threads racing on the
     * first call cannot construct two instances.
     */
    public static synchronized ApplicationModel get_model()
    {
        if (instance == null)
            instance = new ApplicationModel();
        return instance;
    }

    private ApplicationModel()
    {
    }

    /**
     * Materializes the row the cursor currently points at.
     * Column order must match the application table definition.
     */
    private ApplicationData readRow(Cursor cur)
    {
        return new ApplicationData(
                cur.getInt(0),     // _id
                cur.getString(1),  // code
                cur.getString(2),  // description
                cur.getString(3),  // app version
                cur.getString(4),  // base version
                cur.getString(5),  // db user
                cur.getString(6),  // db pass
                cur.getInt(7),     // update interval
                cur.getInt(8),     // debug mode
                cur.getInt(9),     // sync master
                cur.getInt(10));   // sync transaction
    }

    /** Maps a record to ContentValues; shared by add() and update(). */
    private ContentValues toValues(ApplicationData data)
    {
        ContentValues values = new ContentValues();
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_APP_CODE, data.code);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_APP_DESC, data.description);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_APP_VERSION, data.app_version);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_BASE_VERSION, data.base_version);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_DB_USER, data.db_user);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_DB_PASS, data.db_pass);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_UPDATE_INTERVAL, data.update_interval);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_DEBUG_MODE, data.debug_mode);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_SYNC_MASTER, data.sync_master);
        values.put(DatabaseContract.ApplicationSchema.COLUMN_NAME_SYNC_TRANSACTION, data.sync_transaction);
        return values;
    }

    /**
     * Loads the application row with the given id.
     *
     * @return the row, or null if no row with that id exists
     */
    public synchronized ApplicationData get_data(int id)
    {
        openDB();
        ApplicationData appData = null;
        Cursor curApp = db.rawQuery(
                "SELECT * FROM " + DatabaseContract.ApplicationSchema.TABLE_NAME
                        + " WHERE " + DatabaseContract.ApplicationSchema._ID + "=?",
                new String[]{String.valueOf(id)});
        try
        {
            if (curApp.moveToFirst())
                appData = readRow(curApp);
        }
        finally
        {
            curApp.close();
            closeDB();
        }
        return appData;
    }

    /**
     * Inserts the record.
     *
     * @return the new row id, or 0 when data is null (insert failures return -1
     *     from SQLiteDatabase.insert and are passed through)
     */
    public synchronized long add(ApplicationData data)
    {
        long lastRowId = 0;
        if (null != data)
        {
            openDB();
            try
            {
                lastRowId = db.insert(
                        DatabaseContract.ApplicationSchema.TABLE_NAME, null, toValues(data));
            }
            finally
            {
                closeDB();
            }
        }
        return lastRowId;
    }

    /**
     * Updates the row whose _id matches data._id.
     *
     * @return the number of rows affected (0 when data is null)
     */
    public synchronized int update(ApplicationData data)
    {
        int rowsAffected = 0;
        if (null != data)
        {
            openDB();
            try
            {
                rowsAffected = db.update(
                        DatabaseContract.ApplicationSchema.TABLE_NAME,
                        toValues(data),
                        DatabaseContract.ApplicationSchema._ID + "=?",
                        new String[]{String.valueOf(data._id)});
            }
            finally
            {
                closeDB();
            }
        }
        return rowsAffected;
    }

    /** Inserts when no row with data._id exists yet, otherwise updates it. */
    public synchronized long save(ApplicationData data)
    {
        ApplicationData record = this.get_data(data._id);
        return (record == null) ? add(data) : update(data);
    }

    /**
     * Returns all application rows.
     *
     * <p>Rows are materialized directly from the cursor. The previous implementation
     * called get_data(id) per row, which re-queried and closed the database while
     * the outer cursor was still being iterated.
     */
    public synchronized List<ApplicationData> list()
    {
        openDB();
        List<ApplicationData> list = new ArrayList<ApplicationData>();
        Cursor curApp = db.rawQuery(
                "SELECT * FROM " + DatabaseContract.ApplicationSchema.TABLE_NAME, null);
        try
        {
            while (curApp.moveToNext())
                list.add(readRow(curApp));
        }
        finally
        {
            curApp.close();
            closeDB();
        }
        return list;
    }

    /** Deletes the row whose _id matches data._id; failures are logged, not thrown. */
    public synchronized void delete(ApplicationData data)
    {
        openDB();
        try
        {
            db.delete(
                    DatabaseContract.ApplicationSchema.TABLE_NAME,
                    DatabaseContract.ApplicationSchema._ID + "=?",
                    new String[]{String.valueOf(data._id)});
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            // Previously skipped when delete() threw, leaking the open handle.
            closeDB();
        }
    }

    /** Deletes every row in the application table; failures are logged, not thrown. */
    public synchronized void delete_all()
    {
        openDB();
        try
        {
            db.delete(DatabaseContract.ApplicationSchema.TABLE_NAME, null, null);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
        finally
        {
            closeDB();
        }
    }

    /**
     * Loads the application row with the given code.
     *
     * @return the row, or null if no row matches
     */
    public synchronized ApplicationData get_data(String appCode)
    {
        openDB();
        ApplicationData appData = null;
        // '?' placeholder: previously the value was concatenated into the SQL string,
        // which breaks on quotes and permits SQL injection.
        Cursor curApp = db.rawQuery(
                "SELECT * FROM " + DatabaseContract.ApplicationSchema.TABLE_NAME
                        + " WHERE " + DatabaseContract.ApplicationSchema.COLUMN_NAME_APP_CODE + "=?",
                new String[]{appCode});
        try
        {
            if (curApp.moveToFirst())
                appData = readRow(curApp);
        }
        finally
        {
            curApp.close();
            closeDB();
        }
        return appData;
    }

    /**
     * Loads the application row with the given description.
     *
     * @return the row, or null if no row matches
     */
    public synchronized ApplicationData get_data_by_desc(String desc)
    {
        openDB();
        ApplicationData appData = null;
        Cursor curApp = db.rawQuery(
                "SELECT * FROM " + DatabaseContract.ApplicationSchema.TABLE_NAME
                        + " WHERE " + DatabaseContract.ApplicationSchema.COLUMN_NAME_APP_DESC + "=?",
                new String[]{desc});
        try
        {
            if (curApp.moveToFirst())
                appData = readRow(curApp);
        }
        finally
        {
            curApp.close();
            closeDB();
        }
        return appData;
    }

    /** Opens (or reopens) the writable database handle if needed. */
    private void openDB(){
        if (db == null || !db.isOpen())
            db = dbHelper.getWritableDatabase();
    }

    /** Closes the handle if it is currently open. */
    private void closeDB(){
        if (db != null && db.isOpen())
            db.close();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.common.impl;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerType;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import java.beans.IntrospectionException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map;
import org.yaml.snakeyaml.constructor.AbstractConstruct;
import org.yaml.snakeyaml.constructor.Constructor;
import org.yaml.snakeyaml.introspector.BeanAccess;
import org.yaml.snakeyaml.introspector.Property;
import org.yaml.snakeyaml.introspector.PropertyUtils;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Representer;
import static org.apache.hadoop.ozone.container.keyvalue
.KeyValueContainerData.KEYVALUE_YAML_TAG;
/**
* Class for creating and reading .container files.
*/
public final class ContainerDataYaml {
private static final Logger LOG =
LoggerFactory.getLogger(ContainerDataYaml.class);
// Utility class: no instances.
private ContainerDataYaml() {
}
/**
 * Creates a .container file in yaml format, computing and setting the
 * ContainerData checksum before writing.
 *
 * @param containerType type of the container; selects the yaml layout
 * @param containerData data to serialize (its checksum is updated here)
 * @param containerFile destination file, written as UTF-8
 * @throws IOException on write failure or unrecognized container type
 */
public static void createContainerFile(ContainerType containerType,
ContainerData containerData, File containerFile) throws IOException {
Writer writer = null;
try {
// Create Yaml for given container type
Yaml yaml = getYamlForContainerType(containerType);
// Compute Checksum and update ContainerData
containerData.computeAndSetChecksum(yaml);
// Write the ContainerData with checksum to Yaml file.
writer = new OutputStreamWriter(new FileOutputStream(
containerFile), "UTF-8");
yaml.dump(containerData, writer);
} finally {
try {
if (writer != null) {
writer.close();
}
// NOTE(review): a failed close() may mean the file was not fully flushed,
// yet it is only logged here and the call still succeeds — confirm intended.
} catch (IOException ex) {
LOG.warn("Error occurred during closing the writer. ContainerID: " +
containerData.getContainerID());
}
}
}
/**
 * Read the yaml file, and return containerData.
 *
 * @param containerFile the .container file to parse; must not be null
 * @return the deserialized ContainerData
 * @throws IOException on read failure
 */
public static ContainerData readContainerFile(File containerFile)
throws IOException {
Preconditions.checkNotNull(containerFile, "containerFile cannot be null");
InputStream input = null;
ContainerData containerData;
try {
// Field-based access so private fields are (de)serialized without getters.
PropertyUtils propertyUtils = new PropertyUtils();
propertyUtils.setBeanAccess(BeanAccess.FIELD);
propertyUtils.setAllowReadOnlyProperties(true);
Representer representer = new ContainerDataRepresenter();
representer.setPropertyUtils(propertyUtils);
Constructor containerDataConstructor = new ContainerDataConstructor();
Yaml yaml = new Yaml(containerDataConstructor, representer);
yaml.setBeanAccess(BeanAccess.FIELD);
input = new FileInputStream(containerFile);
containerData = (ContainerData)
yaml.load(input);
} finally {
if (input!= null) {
input.close();
}
}
return containerData;
}
/**
 * Given a ContainerType this method returns a Yaml representation of
 * the container properties.
 *
 * @param containerType type of container
 * @return Yaml representation of container properties
 *
 * @throws StorageContainerException if the type is unrecognized
 */
public static Yaml getYamlForContainerType(ContainerType containerType)
throws StorageContainerException {
PropertyUtils propertyUtils = new PropertyUtils();
propertyUtils.setBeanAccess(BeanAccess.FIELD);
propertyUtils.setAllowReadOnlyProperties(true);
switch (containerType) {
case KeyValueContainer:
Representer representer = new ContainerDataRepresenter();
representer.setPropertyUtils(propertyUtils);
// Tag KeyValueContainerData so dump() emits the custom yaml tag.
representer.addClassTag(
KeyValueContainerData.class,
KeyValueContainerData.KEYVALUE_YAML_TAG);
Constructor keyValueDataConstructor = new ContainerDataConstructor();
return new Yaml(keyValueDataConstructor, representer);
default:
throw new StorageContainerException("Unrecognized container Type " +
"format " + containerType, ContainerProtos.Result
.UNKNOWN_CONTAINER_TYPE);
}
}
/**
 * Representer class to define which fields need to be stored in yaml file.
 */
private static class ContainerDataRepresenter extends Representer {
// Restricts serialization to the fields each container type whitelists.
@Override
protected Set<Property> getProperties(Class<? extends Object> type)
throws IntrospectionException {
Set<Property> set = super.getProperties(type);
Set<Property> filtered = new TreeSet<Property>();
// When a new Container type is added, we need to add what fields need
// to be filtered here
if (type.equals(KeyValueContainerData.class)) {
List<String> yamlFields = KeyValueContainerData.getYamlFields();
// filter properties
for (Property prop : set) {
String name = prop.getName();
if (yamlFields.contains(name)) {
filtered.add(prop);
}
}
}
return filtered;
}
}
/**
 * Constructor class for KeyValueData, which will be used by Yaml.
 */
private static class ContainerDataConstructor extends Constructor {
ContainerDataConstructor() {
//Adding our own specific constructors for tags.
// When a new Container type is added, we need to add yamlConstructor
// for that
this.yamlConstructors.put(
KEYVALUE_YAML_TAG, new ConstructKeyValueContainerData());
// Route all integer scalars through ConstructLong so they uniformly
// deserialize as Long (see comment on ConstructLong below).
this.yamlConstructors.put(Tag.INT, new ConstructLong());
}
// Builds a KeyValueContainerData from the tagged yaml mapping node.
private class ConstructKeyValueContainerData extends AbstractConstruct {
public Object construct(Node node) {
MappingNode mnode = (MappingNode) node;
Map<Object, Object> nodes = constructMapping(mnode);
//Needed this, as TAG.INT type is by default converted to Long.
long layOutVersion = (long) nodes.get(OzoneConsts.LAYOUTVERSION);
int lv = (int) layOutVersion;
long size = (long) nodes.get(OzoneConsts.MAX_SIZE_GB);
int maxSize = (int) size;
//When a new field is added, it needs to be added here.
KeyValueContainerData kvData = new KeyValueContainerData(
(long) nodes.get(OzoneConsts.CONTAINER_ID), lv, maxSize);
kvData.setContainerDBType((String)nodes.get(
OzoneConsts.CONTAINER_DB_TYPE));
kvData.setMetadataPath((String) nodes.get(
OzoneConsts.METADATA_PATH));
kvData.setChunksPath((String) nodes.get(OzoneConsts.CHUNKS_PATH));
Map<String, String> meta = (Map) nodes.get(OzoneConsts.METADATA);
kvData.setMetadata(meta);
kvData.setChecksum((String) nodes.get(OzoneConsts.CHECKSUM));
// Lifecycle state is persisted as its enum name string.
String state = (String) nodes.get(OzoneConsts.STATE);
switch (state) {
case "OPEN":
kvData.setState(ContainerProtos.ContainerLifeCycleState.OPEN);
break;
case "CLOSING":
kvData.setState(ContainerProtos.ContainerLifeCycleState.CLOSING);
break;
case "CLOSED":
kvData.setState(ContainerProtos.ContainerLifeCycleState.CLOSED);
break;
default:
throw new IllegalStateException("Unexpected " +
"ContainerLifeCycleState " + state + " for the containerId " +
nodes.get(OzoneConsts.CONTAINER_ID));
}
return kvData;
}
}
//Below code is taken from snake yaml, as snakeyaml tries to fit the
// number if it fits in integer, otherwise returns long. So, slightly
// modified the code to return long in all cases.
private class ConstructLong extends AbstractConstruct {
public Object construct(Node node) {
// NOTE(review): assumes the scalar is non-empty after stripping '_'
// separators; an empty value would throw at charAt(0) — confirm yaml
// input cannot contain an empty int scalar.
String value = constructScalar((ScalarNode) node).toString()
.replaceAll("_", "");
int sign = +1;
char first = value.charAt(0);
if (first == '-') {
sign = -1;
value = value.substring(1);
} else if (first == '+') {
value = value.substring(1);
}
int base = 10;
if ("0".equals(value)) {
return Long.valueOf(0);
} else if (value.startsWith("0b")) {
// binary literal
value = value.substring(2);
base = 2;
} else if (value.startsWith("0x")) {
// hexadecimal literal
value = value.substring(2);
base = 16;
} else if (value.startsWith("0")) {
// octal literal
value = value.substring(1);
base = 8;
} else if (value.indexOf(':') != -1) {
// sexagesimal (base-60) "a:b:c" form from the yaml 1.1 spec
String[] digits = value.split(":");
int bes = 1;
int val = 0;
for (int i = 0, j = digits.length; i < j; i++) {
val += (Long.parseLong(digits[(j - i) - 1]) * bes);
bes *= 60;
}
return createNumber(sign, String.valueOf(val), 10);
} else {
return createNumber(sign, value, 10);
}
// Reached only via the 0b/0x/0-prefix branches above.
return createNumber(sign, value, base);
}
}
// Parses with the given radix, re-applying the sign; always returns a Long.
private Number createNumber(int sign, String number, int radix) {
Number result;
if (sign < 0) {
number = "-" + number;
}
result = Long.valueOf(number, radix);
return result;
}
}
}
| |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.libs.ws.ahc;
import akka.stream.javadsl.Source;
import akka.util.ByteString;
import com.fasterxml.jackson.databind.JsonNode;
import org.w3c.dom.Document;
import play.libs.ws.*;
import play.mvc.Http;
import java.io.File;
import java.io.InputStream;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
import java.util.function.Function;
/** A Play WS request backed by AsyncHTTPClient implementation. */
public class AhcWSRequest implements WSRequest {
private static WSBodyWritables writables = new WSBodyWritables() {};
private final AhcWSClient client;
private final StandaloneAhcWSRequest request;
private final Function<StandaloneWSResponse, WSResponse> responseFunction = AhcWSResponse::new;
private final Function<StandaloneWSRequest, WSRequest> converter =
new Function<StandaloneWSRequest, WSRequest>() {
public WSRequest apply(StandaloneWSRequest standaloneWSRequest) {
final StandaloneAhcWSRequest plainAhcWSRequest =
(StandaloneAhcWSRequest) standaloneWSRequest;
return new AhcWSRequest(client, plainAhcWSRequest);
}
};
/** Wraps a standalone AHC request; responses are adapted into Play AhcWSResponse. */
AhcWSRequest(AhcWSClient client, StandaloneAhcWSRequest request) {
this.client = client;
this.request = request;
}
/** Executes the request as a GET. */
@Override
public CompletionStage<WSResponse> get() {
return request.get().thenApply(responseFunction);
}
/** Executes as PATCH with the given writable body. */
@Override
public CompletionStage<WSResponse> patch(BodyWritable body) {
return request.patch(body).thenApply(responseFunction);
}
/** Executes as PATCH with a text body. */
@Override
public CompletionStage<WSResponse> patch(String string) {
return request.patch(writables.body(string)).thenApply(responseFunction);
}
/** Executes as PATCH with a JSON body. */
@Override
public CompletionStage<WSResponse> patch(JsonNode jsonNode) {
return request.patch(writables.body(jsonNode)).thenApply(responseFunction);
}
/** Executes as PATCH with an XML document body. */
@Override
public CompletionStage<WSResponse> patch(Document doc) {
return request.patch(writables.body(doc)).thenApply(responseFunction);
}
/** Executes as PATCH streaming the given input stream (deprecated overload). */
@Deprecated
@Override
public CompletionStage<WSResponse> patch(InputStream inputStream) {
return request.patch(writables.body(() -> inputStream)).thenApply(responseFunction);
}
/** Executes as PATCH with the file contents as body. */
@Override
public CompletionStage<WSResponse> patch(File file) {
return request.patch(writables.body(file)).thenApply(responseFunction);
}
/** Executes as PATCH with a multipart body built from the given part source. */
@Override
public CompletionStage<WSResponse> patch(
Source<? super Http.MultipartFormData.Part<Source<ByteString, ?>>, ?> bodyPartSource) {
return request.patch(writables.multipartBody(bodyPartSource)).thenApply(responseFunction);
}
/** Executes as POST with the given writable body. */
@Override
public CompletionStage<WSResponse> post(BodyWritable body) {
return request.post(body).thenApply(responseFunction);
}
/** Executes as POST with a text body. */
@Override
public CompletionStage<WSResponse> post(String string) {
return request.post(writables.body(string)).thenApply(responseFunction);
}
/** Executes as POST with a JSON body. */
@Override
public CompletionStage<WSResponse> post(JsonNode json) {
return request.post(writables.body(json)).thenApply(responseFunction);
}
/** Executes as POST with an XML document body. */
@Override
public CompletionStage<WSResponse> post(Document doc) {
return request.post(writables.body(doc)).thenApply(responseFunction);
}
/** Executes as POST streaming the given input stream (deprecated overload). */
@Override
@Deprecated
public CompletionStage<WSResponse> post(InputStream is) {
return request.post(writables.body(() -> is)).thenApply(responseFunction);
}
/** Executes as POST with the file contents as body. */
@Override
public CompletionStage<WSResponse> post(File file) {
return request.post(writables.body(file)).thenApply(responseFunction);
}
/** Executes as POST with a multipart body built from the given part source. */
@Override
public CompletionStage<WSResponse> post(
Source<? super Http.MultipartFormData.Part<Source<ByteString, ?>>, ?> bodyPartSource) {
return request.post(writables.multipartBody(bodyPartSource)).thenApply(responseFunction);
}
/** Executes as PUT with the given writable body. */
@Override
public CompletionStage<WSResponse> put(BodyWritable body) {
return request.put(body).thenApply(responseFunction);
}
/** Executes as PUT with a text body. */
@Override
public CompletionStage<WSResponse> put(String string) {
return request.put(writables.body(string)).thenApply(responseFunction);
}
/** Executes as PUT with a JSON body. */
@Override
public CompletionStage<WSResponse> put(JsonNode json) {
return request.put(writables.body(json)).thenApply(responseFunction);
}
@Override
public CompletionStage<WSResponse> put(Document doc) {
return request.put(writables.body(doc)).thenApply(responseFunction);
}
@Override
@Deprecated
public CompletionStage<WSResponse> put(InputStream is) {
return request.put(writables.body(() -> is)).thenApply(responseFunction);
}
@Override
public CompletionStage<WSResponse> put(File file) {
return request.put(writables.body(file)).thenApply(responseFunction);
}
@Override
public CompletionStage<WSResponse> put(
Source<? super Http.MultipartFormData.Part<Source<ByteString, ?>>, ?> bodyPartSource) {
return request.put(writables.multipartBody(bodyPartSource)).thenApply(responseFunction);
}
  // Body-less verbs and generic execution; all delegate and adapt the response.
  @Override
  public CompletionStage<WSResponse> delete() {
    return request.delete().thenApply(responseFunction);
  }
  @Override
  public CompletionStage<WSResponse> head() {
    return request.head().thenApply(responseFunction);
  }
  @Override
  public CompletionStage<WSResponse> options() {
    return request.options().thenApply(responseFunction);
  }
  // Sets the HTTP method on the wrapped request, then executes it.
  @Override
  public CompletionStage<WSResponse> execute(String method) {
    return request.setMethod(method).execute().thenApply(responseFunction);
  }
  @Override
  public CompletionStage<WSResponse> execute() {
    return request.execute().thenApply(responseFunction);
  }
  // Streaming variant: delegates to the wrapped request's stream() API.
  @Override
  public CompletionStage<WSResponse> stream() {
    return request.stream().thenApply(responseFunction);
  }
  // Mutators below apply the change to the wrapped request and re-wrap the result
  // through `converter` so the fluent chain stays in this wrapper type.
  @Override
  public WSRequest setMethod(String method) {
    return converter.apply(request.setMethod(method));
  }
  @Override
  public WSRequest setBody(BodyWritable bodyWritable) {
    return converter.apply(request.setBody(bodyWritable));
  }
  @Override
  public WSRequest setBody(String string) {
    return converter.apply(request.setBody(writables.body(string)));
  }
  @Override
  public WSRequest setBody(JsonNode json) {
    return converter.apply(request.setBody(writables.body(json)));
  }
  // Deprecated: the supplier always returns the same one-shot stream instance.
  @Deprecated
  @Override
  public WSRequest setBody(InputStream is) {
    return converter.apply(request.setBody(writables.body(() -> is)));
  }
  @Override
  public WSRequest setBody(File file) {
    return converter.apply(request.setBody(writables.body(file)));
  }
  @Override
  public <U> WSRequest setBody(Source<ByteString, U> source) {
    return converter.apply(request.setBody(writables.body(source)));
  }
  /** @deprecated use addHeader(name, value) */
  // NOTE: despite its name this delegates to addHeader(), i.e. it appends rather than replaces.
  @Deprecated
  @Override
  public WSRequest setHeader(String name, String value) {
    return converter.apply(request.addHeader(name, value));
  }
  @Override
  public WSRequest setHeaders(Map<String, List<String>> headers) {
    return converter.apply(request.setHeaders(headers));
  }
  @Override
  public WSRequest addHeader(String name, String value) {
    return converter.apply(request.addHeader(name, value));
  }
  @Override
  public WSRequest setQueryString(String query) {
    return converter.apply(request.setQueryString(query));
  }
  /** @deprecated Use addQueryParameter */
  // NOTE: like setHeader(), this appends (delegates to addQueryParameter), it does not replace.
  @Deprecated
  @Override
  public WSRequest setQueryParameter(String name, String value) {
    return converter.apply(request.addQueryParameter(name, value));
  }
  @Override
  public WSRequest addQueryParameter(String name, String value) {
    return converter.apply(request.addQueryParameter(name, value));
  }
  @Override
  public WSRequest setQueryString(Map<String, List<String>> params) {
    return converter.apply(request.setQueryString(params));
  }
  @Override
  public StandaloneWSRequest setUrl(String url) {
    return converter.apply(request.setUrl(url));
  }
  @Override
  public WSRequest addCookie(WSCookie cookie) {
    return converter.apply(request.addCookie(cookie));
  }
  // MVC cookies are first translated to WS cookies, see asCookie().
  @Override
  public WSRequest addCookie(Http.Cookie cookie) {
    return converter.apply(request.addCookie(asCookie(cookie)));
  }
private WSCookie asCookie(Http.Cookie cookie) {
return new DefaultWSCookie(
cookie.name(),
cookie.value(),
cookie.domain(),
cookie.path(),
Optional.ofNullable(cookie.maxAge())
.map(Integer::longValue)
.filter(f -> f > -1L)
.orElse(null),
cookie.secure(),
cookie.httpOnly());
}
  @Override
  public WSRequest addCookies(WSCookie... cookies) {
    return converter.apply(request.addCookies(cookies));
  }
  @Override
  public WSRequest setCookies(List<WSCookie> cookies) {
    return converter.apply(request.setCookies(cookies));
  }
  // Authentication setters: all variants delegate to the corresponding wrapped-request overload.
  @Override
  public WSRequest setAuth(String userInfo) {
    return converter.apply(request.setAuth(userInfo));
  }
  @Override
  public WSRequest setAuth(String username, String password) {
    return converter.apply(request.setAuth(username, password));
  }
  @Override
  public WSRequest setAuth(String username, String password, WSAuthScheme scheme) {
    return converter.apply(request.setAuth(username, password, scheme));
  }
  @Override
  public StandaloneWSRequest setAuth(WSAuthInfo authInfo) {
    return converter.apply(request.setAuth(authInfo));
  }
  // Installs a request signature calculator (e.g. for OAuth-style signing).
  @Override
  public WSRequest sign(WSSignatureCalculator calculator) {
    return converter.apply(request.sign(calculator));
  }
  @Override
  public WSRequest setFollowRedirects(boolean followRedirects) {
    return converter.apply(request.setFollowRedirects(followRedirects));
  }
  @Override
  public WSRequest setVirtualHost(String virtualHost) {
    return converter.apply(request.setVirtualHost(virtualHost));
  }
/**
* @deprecated Use {@link #setRequestTimeout(Duration timeout)}
* @param timeout the request timeout in milliseconds. A value of -1 indicates an infinite request
* timeout.
*/
@Deprecated
@Override
public WSRequest setRequestTimeout(long timeout) {
Duration d;
if (timeout == -1) {
d = Duration.of(1, ChronoUnit.YEARS);
} else {
d = Duration.ofMillis(timeout);
}
return converter.apply(request.setRequestTimeout(d));
}
  @Override
  public WSRequest setRequestTimeout(Duration timeout) {
    return converter.apply(request.setRequestTimeout(timeout));
  }
  @Override
  public WSRequest setRequestFilter(WSRequestFilter filter) {
    return converter.apply(request.setRequestFilter(filter));
  }
  @Override
  public WSRequest setContentType(String contentType) {
    return converter.apply(request.setContentType(contentType));
  }
  // Read-only accessors: expose the wrapped request's state directly, no conversion needed.
  @Override
  public Optional<WSAuthInfo> getAuth() {
    return request.getAuth();
  }
  @Override
  public Optional<BodyWritable> getBody() {
    return request.getBody();
  }
  @Override
  public Optional<WSSignatureCalculator> getCalculator() {
    return request.getCalculator();
  }
  @Override
  public Optional<String> getContentType() {
    return request.getContentType();
  }
  @Override
  public Optional<Boolean> getFollowRedirects() {
    return request.getFollowRedirects();
  }
  @Override
  public String getUrl() {
    return request.getUrl();
  }
  @Override
  public Map<String, List<String>> getHeaders() {
    return request.getHeaders();
  }
  @Override
  public List<String> getHeaderValues(String name) {
    return request.getHeaderValues(name);
  }
  @Override
  public Optional<String> getHeader(String name) {
    return request.getHeader(name);
  }
  @Override
  public Optional<Duration> getRequestTimeout() {
    return request.getRequestTimeout();
  }
  @Override
  public Map<String, List<String>> getQueryParameters() {
    return request.getQueryParameters();
  }
}
| |
/*
* Copyright (c) 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.ec;
import java.security.*;
import java.security.interfaces.*;
import java.security.spec.*;
/**
* KeyFactory for EC keys. Keys must be instances of PublicKey or PrivateKey
* and getAlgorithm() must return "EC". For such keys, it supports conversion
* between the following:
*
* For public keys:
* . PublicKey with an X.509 encoding
* . ECPublicKey
* . ECPublicKeySpec
* . X509EncodedKeySpec
*
* For private keys:
* . PrivateKey with a PKCS#8 encoding
* . ECPrivateKey
* . ECPrivateKeySpec
* . PKCS8EncodedKeySpec
*
* @since 1.6
* @author Andreas Sterbenz
*/
public final class ECKeyFactory extends KeyFactorySpi {

    // Used by translateKey() and the SunPKCS11 provider
    public static final KeyFactory INSTANCE;

    // Internal provider object we can obtain the KeyFactory and
    // AlgorithmParameters from. Used by ECParameters and AlgorithmId.
    // This can go away once we have EC always available in the SUN provider.
    public static final Provider ecInternalProvider;

    static {
        final Provider p = new Provider("SunEC-Internal", 1.0d, null) {};
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            public Void run() {
                // Provider.put() performs a SecurityManager check, hence the
                // privileged block.
                p.put("KeyFactory.EC", "sun.security.ec.ECKeyFactory");
                p.put("AlgorithmParameters.EC", "sun.security.ec.ECParameters");
                p.put("Alg.Alias.AlgorithmParameters.1.2.840.10045.2.1", "EC");
                return null;
            }
        });
        try {
            INSTANCE = KeyFactory.getInstance("EC", p);
        } catch (NoSuchAlgorithmException e) {
            // cannot normally happen: the provider registered above maps "EC"
            // to this very class
            throw new RuntimeException(e);
        }
        ecInternalProvider = p;
    }

    public ECKeyFactory() {
        // empty
    }

    /**
     * Static method to convert Key into a useable instance of
     * ECPublicKey or ECPrivateKey. Check the key and convert it
     * to a Sun key if necessary. If the key is not an EC key
     * or cannot be used, throw an InvalidKeyException.
     *
     * The difference between this method and engineTranslateKey() is that
     * we do not convert keys of other providers that are already an
     * instance of ECPublicKey or ECPrivateKey.
     *
     * To be used by future Java ECDSA and ECDH implementations.
     */
    public static ECKey toECKey(Key key) throws InvalidKeyException {
        if (key instanceof ECKey) {
            ECKey ecKey = (ECKey)key;
            checkKey(ecKey);
            return ecKey;
        } else {
            // not an ECKey yet; let the full translation logic handle
            // X.509/PKCS#8 encoded keys
            return (ECKey)INSTANCE.translateKey(key);
        }
    }

    /**
     * Check that the given EC key is valid.
     */
    private static void checkKey(ECKey key) throws InvalidKeyException {
        // check for subinterfaces, omit additional checks for our keys
        if (key instanceof ECPublicKey) {
            if (key instanceof ECPublicKeyImpl) {
                return;
            }
        } else if (key instanceof ECPrivateKey) {
            if (key instanceof ECPrivateKeyImpl) {
                return;
            }
        } else {
            throw new InvalidKeyException("Neither a public nor a private key");
        }
        // ECKey does not extend Key, so we need to do a cast
        String keyAlg = ((Key)key).getAlgorithm();
        if (!keyAlg.equals("EC")) {
            throw new InvalidKeyException("Not an EC key: " + keyAlg);
        }
        // XXX further sanity checks about whether this key uses supported
        // fields, point formats, etc. would go here
    }

    /**
     * Translate an EC key into a Sun EC key. If conversion is
     * not possible, throw an InvalidKeyException.
     * See also JCA doc.
     */
    protected Key engineTranslateKey(Key key) throws InvalidKeyException {
        if (key == null) {
            throw new InvalidKeyException("Key must not be null");
        }
        String keyAlg = key.getAlgorithm();
        if (!keyAlg.equals("EC")) {
            throw new InvalidKeyException("Not an EC key: " + keyAlg);
        }
        if (key instanceof PublicKey) {
            return implTranslatePublicKey((PublicKey)key);
        } else if (key instanceof PrivateKey) {
            return implTranslatePrivateKey((PrivateKey)key);
        } else {
            throw new InvalidKeyException("Neither a public nor a private key");
        }
    }

    // see JCA doc
    protected PublicKey engineGeneratePublic(KeySpec keySpec)
            throws InvalidKeySpecException {
        try {
            return implGeneratePublic(keySpec);
        } catch (InvalidKeySpecException e) {
            throw e;
        } catch (GeneralSecurityException e) {
            // wrap other crypto failures, preserving the cause
            throw new InvalidKeySpecException(e);
        }
    }

    // see JCA doc
    protected PrivateKey engineGeneratePrivate(KeySpec keySpec)
            throws InvalidKeySpecException {
        try {
            return implGeneratePrivate(keySpec);
        } catch (InvalidKeySpecException e) {
            throw e;
        } catch (GeneralSecurityException e) {
            // wrap other crypto failures, preserving the cause
            throw new InvalidKeySpecException(e);
        }
    }

    // internal implementation of translateKey() for public keys. See JCA doc
    private PublicKey implTranslatePublicKey(PublicKey key)
            throws InvalidKeyException {
        if (key instanceof ECPublicKey) {
            if (key instanceof ECPublicKeyImpl) {
                // already one of our keys, no conversion needed
                return key;
            }
            ECPublicKey ecKey = (ECPublicKey)key;
            return new ECPublicKeyImpl(
                ecKey.getW(),
                ecKey.getParams()
            );
        } else if ("X.509".equals(key.getFormat())) {
            byte[] encoded = key.getEncoded();
            return new ECPublicKeyImpl(encoded);
        } else {
            throw new InvalidKeyException("Public keys must be instance "
                + "of ECPublicKey or have X.509 encoding");
        }
    }

    // internal implementation of translateKey() for private keys. See JCA doc
    private PrivateKey implTranslatePrivateKey(PrivateKey key)
            throws InvalidKeyException {
        if (key instanceof ECPrivateKey) {
            if (key instanceof ECPrivateKeyImpl) {
                // already one of our keys, no conversion needed
                return key;
            }
            ECPrivateKey ecKey = (ECPrivateKey)key;
            return new ECPrivateKeyImpl(
                ecKey.getS(),
                ecKey.getParams()
            );
        } else if ("PKCS#8".equals(key.getFormat())) {
            return new ECPrivateKeyImpl(key.getEncoded());
        } else {
            throw new InvalidKeyException("Private keys must be instance "
                + "of ECPrivateKey or have PKCS#8 encoding");
        }
    }

    // internal implementation of generatePublic. See JCA doc
    private PublicKey implGeneratePublic(KeySpec keySpec)
            throws GeneralSecurityException {
        if (keySpec instanceof X509EncodedKeySpec) {
            X509EncodedKeySpec x509Spec = (X509EncodedKeySpec)keySpec;
            return new ECPublicKeyImpl(x509Spec.getEncoded());
        } else if (keySpec instanceof ECPublicKeySpec) {
            ECPublicKeySpec ecSpec = (ECPublicKeySpec)keySpec;
            return new ECPublicKeyImpl(
                ecSpec.getW(),
                ecSpec.getParams()
            );
        } else {
            throw new InvalidKeySpecException("Only ECPublicKeySpec "
                + "and X509EncodedKeySpec supported for EC public keys");
        }
    }

    // internal implementation of generatePrivate. See JCA doc
    private PrivateKey implGeneratePrivate(KeySpec keySpec)
            throws GeneralSecurityException {
        if (keySpec instanceof PKCS8EncodedKeySpec) {
            PKCS8EncodedKeySpec pkcsSpec = (PKCS8EncodedKeySpec)keySpec;
            return new ECPrivateKeyImpl(pkcsSpec.getEncoded());
        } else if (keySpec instanceof ECPrivateKeySpec) {
            ECPrivateKeySpec ecSpec = (ECPrivateKeySpec)keySpec;
            return new ECPrivateKeyImpl(ecSpec.getS(), ecSpec.getParams());
        } else {
            throw new InvalidKeySpecException("Only ECPrivateKeySpec "
                + "and PKCS8EncodedKeySpec supported for EC private keys");
        }
    }

    // see JCA doc.
    // The (T) casts are safe: each cast is guarded by the corresponding
    // keySpec.isAssignableFrom() check just above it.
    @SuppressWarnings("unchecked")
    protected <T extends KeySpec> T engineGetKeySpec(Key key, Class<T> keySpec)
            throws InvalidKeySpecException {
        try {
            // convert key to one of our keys
            // this also verifies that the key is a valid EC key and ensures
            // that the encoding is X.509/PKCS#8 for public/private keys
            key = engineTranslateKey(key);
        } catch (InvalidKeyException e) {
            throw new InvalidKeySpecException(e);
        }
        if (key instanceof ECPublicKey) {
            ECPublicKey ecKey = (ECPublicKey)key;
            if (ECPublicKeySpec.class.isAssignableFrom(keySpec)) {
                return (T) new ECPublicKeySpec(
                    ecKey.getW(),
                    ecKey.getParams()
                );
            } else if (X509EncodedKeySpec.class.isAssignableFrom(keySpec)) {
                return (T) new X509EncodedKeySpec(key.getEncoded());
            } else {
                throw new InvalidKeySpecException
                    ("KeySpec must be ECPublicKeySpec or "
                    + "X509EncodedKeySpec for EC public keys");
            }
        } else if (key instanceof ECPrivateKey) {
            if (PKCS8EncodedKeySpec.class.isAssignableFrom(keySpec)) {
                return (T) new PKCS8EncodedKeySpec(key.getEncoded());
            } else if (ECPrivateKeySpec.class.isAssignableFrom(keySpec)) {
                ECPrivateKey ecKey = (ECPrivateKey)key;
                return (T) new ECPrivateKeySpec(
                    ecKey.getS(),
                    ecKey.getParams()
                );
            } else {
                throw new InvalidKeySpecException
                    ("KeySpec must be ECPrivateKeySpec or "
                    + "PKCS8EncodedKeySpec for EC private keys");
            }
        } else {
            // should not occur, caught in engineTranslateKey()
            throw new InvalidKeySpecException("Neither public nor private key");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.font.encoding;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* PostScript glyph list, maps glyph names to sequences of Unicode characters.
* Instances of GlyphList are immutable.
*/
public final class GlyphList
{
    private static final Log LOG = LogFactory.getLog(GlyphList.class);
    private static final GlyphList DEFAULT;
    private static final GlyphList ZAPF_DINGBATS;
    /**
     * Returns the Adobe Glyph List (AGL).
     */
    public static GlyphList getAdobeGlyphList()
    {
        return DEFAULT;
    }
    /**
     * Returns the Zapf Dingbats glyph list.
     */
    public static GlyphList getZapfDingbats()
    {
        return ZAPF_DINGBATS;
    }
    static
    {
        try
        {
            ClassLoader loader = GlyphList.class.getClassLoader();
            String path = "org/apache/pdfbox/resources/glyphlist/";
            // Adobe Glyph List (AGL)
            DEFAULT = new GlyphList(loader.getResourceAsStream(path + "glyphlist.txt"));
            // Zapf Dingbats has its own glyph list
            ZAPF_DINGBATS = new GlyphList(loader.getResourceAsStream(path + "zapfdingbats.txt"));
            // not supported in PDFBox 2.0, but we issue a warning, see PDFBOX-2379
            try
            {
                String location = System.getProperty("glyphlist_ext");
                if (location != null)
                {
                    throw new UnsupportedOperationException("glyphlist_ext is no longer supported, "
                            + "use GlyphList.DEFAULT.addGlyphs(Properties) instead");
                }
            }
            catch (SecurityException e) // can occur on System.getProperty
            {
                // PDFBOX-1946 ignore and continue
            }
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }
    // forward mapping: PostScript glyph name -> Unicode string
    private final Map<String, String> nameToUnicode;
    // reverse mapping: Unicode string -> first glyph name seen for it
    private final Map<String, String> unicodeToName;
    /**
     * Creates a new GlyphList from a glyph list file.
     *
     * @param input glyph list in Adobe format
     * @throws IOException if the glyph list could not be read
     */
    public GlyphList(InputStream input) throws IOException
    {
        nameToUnicode = new HashMap<String, String>();
        unicodeToName = new HashMap<String, String>();
        loadList(input);
    }
    /**
     * Creates a new GlyphList from multiple glyph list files.
     *
     * @param glyphList an existing glyph list to be copied
     * @param input glyph list in Adobe format
     * @throws IOException if the glyph list could not be read
     */
    public GlyphList(GlyphList glyphList, InputStream input) throws IOException
    {
        nameToUnicode = new HashMap<String, String>(glyphList.nameToUnicode);
        unicodeToName = new HashMap<String, String>(glyphList.unicodeToName);
        loadList(input);
    }
    /**
     * Parses an Adobe-format glyph list ("name;hex [hex ...]" per line, '#' comments)
     * into the two maps. The stream is always closed.
     */
    private void loadList(InputStream input) throws IOException
    {
        BufferedReader in = new BufferedReader(new InputStreamReader(input, "ISO-8859-1"));
        try
        {
            // Read until EOF. The previous loop condition "while (in.ready())" was wrong:
            // ready() only reports whether the next read can avoid blocking, it is not an
            // end-of-stream test, so lines could be silently skipped on slow streams.
            String line;
            while ((line = in.readLine()) != null)
            {
                line = line.trim();
                // skip blank lines and comments
                if (line.length() == 0 || line.startsWith("#"))
                {
                    continue;
                }
                String[] parts = line.split(";");
                if (parts.length < 2)
                {
                    throw new IOException("Invalid glyph list entry: " + line);
                }
                String name = parts[0];
                String[] unicodeList = parts[1].split(" ");
                if (nameToUnicode.containsKey(name))
                {
                    LOG.warn("duplicate value for " + name + " -> " + parts[1] + " " +
                            nameToUnicode.get(name));
                }
                int[] codePoints = new int[unicodeList.length];
                int index = 0;
                for (String hex : unicodeList)
                {
                    codePoints[index++] = Integer.parseInt(hex, 16);
                }
                String string = new String(codePoints, 0, codePoints.length);
                // forward mapping
                nameToUnicode.put(name, string);
                // reverse mapping: the first name registered for a string wins
                if (!unicodeToName.containsKey(string))
                {
                    unicodeToName.put(string, name);
                }
            }
        }
        finally
        {
            in.close();
        }
    }
    /**
     * Returns the name for the given Unicode code point.
     *
     * @param codePoint Unicode code point
     * @return PostScript glyph name, or ".notdef"
     */
    public String codePointToName(int codePoint)
    {
        String name = unicodeToName.get(new String(new int[] { codePoint }, 0, 1));
        if (name == null)
        {
            return ".notdef";
        }
        return name;
    }
    /**
     * Returns the name for a given sequence of Unicode characters.
     *
     * @param unicodeSequence sequence of Unicode characters
     * @return PostScript glyph name, or ".notdef"
     */
    public String sequenceToName(String unicodeSequence)
    {
        String name = unicodeToName.get(unicodeSequence);
        if (name == null)
        {
            return ".notdef";
        }
        return name;
    }
    /**
     * Returns the Unicode character sequence for the given glyph name, or null if there isn't any.
     *
     * @param name PostScript glyph name
     * @return Unicode character(s), or null.
     */
    public String toUnicode(String name)
    {
        if (name == null)
        {
            return null;
        }
        String unicode = nameToUnicode.get(name);
        if (unicode == null)
        {
            // test if we have a suffix and if so remove it
            if (name.indexOf('.') > 0)
            {
                unicode = toUnicode(name.substring(0, name.indexOf('.')));
            }
            else if (name.startsWith("uni") && name.length() == 7)
            {
                // test for Unicode name in the format uniXXXX where X is hex
                int nameLength = name.length();
                StringBuilder uniStr = new StringBuilder();
                try
                {
                    for (int chPos = 3; chPos + 4 <= nameLength; chPos += 4)
                    {
                        int codePoint = Integer.parseInt(name.substring(chPos, chPos + 4), 16);
                        if (codePoint > 0xD7FF && codePoint < 0xE000)
                        {
                            LOG.warn("Unicode character name with disallowed code area: " + name);
                        }
                        else
                        {
                            uniStr.append((char) codePoint);
                        }
                    }
                    unicode = uniStr.toString();
                }
                catch (NumberFormatException nfe)
                {
                    LOG.warn("Not a number in Unicode character name: " + name);
                }
            }
            else if (name.startsWith("u") && name.length() == 5)
            {
                // test for an alternate Unicode name representation uXXXX
                try
                {
                    int codePoint = Integer.parseInt(name.substring(1), 16);
                    if (codePoint > 0xD7FF && codePoint < 0xE000)
                    {
                        LOG.warn("Unicode character name with disallowed code area: " + name);
                    }
                    else
                    {
                        unicode = String.valueOf((char) codePoint);
                    }
                }
                catch (NumberFormatException nfe)
                {
                    LOG.warn("Not a number in Unicode character name: " + name);
                }
            }
            if (unicode != null)
            {
                // Cache the derived mapping for subsequent lookups. Only non-null results
                // are stored: a null entry never affected lookups (get() returns null and
                // the value is recomputed anyway) and only grew the map.
                // NOTE(review): this lazy caching mutates the map, so instances are not
                // strictly immutable and this cache is not thread-safe -- confirm callers
                // do not share a GlyphList across threads while resolving names.
                nameToUnicode.put(name, unicode);
            }
        }
        return unicode;
    }
}
| |
/*
* Copyright 2019 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.disambiguate;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.truth.Truth.assertThat;
import static com.google.javascript.jscomp.CompilerTestCase.lines;
import static com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason.ALGEBRAIC;
import static com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason.ENUM_ELEMENT;
import static com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason.FORCED;
import static com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason.INTERFACE;
import static com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason.PROTOTYPE;
import static java.util.stream.Collectors.joining;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableTable;
import com.google.common.truth.Correspondence;
import com.google.common.truth.MultimapSubject;
import com.google.common.truth.TableSubject;
import com.google.javascript.jscomp.Compiler;
import com.google.javascript.jscomp.CompilerPass;
import com.google.javascript.jscomp.CompilerTestCase;
import com.google.javascript.jscomp.NodeTraversal;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.TypeMismatch;
import com.google.javascript.jscomp.disambiguate.TypeGraphBuilder.EdgeReason;
import com.google.javascript.jscomp.graph.DiGraph;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.jscomp.graph.LowestCommonAncestorFinder;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.JSTypeResolver;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nullable;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public final class TypeGraphBuilderTest extends CompilerTestCase {
  @Rule @GwtIncompatible public final TestName testName = new TestName();
  // Single Compiler instance shared across the test; createCompiler() hands it back.
  private final Compiler compiler = new Compiler();
  private final JSTypeRegistry registry = this.compiler.getTypeRegistry();
  // Flattener whose predicate is always false -- presumably "never invalidate";
  // NOTE(review): confirm the predicate's meaning against TypeFlattener's constructor.
  private final TypeFlattener flattener = new TypeFlattener(this.registry, (t) -> false);
  // Pass installed by collectTypesFromCode() and handed out by getProcessor().
  private CompilerPass processor;
  // Graph under test, populated by each test's "When" step.
  private DiGraph<FlatType, Object> result;
  // CompilerTestCase hook: always reuse this test's single Compiler instance.
  @Override
  protected Compiler createCompiler() {
    return this.compiler;
  }
  // CompilerTestCase hook: returns whatever pass collectTypesFromCode() installed.
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    assertThat(compiler).isSameInstanceAs(this.compiler);
    return this.processor;
  }
  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    this.enableTypeCheck();
  }
  // Compiles `src`, flattens the type of every NAME node whose identifier starts with
  // "test", and returns them keyed by identifier (in traversal order).
  private LinkedHashMap<String, FlatType> collectTypesFromCode(String src) {
    TypeFlattener flattener = this.flattener;
    LinkedHashMap<String, FlatType> testTypes = new LinkedHashMap<>();
    // Flatten and collect the types of all NAMEs that start with "test".
    this.processor =
        (externs, main) ->
            NodeTraversal.traverse(
                this.compiler,
                main,
                new AbstractPostOrderCallback() {
                  @Override
                  public void visit(NodeTraversal t, Node n, Node unused) {
                    if (n.isName() && n.getString().startsWith("test")) {
                      testTypes.put(n.getString(), flattener.flatten(n.getJSType()));
                    }
                  }
                });
    this.testSame(srcs(src));
    // Clear the pass so later compiles don't re-run this collector.
    this.processor = null;
    return testTypes;
  }
  // The synthetic top node "*" must sit above Object's prototype chain.
  @Test
  public void top_isAboveObject() {
    // Given
    TypeGraphBuilder builder = this.createBuilder(null);
    LinkedHashMap<String, FlatType> testTypes =
        this.collectTypesFromCode(
            lines( //
                "const test = new Object();"));
    builder.addAll(testTypes.values());
    // When
    this.result = builder.build();
    // Then
    this.assertThatResultAsTable().containsCell("*", "Object.prototype", ALGEBRAIC);
    this.assertThatResultAsTable().containsCell("Object.prototype", "Object", PROTOTYPE);
  }
  // Interfaces hang off the same chain rooted at "*".
  @Test
  public void top_isAboveInterface() {
    // Given
    TypeGraphBuilder builder = this.createBuilder(null);
    LinkedHashMap<String, FlatType> testTypes =
        this.collectTypesFromCode(
            lines(
                "/** @interface */", //
                "class IFoo { }",
                "",
                "let /** !IFoo */ test;"));
    builder.addAll(testTypes.values());
    // When
    this.result = builder.build();
    // Then
    this.assertThatResultAsTable().containsCell("*", "Object.prototype", ALGEBRAIC);
    this.assertThatResultAsTable().containsCell("Object.prototype", "Object", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Object", "IFoo.prototype", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("IFoo.prototype", "IFoo", PROTOTYPE);
  }
  // Instantiating a class inserts its full prototype chain into the graph.
  @Test
  public void prototypeChain_isInserted() {
    // Given
    TypeGraphBuilder builder = this.createBuilder(null);
    LinkedHashMap<String, FlatType> testTypes =
        this.collectTypesFromCode(
            lines(
                "class Foo { }", //
                "",
                "const test = new Foo();"));
    builder.addAll(testTypes.values());
    // When
    this.result = builder.build();
    // Then
    this.assertThatResultAsTable().containsCell("Object.prototype", "Object", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Object", "Foo.prototype", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Foo.prototype", "Foo", PROTOTYPE);
  }
  // Two subclasses of the same base produce two branches below the base.
  @Test
  public void prototypeChain_canBranch() {
    // Given
    TypeGraphBuilder builder = this.createBuilder(null);
    LinkedHashMap<String, FlatType> testTypes =
        this.collectTypesFromCode(
            lines(
                "class Foo { }", //
                "class Bar extends Foo { }", //
                "class Qux extends Foo { }", //
                "",
                "const testBar = new Bar();",
                "const testQux = new Qux();"));
    builder.addAll(testTypes.values());
    // When
    this.result = builder.build();
    // Then
    this.assertThatResultAsTable().containsCell("Foo.prototype", "Foo", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Foo", "Bar.prototype", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Bar.prototype", "Bar", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Foo", "Qux.prototype", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Qux.prototype", "Qux", PROTOTYPE);
  }
  // Referencing only the constructor still pulls in its prototype and instance type.
  @Test
  public void constructorDef_includesPrototypeAndInstanceType_evenIfUnused() {
    // Given
    TypeGraphBuilder builder = this.createBuilder(null);
    LinkedHashMap<String, FlatType> testTypes =
        this.collectTypesFromCode(
            lines(
                "class Foo { }", //
                "",
                "const test = Foo;"));
    builder.addAll(testTypes.values());
    // When
    this.result = builder.build();
    // Then
    this.assertThatResultAsTable().containsCell("*", "Object.prototype", ALGEBRAIC);
    this.assertThatResultAsTable().containsCell("Object.prototype", "Object", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Object", "Foo.prototype", PROTOTYPE);
    this.assertThatResultAsTable().containsCell("Foo.prototype", "Foo", PROTOTYPE);
  }
@Test
public void prototypeChain_connectsInterfaces() {
// Given
TypeGraphBuilder builder = this.createBuilder(null);
LinkedHashMap<String, FlatType> testTypes =
this.collectTypesFromCode(
lines(
"/** @interface */", //
"class IFoo { }",
"",
"let /** !IFoo */ test;"));
builder.addAll(testTypes.values());
// When
this.result = builder.build();
// Then
this.assertThatResultAsTable().containsCell("Object", "IFoo.prototype", PROTOTYPE);
this.assertThatResultAsTable().containsCell("IFoo.prototype", "IFoo", PROTOTYPE);
}
@Test
public void prototypeChain_connectsSubclasses_viaClassSideInheritance() {
// Given
TypeGraphBuilder builder = this.createBuilder(null);
LinkedHashMap<String, FlatType> testTypes =
this.collectTypesFromCode(
lines(
"/** @constructor */ function Foo0 () { }",
"class Foo1 extends Foo0 { }", //
"class Foo2 extends Foo1 { }",
"",
"let /** !(typeof Foo2) */ test;"));
builder.addAll(testTypes.values());
// When
this.result = builder.build();
// Then
this.assertThatResultAsTable().containsCell("Function.prototype", "(typeof Foo0)", PROTOTYPE);
this.assertThatResultAsTable().containsCell("(typeof Foo0)", "(typeof Foo1)", PROTOTYPE);
this.assertThatResultAsTable().containsCell("(typeof Foo1)", "(typeof Foo2)", PROTOTYPE);
}
/**
 * When the "implemented interface" is actually a constructor's instance type (not a real
 * interface), no Parent -> Child edge may be created.
 */
@Test
public void interfaces_classImplementingClass_doesNotCreateConnection() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  try (JSTypeResolver.Closer closer = this.registry.getResolver().openForDefinition()) {
    FunctionType child =
        FunctionType.builder(this.registry).forConstructor().withName("Child").build();
    FunctionType parent =
        FunctionType.builder(this.registry).forConstructor().withName("Parent").build();
    child.setImplementedInterfaces(ImmutableList.of(parent.getInstanceType()));
    builder.add(flattener.flatten(child.getInstanceType()));
  }
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsMultimap().doesNotContainEntry("Parent", "Child");
}
/** A class implementing a genuine interface produces an INTERFACE edge Parent -> Child. */
@Test
public void interfaces_classImplementingInterface_createsConnection() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  try (JSTypeResolver.Closer closer = this.registry.getResolver().openForDefinition()) {
    FunctionType child =
        FunctionType.builder(this.registry).forConstructor().withName("Child").build();
    FunctionType parent =
        FunctionType.builder(this.registry).forInterface().withName("Parent").build();
    child.setImplementedInterfaces(ImmutableList.of(parent.getInstanceType()));
    builder.add(flattener.flatten(child.getInstanceType()));
  }
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Parent", "Child", INTERFACE);
}
/**
 * An interface "extending" a constructor's instance type (not a real interface) must not
 * produce a Parent -> Child edge.
 */
@Test
public void interfaces_interfaceExtendingClass_doesNotCreateConnection() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  try (JSTypeResolver.Closer closer = this.registry.getResolver().openForDefinition()) {
    FunctionType child =
        FunctionType.builder(this.registry).forInterface().withName("Child").build();
    FunctionType parent =
        FunctionType.builder(this.registry).forConstructor().withName("Parent").build();
    child.setExtendedInterfaces(ImmutableList.of(parent.getInstanceType()));
    builder.add(flattener.flatten(child.getInstanceType()));
  }
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsMultimap().doesNotContainEntry("Parent", "Child");
}
/** An interface extending another interface produces an INTERFACE edge Parent -> Child. */
@Test
public void interfaces_interfaceExtendingInterface_createsConnection() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  try (JSTypeResolver.Closer closer = this.registry.getResolver().openForDefinition()) {
    FunctionType child =
        FunctionType.builder(this.registry).forInterface().withName("Child").build();
    FunctionType parent =
        FunctionType.builder(this.registry).forInterface().withName("Parent").build();
    child.setExtendedInterfaces(ImmutableList.of(parent.getInstanceType()));
    builder.add(flattener.flatten(child.getInstanceType()));
  }
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Parent", "Child", INTERFACE);
}
/** A union node gets an ALGEBRAIC edge down to each of its member types. */
@Test
public void unions_connectedAboveMembers() {
  // Given
  // Stub: the LCA of the three members is the union itself (and the top node "*").
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(ImmutableSet.of("Foo", "Bar", "Qux"), ImmutableSet.of("(Bar|Foo|Qux)", "*"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Foo { }",
              "class Bar { }",
              "class Qux { }",
              "",
              "let /** (!Foo|!Bar|!Qux) */ test;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("(Bar|Foo|Qux)", "Bar", ALGEBRAIC);
  this.assertThatResultAsTable().containsCell("(Bar|Foo|Qux)", "Foo", ALGEBRAIC);
  this.assertThatResultAsTable().containsCell("(Bar|Foo|Qux)", "Qux", ALGEBRAIC);
}
/** A union node is attached below the lowest common ancestor reported by the LCA finder. */
@Test
public void unions_connectBelowLca() {
  // Given
  // Stub: "Kif" is the (non-union) LCA of the union's members.
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(ImmutableSet.of("Foo", "Bar", "Qux"), ImmutableSet.of("(Bar|Foo|Qux)", "Kif"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  FlatType flatKif = this.flattener.flatten(this.registry.createObjectType("Kif", null));
  builder.add(flatKif);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Foo { }",
              "class Bar { }",
              "class Qux { }",
              "",
              "let /** (!Foo|!Bar|!Qux) */ test;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Kif", "(Bar|Foo|Qux)", ALGEBRAIC);
}
/** When the finder reports several LCAs, the union is attached below each of them. */
@Test
public void unions_connectBelowLca_withMultipleLcas() {
  // Given
  // Stub: both "Kif" and "Lop" are LCAs of the union's members.
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(
              ImmutableSet.of("Foo", "Bar", "Qux"),
              ImmutableSet.of("(Bar|Foo|Qux)", "Kif", "Lop"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  FlatType flatKif = this.flattener.flatten(this.registry.createObjectType("Kif", null));
  builder.add(flatKif);
  FlatType flatLop = this.flattener.flatten(this.registry.createObjectType("Lop", null));
  builder.add(flatLop);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Foo { }",
              "class Bar { }",
              "class Qux { }",
              "",
              "let /** (!Foo|!Bar|!Qux) */ test;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Kif", "(Bar|Foo|Qux)", ALGEBRAIC);
  this.assertThatResultAsTable().containsCell("Lop", "(Bar|Foo|Qux)", ALGEBRAIC);
}
/** A union whose LCA is itself a union is attached below that other union node. */
@Test
public void unions_connectBelowLca_whichIsAlsoUnion() {
  // Given
  // Stub: (Bar|Foo)'s LCA is the larger union (Bar|Foo|Qux), whose own LCA is "Kif".
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(ImmutableSet.of("Foo", "Bar", "Qux"), ImmutableSet.of("(Bar|Foo|Qux)", "Kif"))
          .addStub(ImmutableSet.of("Foo", "Bar"), ImmutableSet.of("(Bar|Foo)", "(Bar|Foo|Qux)"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  FlatType flatKif = this.flattener.flatten(this.registry.createObjectType("Kif", null));
  builder.add(flatKif);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Foo { }",
              "class Bar { }",
              "class Qux { }",
              "",
              "let /** (!Foo|!Bar|!Qux) */ testA;",
              "let /** (!Foo|!Bar) */ testB;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Kif", "(Bar|Foo|Qux)", ALGEBRAIC);
  this.assertThatResultAsTable().containsCell("(Bar|Foo|Qux)", "(Bar|Foo)", ALGEBRAIC);
}
/**
 * The union of prototype types is attached below "Kif" even though the LCA has the same number
 * of descendants as the union's members.
 */
@Test
public void unions_connectBelowLca_whichHasSameDescendentCount() {
  // Given
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(
              ImmutableSet.of("Foo.prototype", "Bar.prototype"),
              ImmutableSet.of("(Bar.prototype|Foo.prototype)", "Kif"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Kif { }",
              "class Foo extends Kif { }",
              "class Bar extends Kif { }",
              "",
              "let /** ((typeof Foo.prototype)|(typeof Bar.prototype)) */ test;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Kif", "(Bar.prototype|Foo.prototype)", ALGEBRAIC);
  // Structural post-conditions (single root, no self/parallel edges) are checked by the
  // @After verifyResult_* methods.
}
/** A type mismatch registered via addForcedEdge shows up as a FORCED edge in the result. */
@Test
public void forcedEdges_areInserted() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  builder.addForcedEdge(
      TypeMismatch.createForTesting(
          this.registry.createObjectType("Foo", null),
          this.registry.createObjectType("Bar", null)));
  // When
  this.result = builder.build();
  // Then
  // Note the direction: the edge runs from "Bar" (found) to "Foo" (required).
  this.assertThatResultAsTable().containsCell("Bar", "Foo", FORCED);
}
/**
 * The endpoints of a forced edge are added to the graph as soon as addForcedEdge is called,
 * before build(): here {@code result} is read from the stub finder's view of the graph, with no
 * build() call.
 */
@Test
public void forcedEdges_endsAreInsertedEagerly() {
  // Given
  StubLcaFinder stubFinder = new StubLcaFinder();
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  // When
  builder.addForcedEdge(
      TypeMismatch.createForTesting(
          this.registry.createObjectType("Foo", null),
          this.registry.createObjectType("Bar", null)));
  this.result = stubFinder.graph;
  // Then
  this.assertThatResultAsTable().containsCell("Object", "Foo", PROTOTYPE);
  this.assertThatResultAsTable().containsCell("Object", "Bar", PROTOTYPE);
}
/**
 * Forced edges are inserted only after union/LCA edges: a precondition scheduled to run during
 * the (single) findAll call verifies the forced Foo-Kif edge is not yet present at that point.
 */
@Test
public void forcedEdges_areInserted_afterUnionLcaEdges() {
  // Given
  StubLcaFinder stubFinder =
      new StubLcaFinder()
          .addStub(ImmutableSet.of("Kif", "Bar"), ImmutableSet.of("(Bar|Kif)", "Object"));
  TypeGraphBuilder builder = this.createBuilder(stubFinder);
  ObjectType foo = this.registry.createObjectType("Foo", null);
  ObjectType bar = this.registry.createObjectType("Bar", foo);
  ObjectType kif = this.registry.createObjectType("Kif", null);
  JSType barOrKif = this.registry.createUnionType(bar, kif);
  builder.add(this.flattener.flatten(barOrKif));
  builder.addForcedEdge(TypeMismatch.createForTesting(foo, kif));
  this.result = stubFinder.graph;
  stubFinder.addPrecondition(
      1,
      () -> {
        // Check in both directions just in case.
        this.assertThatResultAsMultimap().doesNotContainEntry("Foo", "Kif");
        this.assertThatResultAsMultimap().doesNotContainEntry("Kif", "Foo");
      });
  // When
  builder.build();
  // Then
  stubFinder.verifyPreconditionCalls();
}
/** An enum element type gets an ENUM_ELEMENT edge from its underlying element type. */
@Test
public void enumElements_connectedToElementType() {
  // Given
  TypeGraphBuilder builder = this.createBuilder(null);
  LinkedHashMap<String, FlatType> testTypes =
      this.collectTypesFromCode(
          lines(
              "class Foo { }",
              "",
              "/** @enum {!Foo} */",
              "const FooEnum = { A: new Foo(), }",
              "",
              "const test = FooEnum.A;"));
  builder.addAll(testTypes.values());
  // When
  this.result = builder.build();
  // Then
  this.assertThatResultAsTable().containsCell("Foo", "FooEnum<Foo>", ENUM_ELEMENT);
}
/** Post-condition for every test: "*" is the only node in the result with no incoming edges. */
@After
public void verifyResult_topNodeIsOnlyRoot() {
  assertThat(
          this.result.getNodes().stream()
              .filter((n) -> n.getInEdges().isEmpty())
              .collect(toImmutableSet()))
      .comparingElementsUsing(NODE_HAS_TYPENAME)
      .containsExactly("*");
}
/** Post-condition for every test: no edge in the result connects a node to itself. */
@After
public void verifyResult_hasNoSelfEdges() {
  assertThat(
          this.result.getEdges().stream()
              .filter((e) -> Objects.equals(e.getSource(), e.getDestination()))
              .collect(toImmutableSet()))
      .isEmpty();
}
/**
 * Post-condition for every test: no two distinct edges share both their source and their
 * destination (i.e. no parallel edges).
 */
@After
public void verifyResult_hasNoParallelEdges() {
  assertThat(this.result.getEdges())
      .comparingElementsUsing(
          Correspondence.<DiGraphEdge<FlatType, Object>, DiGraphEdge<FlatType, Object>>from(
              (a, e) ->
                  Objects.equals(a.getSource(), e.getSource())
                      && Objects.equals(a.getDestination(), e.getDestination())
                      && !Objects.equals(a, e),
              "is parallel to"))
      .containsNoneIn(this.result.getEdges());
}
/**
 * Intentionally a no-op here. NOTE(review): the name suggests this hook once rendered the
 * result graph for debugging; the body appears to have been stripped — confirm against the
 * upstream source before relying on it.
 */
@After
@GwtIncompatible
public void renderResultGraph() {
}
/**
 * Builds a Truth subject over an immutable (source name, destination name, edge reason) table
 * containing every edge of {@code this.result}.
 */
private TableSubject assertThatResultAsTable() {
  ImmutableTable.Builder<String, String, EdgeReason> cells = ImmutableTable.builder();
  this.result
      .getEdges()
      .forEach(
          (edge) ->
              cells.put(
                  nameOf(edge.getSource()),
                  nameOf(edge.getDestination()),
                  (EdgeReason) edge.getValue()));
  return assertThat(cells.build());
}
/**
 * Builds a Truth subject over an immutable (source name -> destination name) multimap
 * containing every edge of {@code this.result}.
 */
private MultimapSubject assertThatResultAsMultimap() {
  ImmutableMultimap.Builder<String, String> pairs = ImmutableMultimap.builder();
  this.result
      .getEdges()
      .forEach((edge) -> pairs.put(nameOf(edge.getSource()), nameOf(edge.getDestination())));
  return assertThat(pairs.build());
}
/**
 * Creates the builder under test, wired to {@code optLcaFinder} (or to a fresh, stub-free
 * {@link StubLcaFinder} when {@code null} is passed).
 */
private TypeGraphBuilder createBuilder(@Nullable StubLcaFinder optLcaFinder) {
  StubLcaFinder lcaFinder;
  if (optLcaFinder == null) {
    lcaFinder = new StubLcaFinder();
  } else {
    lcaFinder = optLcaFinder;
  }
  return new TypeGraphBuilder(this.flattener, lcaFinder::setGraph);
}
/**
 * A fake implementation of a finder.
 *
 * <p>Instances allow setting stub responses for {@code findAll} calls. Inputs and outputs are
 * specified using sets of type names.
 */
private static final class StubLcaFinder extends LowestCommonAncestorFinder<FlatType, Object> {
  // The graph under construction; injected by TypeGraphBuilder through setGraph.
  private DiGraph<FlatType, Object> graph;
  // Canned answers: a set of root type names -> the set of result type names to return.
  private final LinkedHashMap<ImmutableSet<String>, ImmutableSet<String>> stubs =
      new LinkedHashMap<>();
  // Checks run on every findAll call; the value is the remaining expected call count.
  private final LinkedHashMap<Runnable, Integer> preconditions = new LinkedHashMap<>();

  StubLcaFinder() {
    super(null);
  }

  /** Receives the graph from the builder; returned so tests can keep a reference via method ref. */
  StubLcaFinder setGraph(DiGraph<FlatType, Object> graph) {
    this.graph = graph;
    return this;
  }

  /** Registers a canned findAll answer: roots named {@code from} yield nodes named {@code to}. */
  StubLcaFinder addStub(ImmutableSet<String> from, ImmutableSet<String> to) {
    this.stubs.put(from, to);
    return this;
  }

  /** Registers a check expected to run exactly {@code callCount} times across all findAll calls. */
  StubLcaFinder addPrecondition(int callCount, Runnable x) {
    this.preconditions.put(x, callCount);
    return this;
  }

  /** Asserts each precondition's remaining count reached zero, i.e. it ran exactly as expected. */
  StubLcaFinder verifyPreconditionCalls() {
    for (Map.Entry<Runnable, Integer> entry : this.preconditions.entrySet()) {
      assertThat(entry.getValue()).isEqualTo(0);
    }
    return this;
  }

  @Override
  public ImmutableSet<FlatType> findAll(Set<FlatType> roots) {
    // Run every registered precondition and count down its remaining expected calls.
    for (Map.Entry<Runnable, Integer> entry : this.preconditions.entrySet()) {
      entry.getKey().run();
      entry.setValue(entry.getValue() - 1);
    }
    ImmutableSet<String> rootNames =
        roots.stream().map(TypeGraphBuilderTest::nameOf).collect(toImmutableSet());
    // A call with an unstubbed root set is a test-authoring error.
    assertThat(this.stubs).containsKey(rootNames);
    ImmutableSet<String> resultNames = this.stubs.get(rootNames);
    // Resolve the stubbed names against the nodes actually present in the live graph.
    ImmutableSet<FlatType> results =
        this.graph.getNodes().stream()
            .map(DiGraphNode::getValue)
            .filter((t) -> resultNames.contains(nameOf(t)))
            .collect(toImmutableSet());
    // Every stubbed name must have matched exactly one node.
    assertThat(results).hasSize(resultNames.size());
    return results;
  }
}
/** Shorthand: the display name of the {@link FlatType} stored in {@code node}. */
private static String nameOf(DiGraphNode<FlatType, Object> node) {
  return nameOf(node.getValue());
}
/**
 * Renders a {@link FlatType} as a stable display name: the single type's own name, or for a
 * union the sorted member names joined with '|' inside parentheses, e.g. {@code "(Bar|Foo)"}.
 */
private static String nameOf(FlatType flat) {
  switch (flat.getArity()) {
    case UNION:
      String members =
          flat.getTypeUnion().stream()
              .map((member) -> member.getTypeSingle().toString())
              .sorted()
              .collect(joining("|"));
      return "(" + members + ")";
    case SINGLE:
      return flat.getTypeSingle().toString();
  }
  // Unreachable: every arity is handled above.
  throw new AssertionError();
}
// Truth correspondence matching a graph node against the display name of its FlatType value.
private static final Correspondence<DiGraphNode<FlatType, Object>, String> NODE_HAS_TYPENAME =
    Correspondence.transforming(TypeGraphBuilderTest::nameOf, "in a node with type");
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections4.bag;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import org.apache.commons.collections4.Bag;
import org.apache.commons.collections4.BulkTest;
import org.apache.commons.collections4.collection.AbstractCollectionTest;
import org.apache.commons.collections4.set.AbstractSetTest;
/**
* Abstract test class for {@link org.apache.commons.collections4.Bag Bag} methods and contracts.
* <p>
* To use, simply extend this class, and implement
* the {@link #makeObject} method.
* <p>
* If your bag fails one of these tests by design,
* you may still use this base set of cases. Simply override the
* test case (method) your bag fails.
* <p>
* <b>Note:</b> The Bag interface does not conform to the Collection interface
* so the generic collection tests from AbstractCollectionTest would normally fail.
* As a work-around since 4.0, a CollectionBag decorator can be used
* to make any Bag implementation comply to the Collection contract.
* <p>
* This abstract test class does wrap the concrete bag implementation
* with such a decorator, see the overridden {@link #resetEmpty()} and
* {@link #resetFull()} methods.
* <p>
* In addition to the generic collection tests (prefix testCollection) inherited
* from AbstractCollectionTest, there are test methods that test the "normal" Bag
* interface (prefix testBag). For Bag specific tests use the {@link #makeObject()} and
* {@link #makeFullCollection()} methods instead of {@link #resetEmpty()} and resetFull(),
* otherwise the collection will be wrapped by a {@link CollectionBag} decorator.
*
* @version $Id: AbstractBagTest.java 1540860 2013-11-11 21:58:27Z ebourg $
*/
public abstract class AbstractBagTest<T> extends AbstractCollectionTest<T> {

    /**
     * JUnit constructor.
     *
     * @param testName the test class name
     */
    public AbstractBagTest(final String testName) {
        super(testName);
    }

    //-----------------------------------------------------------------------
    /**
     * Returns an empty {@link ArrayList}.
     */
    @Override
    public Collection<T> makeConfirmedCollection() {
        final ArrayList<T> list = new ArrayList<T>();
        return list;
    }

    /**
     * Returns a full collection.
     */
    @Override
    public Collection<T> makeConfirmedFullCollection() {
        final Collection<T> coll = makeConfirmedCollection();
        coll.addAll(Arrays.asList(getFullElements()));
        return coll;
    }

    /**
     * Return a new, empty bag to used for testing.
     *
     * @return the bag to be tested
     */
    @Override
    public abstract Bag<T> makeObject();

    /**
     * {@inheritDoc}
     */
    @Override
    public Bag<T> makeFullCollection() {
        final Bag<T> bag = makeObject();
        bag.addAll(Arrays.asList(getFullElements()));
        return bag;
    }

    //-----------------------------------------------------------------------
    // Wrap in CollectionBag so the inherited Collection-contract tests apply to the Bag.
    @Override
    public void resetEmpty() {
        this.setCollection(CollectionBag.collectionBag(makeObject()));
        this.setConfirmed(makeConfirmedCollection());
    }

    @Override
    public void resetFull() {
        this.setCollection(CollectionBag.collectionBag(makeFullCollection()));
        this.setConfirmed(makeConfirmedFullCollection());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns the {@link #collection} field cast to a {@link Bag}.
     *
     * @return the collection field as a Bag
     */
    @Override
    public Bag<T> getCollection() {
        return (Bag<T>) super.getCollection();
    }

    //-----------------------------------------------------------------------
    /** Tests that add() increments the per-element count and updates contains(). */
    @SuppressWarnings("unchecked")
    public void testBagAdd() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        assertTrue("Should contain 'A'", bag.contains("A"));
        assertEquals("Should have count of 1", 1, bag.getCount("A"));
        bag.add((T) "A");
        assertTrue("Should contain 'A'", bag.contains("A"));
        assertEquals("Should have count of 2", 2, bag.getCount("A"));
        bag.add((T) "B");
        assertTrue(bag.contains("A"));
        assertTrue(bag.contains("B"));
    }

    /** Tests that a bag equals itself, empty and after various adds (reflexivity). */
    @SuppressWarnings("unchecked")
    public void testBagEqualsSelf() {
        final Bag<T> bag = makeObject();
        assertTrue(bag.equals(bag));

        if (!isAddSupported()) {
            return;
        }

        bag.add((T) "elt");
        assertTrue(bag.equals(bag));
        bag.add((T) "elt"); // again
        assertTrue(bag.equals(bag));
        bag.add((T) "elt2");
        assertTrue(bag.equals(bag));
    }

    /**
     * Tests remove(Object) (drops all copies) and remove(Object, int) (drops the given number
     * of copies; removing 0 copies is a no-op).
     */
    @SuppressWarnings("unchecked")
    public void testBagRemove() {
        if (!isRemoveSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        assertEquals("Should have count of 1", 1, bag.getCount("A"));
        bag.remove("A");
        assertEquals("Should have count of 0", 0, bag.getCount("A"));
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "A");
        assertEquals("Should have count of 4", 4, bag.getCount("A"));
        bag.remove("A", 0);
        assertEquals("Should have count of 4", 4, bag.getCount("A"));
        bag.remove("A", 2);
        assertEquals("Should have count of 2", 2, bag.getCount("A"));
        bag.remove("A");
        assertEquals("Should have count of 0", 0, bag.getCount("A"));
    }

    /**
     * Tests Bag.removeAll semantics: one occurrence is removed per matching element in the
     * argument collection (so two 'A's minus one 'A' leaves one 'A').
     */
    @SuppressWarnings("unchecked")
    public void testBagRemoveAll() {
        if (!isRemoveSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A", 2);
        assertEquals("Should have count of 2", 2, bag.getCount("A"));
        bag.add((T) "B");
        bag.add((T) "C");
        assertEquals("Should have count of 4", 4, bag.size());
        final List<String> delete = new ArrayList<String>();
        delete.add("A");
        delete.add("B");
        bag.removeAll(delete);
        assertEquals("Should have count of 1", 1, bag.getCount("A"));
        assertEquals("Should have count of 0", 0, bag.getCount("B"));
        assertEquals("Should have count of 1", 1, bag.getCount("C"));
        assertEquals("Should have count of 2", 2, bag.size());
    }

    /** Tests contains(Object) as elements are progressively added. */
    @SuppressWarnings("unchecked")
    public void testBagContains() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();

        assertEquals("Bag does not have at least 1 'A'", false, bag.contains("A"));
        assertEquals("Bag does not have at least 1 'B'", false, bag.contains("B"));

        bag.add((T) "A");  // bag 1A
        assertEquals("Bag has at least 1 'A'", true, bag.contains("A"));
        assertEquals("Bag does not have at least 1 'B'", false, bag.contains("B"));

        bag.add((T) "A");  // bag 2A
        assertEquals("Bag has at least 1 'A'", true, bag.contains("A"));
        assertEquals("Bag does not have at least 1 'B'", false, bag.contains("B"));

        bag.add((T) "B");  // bag 2A,1B
        assertEquals("Bag has at least 1 'A'", true, bag.contains("A"));
        assertEquals("Bag has at least 1 'B'", true, bag.contains("B"));
    }

    /** Tests Bag.containsAll, which is cardinality-aware (2 'A's required means 2 present). */
    @SuppressWarnings("unchecked")
    public void testBagContainsAll() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        final List<String> known = new ArrayList<String>();
        final List<String> known1A = new ArrayList<String>();
        known1A.add("A");
        final List<String> known2A = new ArrayList<String>();
        known2A.add("A");
        known2A.add("A");
        final List<String> known1B = new ArrayList<String>();
        known1B.add("B");
        final List<String> known1A1B = new ArrayList<String>();
        known1A1B.add("A");
        known1A1B.add("B");

        assertEquals("Bag containsAll of empty", true, bag.containsAll(known));
        assertEquals("Bag does not containsAll of 1 'A'", false, bag.containsAll(known1A));
        assertEquals("Bag does not containsAll of 2 'A'", false, bag.containsAll(known2A));
        assertEquals("Bag does not containsAll of 1 'B'", false, bag.containsAll(known1B));
        assertEquals("Bag does not containsAll of 1 'A' 1 'B'", false, bag.containsAll(known1A1B));

        bag.add((T) "A");  // bag 1A
        assertEquals("Bag containsAll of empty", true, bag.containsAll(known));
        assertEquals("Bag containsAll of 1 'A'", true, bag.containsAll(known1A));
        assertEquals("Bag does not containsAll of 2 'A'", false, bag.containsAll(known2A));
        assertEquals("Bag does not containsAll of 1 'B'", false, bag.containsAll(known1B));
        assertEquals("Bag does not containsAll of 1 'A' 1 'B'", false, bag.containsAll(known1A1B));

        bag.add((T) "A");  // bag 2A
        assertEquals("Bag containsAll of empty", true, bag.containsAll(known));
        assertEquals("Bag containsAll of 1 'A'", true, bag.containsAll(known1A));
        assertEquals("Bag containsAll of 2 'A'", true, bag.containsAll(known2A));
        assertEquals("Bag does not containsAll of 1 'B'", false, bag.containsAll(known1B));
        assertEquals("Bag does not containsAll of 1 'A' 1 'B'", false, bag.containsAll(known1A1B));

        bag.add((T) "A");  // bag 3A
        assertEquals("Bag containsAll of empty", true, bag.containsAll(known));
        assertEquals("Bag containsAll of 1 'A'", true, bag.containsAll(known1A));
        assertEquals("Bag containsAll of 2 'A'", true, bag.containsAll(known2A));
        assertEquals("Bag does not containsAll of 1 'B'", false, bag.containsAll(known1B));
        assertEquals("Bag does not containsAll of 1 'A' 1 'B'", false, bag.containsAll(known1A1B));

        bag.add((T) "B");  // bag 3A1B
        assertEquals("Bag containsAll of empty", true, bag.containsAll(known));
        assertEquals("Bag containsAll of 1 'A'", true, bag.containsAll(known1A));
        assertEquals("Bag containsAll of 2 'A'", true, bag.containsAll(known2A));
        assertEquals("Bag containsAll of 1 'B'", true, bag.containsAll(known1B));
        assertEquals("Bag containsAll of 1 'A' 1 'B'", true, bag.containsAll(known1A1B));
    }

    /** Tests that size() counts total copies (not unique elements) through adds and removes. */
    @SuppressWarnings("unchecked")
    public void testBagSize() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        assertEquals("Should have 0 total items", 0, bag.size());
        bag.add((T) "A");
        assertEquals("Should have 1 total items", 1, bag.size());
        bag.add((T) "A");
        assertEquals("Should have 2 total items", 2, bag.size());
        bag.add((T) "A");
        assertEquals("Should have 3 total items", 3, bag.size());
        bag.add((T) "B");
        assertEquals("Should have 4 total items", 4, bag.size());
        bag.add((T) "B");
        assertEquals("Should have 5 total items", 5, bag.size());
        bag.remove("A", 2);
        assertEquals("Should have 1 'A'", 1, bag.getCount("A"));
        assertEquals("Should have 3 total items", 3, bag.size());
        bag.remove("B");
        assertEquals("Should have 1 total item", 1, bag.size());
    }

    /** Tests Bag.retainAll: only one copy of each retained element is kept. */
    @SuppressWarnings("unchecked")
    public void testBagRetainAll() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        final List<String> retains = new ArrayList<String>();
        retains.add("B");
        retains.add("C");
        bag.retainAll(retains);
        assertEquals("Should have 2 total items", 2, bag.size());
    }

    /** Tests that the iterator visits each copy and that Iterator.remove drops one copy. */
    @SuppressWarnings("unchecked")
    public void testBagIterator() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        assertEquals("Bag should have 3 items", 3, bag.size());
        final Iterator<T> i = bag.iterator();

        boolean foundA = false;
        while (i.hasNext()) {
            final String element = (String) i.next();
            // ignore the first A, remove the second via Iterator.remove()
            if (element.equals("A")) {
                if (!foundA) {
                    foundA = true;
                } else {
                    i.remove();
                }
            }
        }

        assertTrue("Bag should still contain 'A'", bag.contains("A"));
        assertEquals("Bag should have 2 items", 2, bag.size());
        assertEquals("Bag should have 1 'A'", 1, bag.getCount("A"));
    }

    /** Tests fail-fast iteration: modifying the bag mid-iteration triggers CME. */
    @SuppressWarnings("unchecked")
    public void testBagIteratorFail() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        final Iterator<T> it = bag.iterator();
        it.next();
        bag.remove("A");
        try {
            it.next();
            fail("Should throw ConcurrentModificationException");
        } catch (final ConcurrentModificationException e) {
            // expected
        }
    }

    /** Tests that next() past the last element throws NoSuchElementException. */
    @SuppressWarnings("unchecked")
    public void testBagIteratorFailNoMore() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        final Iterator<T> it = bag.iterator();
        it.next();
        it.next();
        it.next();
        try {
            it.next();
            fail("Should throw NoSuchElementException");
        } catch (final NoSuchElementException ex) {
            // expected
        }
    }

    /** Tests that calling Iterator.remove twice without an intervening next() fails. */
    @SuppressWarnings("unchecked")
    public void testBagIteratorFailDoubleRemove() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        final Iterator<T> it = bag.iterator();
        it.next();
        it.next();
        assertEquals(3, bag.size());
        it.remove();
        assertEquals(2, bag.size());
        try {
            it.remove();
            fail("Should throw IllegalStateException");
        } catch (final IllegalStateException ex) {
            // expected
        }
        assertEquals(2, bag.size());
        it.next();
        it.remove();
        assertEquals(1, bag.size());
    }

    /** Tests that Iterator.remove keeps the iterator and the bag's counts consistent. */
    @SuppressWarnings("unchecked")
    public void testBagIteratorRemoveProtectsInvariants() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        assertEquals(2, bag.size());
        final Iterator<T> it = bag.iterator();
        assertEquals("A", it.next());
        assertEquals(true, it.hasNext());
        it.remove();
        assertEquals(1, bag.size());
        assertEquals(true, it.hasNext());
        assertEquals("A", it.next());
        assertEquals(false, it.hasNext());
        it.remove();
        assertEquals(0, bag.size());
        assertEquals(false, it.hasNext());
        final Iterator<T> it2 = bag.iterator();
        assertEquals(false, it2.hasNext());
    }

    /** Tests toArray(): one array slot per copy, in any order. */
    @SuppressWarnings("unchecked")
    public void testBagToArray() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        final Object[] array = bag.toArray();
        int a = 0, b = 0, c = 0;
        for (final Object element : array) {
            a += element.equals("A") ? 1 : 0;
            b += element.equals("B") ? 1 : 0;
            c += element.equals("C") ? 1 : 0;
        }
        assertEquals(2, a);
        assertEquals(2, b);
        assertEquals(1, c);
    }

    /** Tests the typed toArray(T[]) overload with the same per-copy semantics. */
    @SuppressWarnings("unchecked")
    public void testBagToArrayPopulate() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        final String[] array = bag.toArray(new String[0]);
        int a = 0, b = 0, c = 0;
        for (final String element : array) {
            a += element.equals("A") ? 1 : 0;
            b += element.equals("B") ? 1 : 0;
            c += element.equals("C") ? 1 : 0;
        }
        assertEquals(2, a);
        assertEquals(2, b);
        assertEquals(1, c);
    }

    //-----------------------------------------------------------------------
    /** Tests equals() between two bags of the same implementation, element by element. */
    @SuppressWarnings("unchecked")
    public void testBagEquals() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        final Bag<T> bag2 = makeObject();
        assertEquals(true, bag.equals(bag2));
        bag.add((T) "A");
        assertEquals(false, bag.equals(bag2));
        bag2.add((T) "A");
        assertEquals(true, bag.equals(bag2));
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        bag2.add((T) "A");
        bag2.add((T) "B");
        bag2.add((T) "B");
        bag2.add((T) "C");
        assertEquals(true, bag.equals(bag2));
    }

    /** Tests equals() across implementations: the bag under test vs a HashBag. */
    @SuppressWarnings("unchecked")
    public void testBagEqualsHashBag() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        final Bag<T> bag2 = new HashBag<T>();
        assertEquals(true, bag.equals(bag2));
        bag.add((T) "A");
        assertEquals(false, bag.equals(bag2));
        bag2.add((T) "A");
        assertEquals(true, bag.equals(bag2));
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        bag2.add((T) "A");
        bag2.add((T) "B");
        bag2.add((T) "B");
        bag2.add((T) "C");
        assertEquals(true, bag.equals(bag2));
    }

    /**
     * Tests the Bag hashCode contract: the sum over unique elements of
     * (element.hashCode() ^ count).
     */
    @SuppressWarnings("unchecked")
    public void testBagHashCode() {
        if (!isAddSupported()) {
            return;
        }
        final Bag<T> bag = makeObject();
        final Bag<T> bag2 = makeObject();
        assertEquals(0, bag.hashCode());
        assertEquals(0, bag2.hashCode());
        assertEquals(bag.hashCode(), bag2.hashCode());
        bag.add((T) "A");
        bag.add((T) "A");
        bag.add((T) "B");
        bag.add((T) "B");
        bag.add((T) "C");
        bag2.add((T) "A");
        bag2.add((T) "A");
        bag2.add((T) "B");
        bag2.add((T) "B");
        bag2.add((T) "C");
        assertEquals(bag.hashCode(), bag2.hashCode());

        int total = 0;
        total += "A".hashCode() ^ 2;
        total += "B".hashCode() ^ 2;
        total += "C".hashCode() ^ 1;
        assertEquals(total, bag.hashCode());
        assertEquals(total, bag2.hashCode());
    }

    //-----------------------------------------------------------------------
    /**
     * Bulk test {@link Bag#uniqueSet()}. This method runs through all of
     * the tests in {@link AbstractSetTest}.
     * After modification operations, {@link #verify()} is invoked to ensure
     * that the bag and the other collection views are still valid.
     *
     * @return a {@link AbstractSetTest} instance for testing the bag's unique set
     */
    public BulkTest bulkTestBagUniqueSet() {
        return new TestBagUniqueSet();
    }

    /**
     * Inner test class that applies the full {@link AbstractSetTest} suite to the read-only
     * {@link Bag#uniqueSet()} view of the enclosing test's bag.
     */
    public class TestBagUniqueSet extends AbstractSetTest<T> {
        public TestBagUniqueSet() {
            super("");
        }

        @Override
        public T[] getFullElements() {
            return AbstractBagTest.this.getFullElements();
        }

        @Override
        public T[] getOtherElements() {
            return AbstractBagTest.this.getOtherElements();
        }

        @Override
        public Set<T> makeObject() {
            return AbstractBagTest.this.makeObject().uniqueSet();
        }

        @Override
        public Set<T> makeFullCollection() {
            return AbstractBagTest.this.makeFullCollection().uniqueSet();
        }

        // The uniqueSet view is not modifiable through the Set interface, so the add/remove
        // portions of the inherited suite are disabled.
        @Override
        public boolean isNullSupported() {
            return AbstractBagTest.this.isNullSupported();
        }

        @Override
        public boolean isAddSupported() {
            return false;
        }

        @Override
        public boolean isRemoveSupported() {
            return false;
        }

        @Override
        public boolean isTestSerialization() {
            return false;
        }

        @Override
        public void resetEmpty() {
            AbstractBagTest.this.resetEmpty();
            TestBagUniqueSet.this.setCollection(AbstractBagTest.this.getCollection().uniqueSet());
            TestBagUniqueSet.this.setConfirmed(new HashSet<T>(AbstractBagTest.this.getConfirmed()));
        }

        @Override
        public void resetFull() {
            AbstractBagTest.this.resetFull();
            TestBagUniqueSet.this.setCollection(AbstractBagTest.this.getCollection().uniqueSet());
            TestBagUniqueSet.this.setConfirmed(new HashSet<T>(AbstractBagTest.this.getConfirmed()));
        }

        @Override
        public void verify() {
            super.verify();
        }
    }

    //-----------------------------------------------------------------------
    /**
     * Compare the current serialized form of the Bag
     * against the canonical version in SVN.
     */
    public void testEmptyBagCompatibility() throws IOException, ClassNotFoundException {
        // test to make sure the canonical form has been preserved
        final Bag<T> bag = makeObject();
        if (bag instanceof Serializable && !skipSerializedCanonicalTests() && isTestSerialization()) {
            final Bag<?> bag2 = (Bag<?>) readExternalFormFromDisk(getCanonicalEmptyCollectionName(bag));
            assertTrue("Bag is empty",bag2.size() == 0);
            assertEquals(bag, bag2);
        }
    }

    /**
     * Compare the current serialized form of the Bag
     * against the canonical version in SVN.
     */
    public void testFullBagCompatibility() throws IOException, ClassNotFoundException {
        // test to make sure the canonical form has been preserved
        final Bag<T> bag = makeFullCollection();
        if (bag instanceof Serializable && !skipSerializedCanonicalTests() && isTestSerialization()) {
            final Bag<?> bag2 = (Bag<?>) readExternalFormFromDisk(getCanonicalFullCollectionName(bag));
            assertEquals("Bag is the right size",bag.size(), bag2.size());
            assertEquals(bag, bag2);
        }
    }
}
| |
/**
*============================================================================
* Copyright The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC, and
* Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
*============================================================================
**/
package gov.nih.nci.cagrid.data.ui.domain;
import gov.nih.nci.cagrid.data.utilities.dmviz.DomainModelVisualizationPanel;
import gov.nih.nci.cagrid.introduce.common.FileFilters;
import gov.nih.nci.cagrid.metadata.MetadataUtils;
import gov.nih.nci.cagrid.metadata.dataservice.DomainModel;
import java.awt.Color;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Insets;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.UIManager;
import javax.swing.border.TitledBorder;
import org.apache.axis.message.addressing.Address;
import org.apache.axis.message.addressing.EndpointReferenceType;
/**
* DomainModelVisualizer
* GUI app for visualizing a domain model
*
* @author David Ervin
*
* @created Mar 19, 2008 11:13:47 AM
* @version $Id: DomainModelVisualizer.java,v 1.3 2009-05-29 20:50:20 dervin Exp $
*/
public class DomainModelVisualizer extends JFrame {
    private DomainModelVisualizationPanel dmVizPanel = null;
    private JButton loadFileButton = null;
    private JButton saveButton = null;
    private JPanel buttonPanel = null;
    private JPanel mainPanel = null;
    private JButton loadFromServiceButton = null;
    // the domain model currently displayed; null until one has been loaded
    private DomainModel currentModel = null;

    public DomainModelVisualizer() {
        super();
        setTitle("Domain Model Visualizer");
        initialize();
    }

    private void initialize() {
        this.setContentPane(getMainPanel());
        setSize(600, 600);
    }

    /**
     * Stores the model and pushes it into the visualization panel.
     *
     * @param model the domain model to display
     */
    private void setCurrentModel(DomainModel model) {
        getDmVizPanel().setDomainModel(model);
        this.currentModel = model;
    }

    /**
     * Lazily creates the titled visualization panel.
     *
     * @return the shared DomainModelVisualizationPanel instance
     */
    private DomainModelVisualizationPanel getDmVizPanel() {
        if (this.dmVizPanel == null) {
            this.dmVizPanel = new DomainModelVisualizationPanel();
            dmVizPanel.setBorder(BorderFactory.createTitledBorder(
                null, "Domain Model", TitledBorder.DEFAULT_JUSTIFICATION,
                TitledBorder.DEFAULT_POSITION, new Font("Dialog", Font.BOLD, 12), new Color(51, 51, 51)));
        }
        return this.dmVizPanel;
    }

    /**
     * This method initializes loadFileButton.  The button prompts for an XML
     * file and deserializes it into a domain model for display.
     *
     * @return javax.swing.JButton
     */
    private JButton getLoadFileButton() {
        if (loadFileButton == null) {
            loadFileButton = new JButton();
            loadFileButton.setText("Load File");
            loadFileButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    JFileChooser chooser = new JFileChooser();
                    chooser.setFileFilter(FileFilters.XML_FILTER);
                    int choice = chooser.showOpenDialog(DomainModelVisualizer.this);
                    if (choice == JFileChooser.APPROVE_OPTION) {
                        FileReader reader = null;
                        try {
                            reader = new FileReader(chooser.getSelectedFile());
                            DomainModel model = MetadataUtils.deserializeDomainModel(reader);
                            setCurrentModel(model);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        } finally {
                            // FIX: close the reader even when deserialization fails;
                            // previously a parse error leaked the open file handle
                            if (reader != null) {
                                try {
                                    reader.close();
                                } catch (Exception ex) {
                                    ex.printStackTrace();
                                }
                            }
                        }
                    }
                }
            });
        }
        return loadFileButton;
    }

    /**
     * This method initializes saveButton.  The button serializes the currently
     * loaded domain model to a user-selected XML file; it does nothing when no
     * model has been loaded yet.
     *
     * @return javax.swing.JButton
     */
    private JButton getSaveButton() {
        if (saveButton == null) {
            saveButton = new JButton();
            saveButton.setText("Save File");
            saveButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    if (currentModel != null) {
                        JFileChooser chooser = new JFileChooser();
                        chooser.setFileFilter(FileFilters.XML_FILTER);
                        int choice = chooser.showSaveDialog(DomainModelVisualizer.this);
                        if (choice == JFileChooser.APPROVE_OPTION) {
                            File saveme = chooser.getSelectedFile();
                            FileWriter writer = null;
                            try {
                                writer = new FileWriter(saveme);
                                MetadataUtils.serializeDomainModel(currentModel, writer);
                                writer.flush();
                            } catch (Exception ex) {
                                ex.printStackTrace();
                            } finally {
                                // FIX: close the writer even when serialization fails;
                                // previously an error left the file handle open
                                if (writer != null) {
                                    try {
                                        writer.close();
                                    } catch (Exception ex) {
                                        ex.printStackTrace();
                                    }
                                }
                            }
                        }
                    }
                }
            });
        }
        return saveButton;
    }

    /**
     * This method initializes loadFromServiceButton.  The button asks the user
     * for a service URL and retrieves that service's domain model metadata.
     *
     * @return javax.swing.JButton
     */
    private JButton getLoadFromServiceButton() {
        if (loadFromServiceButton == null) {
            loadFromServiceButton = new JButton();
            loadFromServiceButton.setText("Load From Service");
            loadFromServiceButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    String url = JOptionPane.showInputDialog("Enter Service URL");
                    // null means the dialog was cancelled; empty means no input
                    if (url != null && url.length() != 0) {
                        try {
                            EndpointReferenceType epr = new EndpointReferenceType(new Address(url));
                            DomainModel model = MetadataUtils.getDomainModel(epr);
                            setCurrentModel(model);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                    }
                }
            });
        }
        return loadFromServiceButton;
    }

    /**
     * This method initializes buttonPanel, a single row holding the three
     * action buttons.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getButtonPanel() {
        if (buttonPanel == null) {
            GridLayout gridLayout = new GridLayout();
            gridLayout.setRows(1);
            gridLayout.setHgap(4);
            gridLayout.setColumns(3);
            buttonPanel = new JPanel();
            buttonPanel.setLayout(gridLayout);
            buttonPanel.add(getLoadFromServiceButton(), null);
            buttonPanel.add(getLoadFileButton(), null);
            buttonPanel.add(getSaveButton(), null);
        }
        return buttonPanel;
    }

    /**
     * This method initializes mainPanel: the visualization panel fills the
     * window, with the button row pinned below it.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getMainPanel() {
        if (mainPanel == null) {
            GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
            gridBagConstraints1.gridx = 0;
            gridBagConstraints1.anchor = GridBagConstraints.EAST;
            gridBagConstraints1.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints1.gridy = 1;
            GridBagConstraints gridBagConstraints = new GridBagConstraints();
            gridBagConstraints.gridx = 0;
            // let the visualization panel absorb all extra space
            gridBagConstraints.fill = GridBagConstraints.BOTH;
            gridBagConstraints.weightx = 1.0D;
            gridBagConstraints.weighty = 1.0D;
            gridBagConstraints.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints.gridy = 0;
            mainPanel = new JPanel();
            mainPanel.setLayout(new GridBagLayout());
            mainPanel.add(getDmVizPanel(), gridBagConstraints);
            mainPanel.add(getButtonPanel(), gridBagConstraints1);
        }
        return mainPanel;
    }

    /**
     * Application entry point: applies the system look and feel (best effort)
     * and shows the visualizer window.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception ex) {
            System.err.println("Error setting system look and feel: " + ex.getMessage());
        }
        DomainModelVisualizer viz = new DomainModelVisualizer();
        viz.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        viz.setVisible(true);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean;
import java.lang.reflect.Method;
import java.util.Set;
import org.apache.camel.BeanScope;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StaticService;
import org.apache.camel.spi.BeanProcessorFactory;
import org.apache.camel.spi.annotations.JdkService;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Default {@link BeanProcessorFactory} that creates {@link BeanProcessor}
 * instances, resolving the target bean from (in order of precedence) a
 * registry reference, an existing instance, or a class name / class type.
 */
@JdkService(BeanProcessorFactory.FACTORY)
public final class DefaultBeanProcessorFactory extends ServiceSupport
        implements BeanProcessorFactory, CamelContextAware, StaticService {
    private static final Logger LOG = LoggerFactory.getLogger(DefaultBeanProcessorFactory.class);
    private CamelContext camelContext;
    // both initialized lazily in doInit() from the CamelContext
    private ParameterMappingStrategy parameterMappingStrategy;
    private BeanComponent beanComponent;
    public DefaultBeanProcessorFactory() {
    }
    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }
    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }
    /**
     * Creates a processor that invokes the given method on the given bean
     * instance.
     */
    @Override
    public Processor createBeanProcessor(CamelContext camelContext, Object bean, Method method) throws Exception {
        BeanInfo info = new BeanInfo(camelContext, method, parameterMappingStrategy, beanComponent);
        return new BeanProcessor(bean, info);
    }
    /**
     * Creates a processor for a bean identified by a registry reference, an
     * existing instance, a class name or a class type.  Exactly how the bean
     * is resolved and cached depends on the requested {@link BeanScope}.
     *
     * @param  camelContext the Camel context
     * @param  bean         an existing bean instance, or null to resolve one
     * @param  beanType     fully qualified class name to resolve, or null
     * @param  beanClass    class to instantiate/look up, or null
     * @param  ref          registry reference id, or null
     * @param  method       method name to invoke, or null for default selection
     * @param  scope        bean lifecycle scope (Singleton/Prototype/Request)
     * @return              the configured bean processor
     * @throws Exception    if the bean or its method cannot be resolved
     */
    @Override
    public Processor createBeanProcessor(
            CamelContext camelContext, Object bean, String beanType, Class<?> beanClass, String ref,
            String method, BeanScope scope)
            throws Exception {
        BeanProcessor answer;
        Class<?> clazz = bean != null ? bean.getClass() : null;
        BeanHolder beanHolder;
        if (ObjectHelper.isNotEmpty(ref)) {
            // a registry reference takes precedence over bean/beanType/beanClass
            if (scope == BeanScope.Singleton) {
                // cache the registry lookup which avoids repeat lookup in the registry
                beanHolder = new RegistryBean(camelContext, ref, parameterMappingStrategy, beanComponent)
                        .createCacheHolder();
                // bean holder will check if the bean exists
                bean = beanHolder.getBean(null);
            } else {
                // we do not cache so we invoke on-demand
                beanHolder = new RegistryBean(camelContext, ref, parameterMappingStrategy, beanComponent);
            }
            if (scope == BeanScope.Request) {
                // wrap in registry scoped holder
                beanHolder = new RequestBeanHolder(beanHolder);
            }
            answer = new BeanProcessor(beanHolder);
        } else {
            if (bean == null) {
                if (beanType == null && beanClass == null) {
                    throw new IllegalArgumentException("bean, ref or beanType must be provided");
                }
                // the clazz is either from beanType or beanClass
                if (beanType != null) {
                    try {
                        clazz = camelContext.getClassResolver().resolveMandatoryClass(beanType);
                    } catch (ClassNotFoundException e) {
                        throw RuntimeCamelException.wrapRuntimeCamelException(e);
                    }
                } else {
                    clazz = beanClass;
                }
                if (scope == BeanScope.Singleton && clazz != null) {
                    // attempt to lookup in registry by type to favour using it (like bean ref would do to lookup in registry)
                    Set<?> beans = camelContext.getRegistry().findByType(clazz);
                    if (!beans.isEmpty()) {
                        if (beans.size() == 1) {
                            LOG.debug("Exactly one instance of type: {} in registry found.", clazz);
                            bean = beans.iterator().next();
                        } else {
                            // ambiguous match: fall through and create our own instance
                            LOG.debug("Found {} bean instances of type: {} in the registry.", beans.size(), clazz);
                        }
                    }
                }
                // attempt to create bean using injector which supports auto-wiring
                if (bean == null && scope == BeanScope.Singleton && camelContext.getInjector().supportsAutoWiring()) {
                    try {
                        LOG.debug("Attempting to create new bean instance from class: {} via auto-wiring enabled", clazz);
                        bean = CamelContextHelper.newInstance(camelContext, clazz);
                    } catch (Exception e) {
                        LOG.debug("Error creating new bean instance from class: {}. This exception is ignored", clazz, e);
                    }
                }
                // create a bean if there is a default public no-arg constructor
                if (bean == null && scope == BeanScope.Singleton && ObjectHelper.hasDefaultPublicNoArgConstructor(clazz)) {
                    LOG.debug("Class has default no-arg constructor so creating a new bean instance: {}", clazz);
                    bean = CamelContextHelper.newInstance(camelContext, clazz);
                    ObjectHelper.notNull(bean, "bean", this);
                }
            }
            // validate the bean type is not from java so you by mistake think its a reference
            // to a bean name but the String is being invoke instead
            if (bean instanceof String) {
                throw new IllegalArgumentException(
                        "The bean instance is a java.lang.String type: " + bean
                                                   + ". We suppose you want to refer to a bean instance by its id instead. Please use ref.");
            }
            // the holder should either be bean or type based
            if (bean != null) {
                beanHolder = new ConstantBeanHolder(bean, camelContext, parameterMappingStrategy, beanComponent);
            } else {
                if (scope == BeanScope.Singleton && ObjectHelper.hasDefaultPublicNoArgConstructor(clazz)) {
                    // we can only cache if we can create an instance of the bean, and for that we need a public constructor
                    beanHolder = new ConstantTypeBeanHolder(clazz, camelContext, parameterMappingStrategy, beanComponent)
                            .createCacheHolder();
                } else {
                    if (ObjectHelper.hasDefaultPublicNoArgConstructor(clazz)) {
                        beanHolder = new ConstantTypeBeanHolder(clazz, camelContext, parameterMappingStrategy, beanComponent);
                    } else if (clazz.isInterface()) {
                        throw new IllegalArgumentException(
                                "The bean is an interface type: " + clazz
                                                           + ". Interfaces are only supported to lookup in the Camel registry for a single instance of such type."
                                                           + " Otherwise the bean must be a class type.");
                    } else {
                        // this is only for invoking static methods on the bean
                        beanHolder = new ConstantStaticTypeBeanHolder(
                                clazz, camelContext, parameterMappingStrategy, beanComponent);
                    }
                }
            }
            if (scope == BeanScope.Request) {
                // wrap in registry scoped holder
                beanHolder = new RequestBeanHolder(beanHolder);
            }
            answer = new BeanProcessor(beanHolder);
        }
        // check for method exists
        if (method != null) {
            answer.setMethod(method);
            // check there is a method with the given name, and leverage BeanInfo for that
            // which we only do if we are caching the bean as otherwise we will create a bean instance for this check
            // which we only want to do if we cache the bean
            if (scope == BeanScope.Singleton) {
                BeanInfo beanInfo = beanHolder.getBeanInfo();
                if (bean != null) {
                    // there is a bean instance, so check for any methods
                    if (!beanInfo.hasMethod(method)) {
                        throw RuntimeCamelException.wrapRuntimeCamelException(new MethodNotFoundException(null, bean, method));
                    }
                } else if (clazz != null) {
                    // there is no bean instance, so check for static methods only
                    if (!beanInfo.hasStaticMethod(method)) {
                        throw RuntimeCamelException
                                .wrapRuntimeCamelException(new MethodNotFoundException(null, clazz, method, true));
                    }
                }
            }
        }
        return answer;
    }
    @Override
    protected void doInit() throws Exception {
        // resolve collaborators once the CamelContext is available
        parameterMappingStrategy = ParameterMappingStrategyHelper.createParameterMappingStrategy(getCamelContext());
        beanComponent = getCamelContext().getComponent("bean", BeanComponent.class);
    }
}
| |
package ezvcard.io.xml;
import static ezvcard.VCardVersion.V4_0;
import static ezvcard.property.asserter.PropertyAsserter.assertAddress;
import static ezvcard.property.asserter.PropertyAsserter.assertBinaryProperty;
import static ezvcard.property.asserter.PropertyAsserter.assertDateProperty;
import static ezvcard.property.asserter.PropertyAsserter.assertEmail;
import static ezvcard.property.asserter.PropertyAsserter.assertGeo;
import static ezvcard.property.asserter.PropertyAsserter.assertListProperty;
import static ezvcard.property.asserter.PropertyAsserter.assertRawProperty;
import static ezvcard.property.asserter.PropertyAsserter.assertSimpleProperty;
import static ezvcard.property.asserter.PropertyAsserter.assertStructuredName;
import static ezvcard.property.asserter.PropertyAsserter.assertTelephone;
import static ezvcard.property.asserter.PropertyAsserter.assertTimezone;
import static ezvcard.util.TestUtils.assertIntEquals;
import static ezvcard.util.TestUtils.assertNoMoreVCards;
import static ezvcard.util.TestUtils.assertPropertyCount;
import static ezvcard.util.TestUtils.assertValidate;
import static ezvcard.util.TestUtils.assertVersion;
import static ezvcard.util.TestUtils.assertWarnings;
import static org.custommonkey.xmlunit.XMLAssert.assertXMLEqual;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
import javax.xml.transform.TransformerException;
import org.custommonkey.xmlunit.XMLUnit;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import ezvcard.VCard;
import ezvcard.VCardDataType;
import ezvcard.io.AgeType;
import ezvcard.io.AgeType.AgeScribe;
import ezvcard.io.LuckyNumType;
import ezvcard.io.LuckyNumType.LuckyNumScribe;
import ezvcard.io.MyFormattedNameType;
import ezvcard.io.MyFormattedNameType.MyFormattedNameScribe;
import ezvcard.io.SalaryType;
import ezvcard.io.SalaryType.SalaryScribe;
import ezvcard.io.scribe.CannotParseScribe;
import ezvcard.io.scribe.SkipMeScribe;
import ezvcard.parameter.AddressType;
import ezvcard.parameter.EmailType;
import ezvcard.parameter.TelephoneType;
import ezvcard.property.ProductId;
import ezvcard.property.RawProperty;
import ezvcard.property.Xml;
import ezvcard.util.IOUtils;
import ezvcard.util.PartialDate;
import ezvcard.util.TelUri;
import ezvcard.util.UtcOffset;
import ezvcard.util.XmlUtils;
/*
Copyright (c) 2012-2015, Michael Angstadt
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
/**
* @author Michael Angstadt
*/
public class XCardReaderTest {
    // JUnit rule providing a per-test temporary directory for file-based tests
    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder();
    /** Configures XMLUnit once so XML comparisons ignore whitespace differences. */
    @BeforeClass
    public static void beforeClass() {
        XMLUnit.setIgnoreWhitespace(true);
    }
    /**
     * Reads a single vCard, checking FN and structured-name parsing, including
     * ignored XML comments, empty elements and repeated prefix elements.
     */
    @Test
    public void read_single() throws Exception {
        //@formatter:off
        String xml =
        "<!-- ignore -->" +
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<fn><text>Dr. Gregory House M.D.</text></fn>" +
                "<n>" +
                    "<surname>House</surname>" +
                    "<given>Gregory</given>" +
                    "<!-- ignore -->" +
                    "<additional />" +
                    "<prefix>Dr</prefix>" +
                    "<prefix>Mr</prefix>" +
                    "<suffix>MD</suffix>" +
                "</n>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(2, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Gregory House M.D.")
            .noMore();
            assertStructuredName(vcard)
                .family("House")
                .given("Gregory")
                .prefixes("Dr", "Mr")
                .suffixes("MD")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Reads two vCards from one vcards element; each readNext() call must
     * return the next card in document order.
     */
    @Test
    public void read_multiple() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<fn><text>Dr. Gregory House M.D.</text></fn>" +
                "<n>" +
                    "<surname>House</surname>" +
                    "<given>Gregory</given>" +
                    "<additional />" +
                    "<prefix>Dr</prefix>" +
                    "<prefix>Mr</prefix>" +
                    "<suffix>MD</suffix>" +
                "</n>" +
            "</vcard>" +
            "<vcard>" +
                "<fn><text>Dr. Lisa Cuddy M.D.</text></fn>" +
                "<n>" +
                    "<surname>Cuddy</surname>" +
                    "<given>Lisa</given>" +
                    "<additional />" +
                    "<prefix>Dr</prefix>" +
                    "<prefix>Ms</prefix>" +
                    "<suffix>MD</suffix>" +
                "</n>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(2, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Gregory House M.D.")
            .noMore();
            assertStructuredName(vcard)
                .family("House")
                .given("Gregory")
                .prefixes("Dr", "Mr")
                .suffixes("MD")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(2, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Lisa Cuddy M.D.")
            .noMore();
            assertStructuredName(vcard)
                .family("Cuddy")
                .given("Lisa")
                .prefixes("Dr", "Ms")
                .suffixes("MD")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * A vcards element without the xCard namespace must be ignored entirely,
     * yielding no vCards.
     */
    @Test
    public void read_default_namespace() throws Exception {
        //@formatter:off
        String xml =
        "<vcards>" +
            "<vcard>" +
                "<fn><text>Dr. Gregory House M.D.</text></fn>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        assertNoMoreVCards(reader);
    }
    /**
     * A vcards element in a non-xCard namespace must be ignored entirely,
     * yielding no vCards.
     */
    @Test
    public void read_wrong_namespace() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"wrong\">" +
            "<vcard>" +
                "<fn><text>Dr. Gregory House M.D.</text></fn>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        assertNoMoreVCards(reader);
    }
    /**
     * xCard elements qualified with namespace prefixes (even mixed prefixes
     * bound to the same namespace) must parse the same as default-namespace
     * elements.
     */
    @Test
    public void read_namespace_prefix() throws Exception {
        //@formatter:off
        String xml =
        "<v:vcards xmlns:v=\"" + V4_0.getXmlNamespace() + "\">" +
            "<v:vcard>" +
                "<v:fn><x:text xmlns:x=\"" + V4_0.getXmlNamespace() + "\">Dr. Gregory House M.D.</x:text></v:fn>" +
            "</v:vcard>" +
        "</v:vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Gregory House M.D.")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Whitespace inside a property value element (spaces, tabs, newlines)
     * must be preserved exactly, not normalized away.
     */
    @Test
    public void read_preserve_whitespace() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<note><text>  This \t is \n a note </text></note>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getNotes())
                .value("  This \t is \n a note ")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Elements in foreign namespaces that appear around and inside xCard
     * structures (including inside parameters) must be skipped without
     * affecting the parsed properties or parameter values.
     */
    @Test
    public void read_ignore_other_namespaces() throws Exception {
        //@formatter:off
        String xml =
        "<root>" +
            "<ignore xmlns=\"one\" />" +
            "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
                "<ignore xmlns=\"two\">text</ignore>" +
                "<vcard>" +
                    "<fn>" +
                        "<parameters>" +
                            "<ignore xmlns=\"three\"><foo>bar</foo></ignore>" +
                            "<pref><ignore xmlns=\"four\">bar</ignore><integer>1</integer></pref>" +
                            "<pref><integer>2</integer></pref>" +
                        "</parameters>" +
                        "<text>Dr. Gregory House M.D.</text>" +
                    "</fn>" +
                "</vcard>" +
            "</vcards>" +
        "</root>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Gregory House M.D.")
                .param("PREF", "1")
                .param("PREF", "2")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Element names that collide with structural xCard names (vcard, group,
     * parameters) must still parse correctly as properties/parameters when
     * they appear at property level.
     */
    @Test
    public void read_identical_element_names() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<vcard>" +
                    "<parameters>" +
                        "<parameters><text>paramValue1</text></parameters>" +
                        "<group><text>paramValue2</text></group>" +
                    "</parameters>" +
                    "<vcard>propValue</vcard>" +
                "</vcard>" +
                "<group name=\"grp\">" +
                    "<group>" +
                        "<text>value</text>" +
                    "</group>" +
                "</group>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(2, vcard);
            //@formatter:off
            assertRawProperty("VCARD", vcard)
                .value("propValue")
                .param("PARAMETERS", "paramValue1")
                .param("GROUP", "paramValue2")
            .noMore();
            assertRawProperty("GROUP", vcard)
                .group("grp")
                .value("value")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Malformed XML (unclosed element) must surface as an IOException whose
     * cause chain contains the underlying TransformerException/SAXException.
     */
    @Test
    public void read_bad_xml() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<fn><text>John Doe</fn>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        try {
            reader.readNext();
            fail();
        } catch (IOException e) {
            assertTrue(e.getCause() instanceof TransformerException);
            assertTrue(e.getCause().getCause() instanceof SAXException);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Multiple sibling vcards root elements under a wrapper element must all
     * be read, in document order.
     */
    @Test
    public void read_multiple_vcards_elements() throws Exception {
        //@formatter:off
        String xml =
        "<root>" +
            "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
                "<vcard>" +
                    "<fn><text>Dr. Gregory House M.D.</text></fn>" +
                "</vcard>" +
            "</vcards>" +
            "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
                "<vcard>" +
                    "<fn><text>Dr. Lisa Cuddy M.D.</text></fn>" +
                "</vcard>" +
            "</vcards>" +
        "</root>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Gregory House M.D.")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .value("Dr. Lisa Cuddy M.D.")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
        reader.close();
    }
    /**
     * Parameter parsing: zero parameters, one parameter, a parameter without
     * a value element (ignored), two parameters, and a multi-valued parameter.
     */
    @Test
    public void read_parameters() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                //zero params
                "<note>" +
                    "<text>Note 1</text>" +
                "</note>" +
                //one param
                "<note>" +
                    "<parameters>" +
                        "<altid><text>1</text></altid>" +
                    "</parameters>" +
                    "<text>Hello world!</text>" +
                "</note>" +
                //one param, but doesn't have a value element, so it should be ignored
                "<note>" +
                    "<parameters>" +
                        "<altid>1</altid>" +
                    "</parameters>" +
                    "<text>Hallo Welt!</text>" +
                "</note>" +
                //two params
                "<note>" +
                    "<parameters>" +
                        "<altid><text>1</text></altid>" +
                        "<language><language-tag>fr</language-tag></language>" +
                    "</parameters>" +
                    "<text>Bonjour tout le monde!</text>" +
                "</note>" +
                //a param with multiple values
                "<tel>" +
                    "<parameters>" +
                        "<type>" +
                            "<text>work</text>" +
                            "<text>voice</text>" +
                        "</type>" +
                    "</parameters>" +
                    "<uri>tel:+1-555-555-1234</uri>" +
                "</tel>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(5, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getNotes())
                .value("Note 1")
            .next()
                .value("Hello world!")
                .param("ALTID", "1")
            .next()
                .value("Hallo Welt!")
            .next()
                .value("Bonjour tout le monde!")
                .param("ALTID", "1")
                .param("LANGUAGE", "fr")
            .noMore();
            assertTrue(vcard.getNotes().get(2).getParameters().isEmpty());
            assertTelephone(vcard)
                .uri(new TelUri.Builder("+1-555-555-1234").build())
                .types(TelephoneType.WORK, TelephoneType.VOICE)
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
        reader.close();
    }
    /**
     * Group parsing: a named group assigns its name to contained properties,
     * a nameless group leaves the property ungrouped, as do properties
     * outside any group element.
     */
    @Test
    public void read_groups() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<group name=\"item1\">" +
                    "<fn><text>John Doe</text></fn>" +
                    "<note><text>Hello world!</text></note>" +
                "</group>" +
                "<group>" +
                    "<prodid><text>no name attribute</text></prodid>" +
                "</group>" +
                "<note><text>A property without a group</text></note>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(4, vcard);
            //@formatter:off
            assertSimpleProperty(vcard.getFormattedNames())
                .group("item1")
                .value("John Doe")
            .noMore();
            assertSimpleProperty(vcard.getNotes())
                .group("item1")
                .value("Hello world!")
            .next()
                .value("A property without a group")
            .noMore();
            assertSimpleProperty(vcard.getProperties(ProductId.class))
                .value("no name attribute")
            .noMore();
            //@formatter:on
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * Extended/non-standard property handling: foreign-namespace elements
     * without a scribe become Xml properties, registered scribes take over
     * their element (even overriding a standard property like FN), and
     * unrecognized xCard-namespace elements become RawProperty instances.
     */
    @Test
    public void read_non_standard_properties() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                //xCard namespace: no
                //scribe: no
                //expected: XML property
                "<foo xmlns=\"http://example.com\">bar</foo>" +
                //xCard namespace: no
                //scribe: yes
                //parseXml impl: yes
                //expected: LuckyNumType
                "<a:lucky-num xmlns:a=\"http://luckynum.com\"><a:num>21</a:num></a:lucky-num>" +
                //xCard namespace: yes
                //scribe: yes
                //parseXml impl: yes
                //expected: SalaryType
                "<x-salary><integer>1000000</integer></x-salary>" +
                //xCard namespace: yes
                //parseXml impl: no
                //expected: AgeType (should be unmarshalled using the default parseXml implementation)
                "<x-age><integer>24</integer></x-age>" +
                //xCard namespace: yes
                //scribe: no
                //expected: RawProperty
                "<x-gender><text>m</text></x-gender>" +
                //xCard namespace: yes
                //scribe: yes (standard scribe overridden)
                //expected: MyFormattedNameType
                "<fn><name>John Doe</name></fn>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        reader.registerScribe(new LuckyNumScribe());
        reader.registerScribe(new SalaryScribe());
        reader.registerScribe(new AgeScribe());
        reader.registerScribe(new MyFormattedNameScribe());
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(6, vcard);
            {
                Iterator<Xml> xmlIt = vcard.getXmls().iterator();
                Xml xmlType = xmlIt.next();
                assertXMLEqual(XmlUtils.toDocument("<foo xmlns=\"http://example.com\">bar</foo>"), xmlType.getValue());
                assertFalse(xmlIt.hasNext());
            }
            LuckyNumType luckyNum = vcard.getProperty(LuckyNumType.class);
            assertEquals(21, luckyNum.luckyNum);
            SalaryType salary = vcard.getProperty(SalaryType.class);
            assertEquals(1000000, salary.salary);
            AgeType age = vcard.getProperty(AgeType.class);
            assertEquals(24, age.age);
            RawProperty gender = vcard.getExtendedProperty("X-GENDER");
            assertEquals(VCardDataType.TEXT, gender.getDataType());
            assertEquals("m", gender.getValue());
            MyFormattedNameType fn = vcard.getProperty(MyFormattedNameType.class);
            assertEquals("JOHN DOE", fn.value);
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
    /**
     * An unrecognized foreign-namespace property becomes an Xml property:
     * its parameters element is consumed, comments are dropped, and the
     * remaining child markup is preserved as the property's DOM value.
     */
    @Test
    public void read_xml_property() throws Exception {
        //@formatter:off
        String xml =
        "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
            "<vcard>" +
                "<x:foo xmlns:x=\"http://example.com\">" +
                    "<parameters>" +
                        "<pref><integer>1</integer></pref>" +
                    "</parameters>" +
                    "<!-- comment -->" +
                    "<x:a />" +
                    "<x:b attr=\"value\">text</x:b>" +
                    "<x:c>text<x:child>child</x:child></x:c>" +
                "</x:foo>" +
            "</vcard>" +
        "</vcards>";
        //@formatter:on
        XCardReader reader = new XCardReader(xml);
        {
            VCard vcard = reader.readNext();
            assertVersion(V4_0, vcard);
            assertPropertyCount(1, vcard);
            Xml property = vcard.getXmls().get(0);
            assertIntEquals(1, property.getParameters().getPref());
            Document actual = property.getValue();
            //@formatter:off
            String propertyXml =
            "<foo xmlns=\"http://example.com\">" +
                "<a />" +
                "<b attr=\"value\">text</b>" +
                "<c>text<child>child</child></c>" +
            "</foo>";
            Document expected = XmlUtils.toDocument(propertyXml);
            //@formatter:on
            assertXMLEqual(expected, actual);
            assertWarnings(0, reader);
        }
        assertNoMoreVCards(reader);
    }
@Test
public void skipMeException() throws Exception {
    //@formatter:off
    String xml =
    "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
        "<vcard>" +
            "<skipme><text>value</text></skipme>" +
            "<x-foo><text>value</text></x-foo>" +
        "</vcard>" +
    "</vcards>";
    //@formatter:on

    XCardReader reader = new XCardReader(xml);
    reader.registerScribe(new SkipMeScribe());

    VCard parsed = reader.readNext();
    assertVersion(V4_0, parsed);

    //the scribe skipped <skipme>, so only the x-foo extended property remains
    assertPropertyCount(1, parsed);
    //@formatter:off
    assertRawProperty("x-foo", parsed)
        .value("value")
        .noMore();
    //@formatter:on

    //one warning for the skipped property
    assertWarnings(1, reader);
    assertNoMoreVCards(reader);
}
@Test
public void cannotParseException() throws Exception {
    String ns = V4_0.getXmlNamespace();

    //@formatter:off
    String xml =
    "<vcards xmlns=\"" + ns + "\">" +
        "<vcard>" +
            "<group name=\"grp\">" +
                "<cannotparse><text>value1</text></cannotparse>" +
            "</group>" +
            "<cannotparse><text>value2</text></cannotparse>" +
            "<x-foo><text>value</text></x-foo>" +
        "</vcard>" +
    "</vcards>";
    //@formatter:on

    XCardReader reader = new XCardReader(xml);
    reader.registerScribe(new CannotParseScribe());

    VCard parsed = reader.readNext();
    assertVersion(V4_0, parsed);
    assertPropertyCount(3, parsed);

    //@formatter:off
    assertRawProperty("x-foo", parsed)
        .value("value")
        .noMore();
    //@formatter:on

    //properties the scribe could not parse are preserved as raw Xml properties
    Xml first = parsed.getXmls().get(0);
    assertXMLEqual(XmlUtils.toString(first.getValue()), XmlUtils.toDocument("<cannotparse xmlns=\"" + ns + "\"><text>value1</text></cannotparse>"), first.getValue());
    assertEquals("grp", first.getGroup());

    Xml second = parsed.getXmls().get(1);
    assertXMLEqual(XmlUtils.toString(second.getValue()), XmlUtils.toDocument("<cannotparse xmlns=\"" + ns + "\"><text>value2</text></cannotparse>"), second.getValue());
    assertNull(second.getGroup());

    //one warning per unparseable property
    assertWarnings(2, reader);
    assertNoMoreVCards(reader);
}
@Test
public void close_before_stream_ends() throws Exception {
    //@formatter:off
    String xml =
    "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
        "<vcard>" +
            "<fn><text>Dr. Gregory House M.D.</text></fn>" +
        "</vcard>" +
        "<vcard>" +
            "<fn><text>Dr. Lisa Cuddy M.D.</text></fn>" +
        "</vcard>" +
    "</vcards>";
    //@formatter:on

    XCardReader reader = new XCardReader(xml);

    //read only the first of the two vCards
    VCard first = reader.readNext();
    assertVersion(V4_0, first);
    assertPropertyCount(1, first);
    //@formatter:off
    assertSimpleProperty(first.getFormattedNames())
        .value("Dr. Gregory House M.D.")
        .noMore();
    //@formatter:on
    assertWarnings(0, reader);

    //closing early must make the reader report no further vCards
    reader.close();
    assertNoMoreVCards(reader);
}
/**
 * Writes a vCard containing a non-ASCII character to a temp file in UTF-8
 * and verifies that the file-based reader decodes it correctly.
 */
@Test
public void read_utf8() throws Exception {
    //@formatter:off
    String xml =
    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
    "<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\">" +
        "<vcard>" +
            "<note><text>\u019dote</text></note>" +
        "</vcard>" +
    "</vcards>";
    //@formatter:on

    File file = tempFolder.newFile();
    //close the writer even if write() throws (the original leaked it on failure)
    Writer writer = IOUtils.utf8Writer(file);
    try {
        writer.write(xml);
    } finally {
        writer.close();
    }

    XCardReader reader = new XCardReader(file);
    {
        VCard vcard = reader.readNext();
        assertVersion(V4_0, vcard);
        assertPropertyCount(1, vcard);

        //@formatter:off
        assertSimpleProperty(vcard.getNotes())
            .value("\u019dote")
            .noMore();
        //@formatter:on

        assertWarnings(0, reader);
    }
    assertNoMoreVCards(reader);
}
/**
 * A document with an empty root element yields no vCards.
 */
@Test
public void read_empty() throws Exception {
    XCardReader reader = new XCardReader("<vcards xmlns=\"" + V4_0.getXmlNamespace() + "\" />");
    assertNoMoreVCards(reader);
}
/**
 * Parses the complete example vCard that ships with the test resources
 * ("rfc6351-example.xml") and verifies every property it contains.
 */
@Test
public void read_rfc6351_example() throws Throwable {
    XCardReader reader = read("rfc6351-example.xml");
    {
        VCard vcard = reader.readNext();
        assertVersion(V4_0, vcard);
        assertPropertyCount(16, vcard);

        //@formatter:off
        //identification properties
        assertSimpleProperty(vcard.getFormattedNames())
            .value("Simon Perreault")
            .noMore();

        assertStructuredName(vcard)
            .family("Perreault")
            .given("Simon")
            .suffixes("ing. jr", "M.Sc.")
            .noMore();

        //birthday is a partial date (month/day only, no year)
        assertDateProperty(vcard.getBirthdays())
            .partialDate(PartialDate.builder().month(2).date(3).build())
            .noMore();

        //anniversary is a full date-time with a -05:00 UTC offset
        assertDateProperty(vcard.getAnniversaries())
            .partialDate(PartialDate.builder()
                .year(2009)
                .month(8)
                .date(8)
                .hour(14)
                .minute(30)
                .offset(new UtcOffset(false, -5, 0))
                .build()
            )
            .noMore();

        assertTrue(vcard.getGender().isMale());

        //two languages, ordered by PREF parameter
        assertSimpleProperty(vcard.getLanguages())
            .value("fr")
            .param("PREF", "1")
        .next()
            .value("en")
            .param("PREF", "2")
            .noMore();

        assertListProperty(vcard.getOrganizations())
            .values("Viagenie")
            .param("TYPE", "work")
            .noMore();

        //work address with a formatted label
        assertAddress(vcard)
            .streetAddress("2875 boul. Laurier, suite D2-630")
            .locality("Quebec")
            .region("QC")
            .postalCode("G1V 2M2")
            .country("Canada")
            .label("Simon Perreault\n2875 boul. Laurier, suite D2-630\nQuebec, QC, Canada\nG1V 2M2")
            .types(AddressType.WORK)
            .noMore();

        //two telephone numbers as tel: URIs
        assertTelephone(vcard)
            .uri(new TelUri.Builder("+1-418-656-9254").extension("102").build())
            .types(TelephoneType.WORK, TelephoneType.VOICE)
        .next()
            .uri(new TelUri.Builder("+1-418-262-6501").build())
            .types(TelephoneType.WORK, TelephoneType.VOICE, TelephoneType.CELL, TelephoneType.VIDEO, TelephoneType.TEXT)
            .noMore();

        assertEmail(vcard)
            .value("simon.perreault@viagenie.ca")
            .types(EmailType.WORK)
            .noMore();

        assertGeo(vcard)
            .latitude(46.766336)
            .longitude(-71.28955)
            .param("TYPE", "work")
            .noMore();

        assertBinaryProperty(vcard.getKeys())
            .url("http://www.viagenie.ca/simon.perreault/simon.asc")
            .param("TYPE", "work")
            .noMore();

        assertTimezone(vcard)
            .text("America/Montreal")
            .noMore();

        assertSimpleProperty(vcard.getUrls())
            .value("http://nomis80.org")
            .param("TYPE", "home")
            .noMore();
        //@formatter:on

        //the parsed vCard must also pass validation for its own version
        assertValidate(vcard).versions(vcard.getVersion()).run();
        assertWarnings(0, reader);
    }
    assertNoMoreVCards(reader);
}
/**
 * Creates an {@link XCardReader} that reads a classpath resource located
 * relative to this test class.
 * <p>
 * NOTE(review): {@code getResourceAsStream} returns null when the resource is
 * missing; the reader constructor would then receive null -- tests rely on the
 * resource being present.
 *
 * @param file the name of the classpath resource (e.g. "rfc6351-example.xml")
 * @return a reader over that resource
 * @throws SAXException declared for caller convenience -- TODO confirm whether construction can actually throw it
 * @throws IOException if the stream cannot be opened
 */
private static XCardReader read(String file) throws SAXException, IOException {
    return new XCardReader(XCardReaderTest.class.getResourceAsStream(file));
}
}
| |
/*
* Copyright [2006] [University Corporation for Advanced Internet Development, Inc.]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensaml.xml.util;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import javax.xml.namespace.QName;
import org.opensaml.xml.Configuration;
import org.opensaml.xml.XMLObject;
/**
 * A map of attribute names and attribute values that invalidates the DOM of the attribute owning XMLObject when the
 * attributes change.
 *
 * <strong>Note:</strong> the backing store is a {@link ConcurrentHashMap}, which rejects null keys and values;
 * putting a null value therefore throws a {@link NullPointerException}.
 */
public class AttributeMap implements Map<QName, String> {

    /** XMLObject owning the attributes. */
    private XMLObject attributeOwner;

    /** Map of attributes. */
    private Map<QName, String> attributes;

    /** Set of attribute QNames which have been locally registered as having an ID type within this
     * AttributeMap instance. */
    private Set<QName> idAttribNames;

    /**
     * Constructor.
     *
     * @param newOwner the XMLObject that owns these attributes
     *
     * @throws NullPointerException thrown if the given XMLObject is null
     */
    public AttributeMap(XMLObject newOwner) throws NullPointerException {
        if (newOwner == null) {
            throw new NullPointerException("Attribute owner XMLObject may not be null");
        }

        attributeOwner = newOwner;
        attributes = new ConcurrentHashMap<QName, String>();
        idAttribNames = new CopyOnWriteArraySet<QName>();
    }

    /** {@inheritDoc} */
    public String put(QName attributeName, String value) {
        String oldValue = get(attributeName);
        // Compare by value equality, not reference identity. The previous code used
        // "value != oldValue", which invalidated the cached DOM (and churned the ID
        // index) whenever an equal-but-distinct String instance was set.
        boolean changed = (value == null) ? (oldValue != null) : !value.equals(oldValue);
        if (changed) {
            releaseDOM();
            attributes.put(attributeName, value);
            if (isIDAttribute(attributeName) || Configuration.isIDAttribute(attributeName)) {
                attributeOwner.getIDIndex().deregisterIDMapping(oldValue);
                attributeOwner.getIDIndex().registerIDMapping(value, attributeOwner);
            }
        }
        return oldValue;
    }

    /** {@inheritDoc} */
    public void clear() {
        // Delegate to remove() so DOM invalidation and ID deregistration happen per entry.
        // Iterating while removing is safe because the backing map is a ConcurrentHashMap.
        for (QName attributeName : attributes.keySet()) {
            remove(attributeName);
        }
    }

    /**
     * Returns the set of keys.
     *
     * @return unmodifiable set of keys
     */
    public Set<QName> keySet() {
        return Collections.unmodifiableSet(attributes.keySet());
    }

    /** {@inheritDoc} */
    public int size() {
        return attributes.size();
    }

    /** {@inheritDoc} */
    public boolean isEmpty() {
        return attributes.isEmpty();
    }

    /** {@inheritDoc} */
    public boolean containsKey(Object key) {
        return attributes.containsKey(key);
    }

    /** {@inheritDoc} */
    public boolean containsValue(Object value) {
        return attributes.containsValue(value);
    }

    /** {@inheritDoc} */
    public String get(Object key) {
        return attributes.get(key);
    }

    /** {@inheritDoc} */
    public String remove(Object key) {
        String removedValue = attributes.remove(key);
        if (removedValue != null) {
            releaseDOM();
            // The cast is safe here: a non-null removal implies the key was a QName
            // present in the map.
            QName attributeName = (QName) key;
            if (isIDAttribute(attributeName) || Configuration.isIDAttribute(attributeName)) {
                attributeOwner.getIDIndex().deregisterIDMapping(removedValue);
            }
        }
        return removedValue;
    }

    /** {@inheritDoc} */
    public void putAll(Map<? extends QName, ? extends String> t) {
        // Route through put() so each entry triggers DOM invalidation and ID bookkeeping.
        if (t != null && t.size() > 0) {
            for (Entry<? extends QName, ? extends String> entry : t.entrySet()) {
                put(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Returns the values in this map.
     *
     * @return an unmodifiable collection of values
     */
    public Collection<String> values() {
        return Collections.unmodifiableCollection(attributes.values());
    }

    /**
     * Returns the set of entries.
     *
     * @return unmodifiable set of entries
     */
    public Set<Entry<QName, String>> entrySet() {
        return Collections.unmodifiableSet(attributes.entrySet());
    }

    /**
     * Register an attribute as having a type of ID.
     *
     * @param attributeName the QName of the ID attribute to be registered
     */
    public void registerID(QName attributeName) {
        if (! idAttribNames.contains(attributeName)) {
            idAttribNames.add(attributeName);
        }

        // In case attribute already has a value,
        // register the current value mapping with the XMLObject owner.
        if (containsKey(attributeName)) {
            attributeOwner.getIDIndex().registerIDMapping(get(attributeName), attributeOwner);
        }
    }

    /**
     * Deregister an attribute as having a type of ID.
     *
     * @param attributeName the QName of the ID attribute to be de-registered
     */
    public void deregisterID(QName attributeName) {
        if (idAttribNames.contains(attributeName)) {
            idAttribNames.remove(attributeName);
        }

        // In case attribute already has a value,
        // deregister the current value mapping with the XMLObject owner.
        if (containsKey(attributeName)) {
            attributeOwner.getIDIndex().deregisterIDMapping(get(attributeName));
        }
    }

    /**
     * Check whether a given attribute is locally registered as having an ID type within
     * this AttributeMap instance.
     *
     * @param attributeName the QName of the attribute to be checked for ID type.
     * @return true if attribute is registered as having an ID type.
     */
    public boolean isIDAttribute(QName attributeName) {
        return idAttribNames.contains(attributeName);
    }

    /**
     * Releases the DOM caching associated XMLObject and its ancestors.
     */
    private void releaseDOM() {
        attributeOwner.releaseDOM();
        attributeOwner.releaseParentDOM(true);
    }
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.instrument.bond;
import org.threeten.bp.ZonedDateTime;
import com.opengamma.analytics.financial.instrument.InstrumentDefinitionVisitor;
import com.opengamma.analytics.financial.instrument.payment.CouponFixedDefinition;
import com.opengamma.analytics.financial.instrument.payment.PaymentFixedDefinition;
import com.opengamma.analytics.financial.interestrate.bond.definition.BondFixedSecurity;
import com.opengamma.analytics.financial.interestrate.bond.definition.BondFixedTransaction;
import com.opengamma.analytics.financial.interestrate.bond.provider.BondSecurityDiscountingMethod;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.financial.convention.daycount.AccruedInterestCalculator;
import com.opengamma.util.ArgumentChecker;
/**
 * Describes a transaction on a fixed coupon bond issue.
 */
public class BondFixedTransactionDefinition extends BondTransactionDefinition<PaymentFixedDefinition, CouponFixedDefinition> {

  /**
   * The method to compute price from yield.
   */
  private static final BondSecurityDiscountingMethod METHOD_BOND = BondSecurityDiscountingMethod.getInstance();
  /**
   * Accrued interest at settlement date. Computed in the constructor; adjusted
   * downward when settlement falls in the ex-coupon period.
   */
  private double _accruedInterestAtSettlement;

  /**
   * Constructor of a fixed coupon bond transaction from all the transaction details.
   *
   * @param underlyingBond
   *          The fixed coupon bond underlying the transaction.
   * @param quantity
   *          The number of bonds purchased (can be negative or positive).
   * @param settlementDate
   *          Transaction settlement date.
   * @param cleanPrice
   *          The (clean) price of the transaction in relative term (i.e. 0.90 if the dirty price is 90% of nominal).
   */
  public BondFixedTransactionDefinition(final BondFixedSecurityDefinition underlyingBond, final double quantity, final ZonedDateTime settlementDate,
      final double cleanPrice) {
    super(underlyingBond, quantity, settlementDate, cleanPrice);
    _accruedInterestAtSettlement = 0;
    final int nbCoupon = underlyingBond.getCoupons().getNumberOfPayments();
    // Accrued interest between the previous accrual date and settlement, using the
    // bond's day count, coupon rate, frequency and EOM convention.
    final double accruedInterest = AccruedInterestCalculator.getAccruedInterest(getUnderlyingBond().getDayCount(), getCouponIndex(), nbCoupon,
        getPreviousAccrualDate(),
        settlementDate, getNextAccrualDate(), underlyingBond.getCoupons().getNthPayment(getCouponIndex()).getRate(), underlyingBond.getCouponPerYear(),
        underlyingBond.isEOM());
    // If settlement falls inside the ex-coupon period, the next coupon is not received
    // by the buyer, so the coupon rate is subtracted from the accrued interest
    // (NOTE(review): the rate is used here as the coupon amount proxy -- confirm units).
    if (underlyingBond.getExCouponDays() != 0 && getNextAccrualDate().minusDays(underlyingBond.getExCouponDays()).isBefore(settlementDate)) {
      _accruedInterestAtSettlement = accruedInterest - underlyingBond.getCoupons().getNthPayment(getCouponIndex()).getRate();
    } else {
      _accruedInterestAtSettlement = accruedInterest;
    }
  }

  /**
   * Builder of a fixed coupon bond transaction from the underlying bond and the conventional yield at settlement date.
   *
   * @param underlyingBond
   *          The fixed coupon bond underlying the transaction.
   * @param quantity
   *          The number of bonds purchased (can be negative or positive).
   * @param settlementDate
   *          Transaction settlement date.
   * @param yield
   *          The yield quoted in the underlying bond convention at settlement date. The yield is in decimal, i.e. 0.0525 for 5.25%.
   * @return The fixed coupon bond.
   */
  public static BondFixedTransactionDefinition fromYield(final BondFixedSecurityDefinition underlyingBond, final double quantity,
      final ZonedDateTime settlementDate, final double yield) {
    ArgumentChecker.notNull(settlementDate, "settlement date");
    ArgumentChecker.notNull(underlyingBond, "underlying bond");
    // Convert yield to clean price using the bond's quoting convention, then delegate.
    final BondFixedSecurity security = underlyingBond.toDerivative(settlementDate, settlementDate);
    final double cleanPrice = METHOD_BOND.cleanPriceFromYield(security, yield);
    return new BondFixedTransactionDefinition(underlyingBond, quantity, settlementDate, cleanPrice);
  }

  /**
   * Gets the accrued interest at transaction settlement.
   *
   * @return The accrued interest at settlement.
   */
  public double getAccruedInterestAtSettlement() {
    return _accruedInterestAtSettlement;
  }

  /**
   * Gets the bond underlying the transaction.
   *
   * @return The underlying bond.
   */
  @Override
  public BondFixedSecurityDefinition getUnderlyingBond() {
    return (BondFixedSecurityDefinition) super.getUnderlyingBond();
  }

  /**
   * {@inheritDoc}
   *
   * @deprecated Use the method that does not take yield curve names
   */
  @Deprecated
  @Override
  public BondFixedTransaction toDerivative(final ZonedDateTime date, final String... yieldCurveNames) {
    // Implementation note: First yield curve used for coupon and notional (credit), the second for risk free settlement.
    ArgumentChecker.notNull(date, "date");
    ArgumentChecker.notNull(yieldCurveNames, "yield curve names");
    ArgumentChecker.isTrue(yieldCurveNames.length > 0, "at least one curve required");
    final ZonedDateTime spot = ScheduleCalculator.getAdjustedDate(date, getUnderlyingBond().getSettlementDays(), getUnderlyingBond().getCalendar());
    final BondFixedSecurity bondPurchase = getUnderlyingBond().toDerivative(date, getSettlementDate(), yieldCurveNames);
    final BondFixedSecurity bondStandard = getUnderlyingBond().toDerivative(date, yieldCurveNames);
    final int nbCoupon = getUnderlyingBond().getCoupons().getNumberOfPayments();
    int couponIndex = 0; // The index of the coupon of the spot date.
    // Find the first coupon whose accrual end date is after the spot date.
    for (int loopcpn = 0; loopcpn < nbCoupon; loopcpn++) {
      if (getUnderlyingBond().getCoupons().getNthPayment(loopcpn).getAccrualEndDate().isAfter(spot)) {
        couponIndex = loopcpn;
        break;
      }
    }
    final double notionalStandard = getUnderlyingBond().getCoupons().getNthPayment(couponIndex).getNotional();
    double price;
    if (getSettlementDate().isBefore(date)) { // If settlement already took place, the price is set to 0.
      price = 0.0;
    } else {
      price = getPrice();
    }
    final BondFixedTransaction result = new BondFixedTransaction(bondPurchase, getQuantity(), price, bondStandard, notionalStandard);
    return result;
  }

  // NOTE(review): this overload compares settlement to the valuation date in *local-date*
  // terms, whereas the deprecated overload above uses the full ZonedDateTime comparison --
  // confirm the difference is intentional.
  @Override
  public BondFixedTransaction toDerivative(final ZonedDateTime date) {
    ArgumentChecker.notNull(date, "date");
    final ZonedDateTime spot = ScheduleCalculator.getAdjustedDate(date, getUnderlyingBond().getSettlementDays(), getUnderlyingBond().getCalendar());
    final BondFixedSecurity bondPurchase = getUnderlyingBond().toDerivative(date, getSettlementDate());
    final BondFixedSecurity bondStandard = getUnderlyingBond().toDerivative(date);
    final int nbCoupon = getUnderlyingBond().getCoupons().getNumberOfPayments();
    int couponIndex = 0; // The index of the coupon of the spot date.
    // Find the first coupon whose accrual end date is after the spot date.
    for (int loopcpn = 0; loopcpn < nbCoupon; loopcpn++) {
      if (getUnderlyingBond().getCoupons().getNthPayment(loopcpn).getAccrualEndDate().isAfter(spot)) {
        couponIndex = loopcpn;
        break;
      }
    }
    final double notionalStandard = getUnderlyingBond().getCoupons().getNthPayment(couponIndex).getNotional();
    double price;
    if (getSettlementDate().toLocalDate().isBefore(date.toLocalDate())) { // Implementation note: If settlement already took place (in day terms), the price is
      // set to 0.
      price = 0.0;
    } else {
      price = getPrice();
    }
    final BondFixedTransaction result = new BondFixedTransaction(bondPurchase, getQuantity(), price, bondStandard, notionalStandard);
    return result;
  }

  @Override
  public <U, V> V accept(final InstrumentDefinitionVisitor<U, V> visitor, final U data) {
    ArgumentChecker.notNull(visitor, "visitor");
    return visitor.visitBondFixedTransactionDefinition(this, data);
  }

  @Override
  public <V> V accept(final InstrumentDefinitionVisitor<?, V> visitor) {
    ArgumentChecker.notNull(visitor, "visitor");
    return visitor.visitBondFixedTransactionDefinition(this);
  }

  @Override
  public int hashCode() {
    // Fold the accrued-interest double into the superclass hash.
    final int prime = 31;
    int result = super.hashCode();
    long temp;
    temp = Double.doubleToLongBits(_accruedInterestAtSettlement);
    result = prime * result + (int) (temp ^ temp >>> 32);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final BondFixedTransactionDefinition other = (BondFixedTransactionDefinition) obj;
    // Bit-level comparison so that NaN == NaN and +0.0 != -0.0, consistent with hashCode().
    if (Double.doubleToLongBits(_accruedInterestAtSettlement) != Double.doubleToLongBits(other._accruedInterestAtSettlement)) {
      return false;
    }
    return true;
  }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.DuplicateResourceException;
import org.apache.ambari.server.StaticallyInject;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.predicate.EqualsPredicate;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.RequestStatus;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.orm.dao.RemoteAmbariClusterDAO;
import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity;
import org.apache.ambari.server.orm.entities.RemoteAmbariClusterServiceEntity;
import org.apache.ambari.server.security.authorization.RoleAuthorization;
import org.apache.ambari.server.view.RemoteAmbariClusterRegistry;
import org.apache.ambari.view.MaskException;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Strings;
import com.google.inject.Inject;
/**
 * Resource Provider for Remote Cluster
 */
@StaticallyInject
public class RemoteClusterResourceProvider extends AbstractAuthorizedResourceProvider {

  /**
   * Remote Cluster property id constants.
   */
  public static final String CLUSTER_NAME_PROPERTY_ID = "ClusterInfo/name";
  public static final String CLUSTER_ID_PROPERTY_ID = "ClusterInfo/cluster_id";
  public static final String CLUSTER_URL_PROPERTY_ID = "ClusterInfo/url";
  public static final String USERNAME_PROPERTY_ID = "ClusterInfo/username";
  public static final String PASSWORD_PROPERTY_ID = "ClusterInfo/password";
  public static final String SERVICES_PROPERTY_ID = "ClusterInfo/services";

  /**
   * The logger.
   */
  private final static Logger LOG = LoggerFactory.getLogger(RemoteClusterResourceProvider.class);

  /**
   * The key property ids for a Remote Cluster resource.
   */
  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
  static {
    keyPropertyIds.put(Resource.Type.RemoteCluster, CLUSTER_NAME_PROPERTY_ID);
  }

  /**
   * The property ids for a Remote Cluster resource.
   */
  private static Set<String> propertyIds = new HashSet<>();
  static {
    propertyIds.add(CLUSTER_NAME_PROPERTY_ID);
    propertyIds.add(CLUSTER_ID_PROPERTY_ID);
    propertyIds.add(CLUSTER_URL_PROPERTY_ID);
    propertyIds.add(USERNAME_PROPERTY_ID);
    propertyIds.add(PASSWORD_PROPERTY_ID);
    propertyIds.add(SERVICES_PROPERTY_ID);
  }

  @Inject
  private static RemoteAmbariClusterDAO remoteAmbariClusterDAO;

  @Inject
  private static Configuration configuration;

  @Inject
  private static RemoteAmbariClusterRegistry remoteAmbariClusterRegistry;

  /**
   * Create a new resource provider. Create/update/delete all require the
   * AMBARI_ADD_DELETE_CLUSTERS authorization.
   */
  protected RemoteClusterResourceProvider() {
    super(propertyIds, keyPropertyIds);

    EnumSet<RoleAuthorization> requiredAuthorizations = EnumSet.of(RoleAuthorization.AMBARI_ADD_DELETE_CLUSTERS);
    setRequiredCreateAuthorizations(requiredAuthorizations);
    setRequiredDeleteAuthorizations(requiredAuthorizations);
    setRequiredUpdateAuthorizations(requiredAuthorizations);
  }

  @Override
  public Map<Resource.Type, String> getKeyPropertyIds() {
    return keyPropertyIds;
  }

  @Override
  protected Set<String> getPKPropertyIds() {
    return new HashSet<>(keyPropertyIds.values());
  }

  @Override
  public RequestStatus createResourcesAuthorized(Request request) throws SystemException, UnsupportedPropertyException, ResourceAlreadyExistsException, NoSuchParentResourceException {
    for (Map<String, Object> properties : request.getProperties()) {
      createResources(getCreateCommand(properties));
    }
    notifyCreate(Resource.Type.RemoteCluster, request);
    return getRequestStatus(null);
  }

  @Override
  public Set<Resource> getResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    Set<Resource> resources = new HashSet<>();
    Set<String> requestedIds = getRequestPropertyIds(request, predicate);

    // No predicate means "list all" -- seed with an empty property map.
    Set<Map<String, Object>> propertyMaps = getPropertyMaps(predicate);
    if (propertyMaps.isEmpty()) {
      propertyMaps.add(Collections.emptyMap());
    }

    for (Map<String, Object> propertyMap : propertyMaps) {
      String clusterName = (String) propertyMap.get(CLUSTER_NAME_PROPERTY_ID);
      if (!Strings.isNullOrEmpty(clusterName)) {
        // Lookup by name; a missing cluster is an error for a named query.
        RemoteAmbariClusterEntity cluster = remoteAmbariClusterDAO.findByName(clusterName);
        if (cluster == null) {
          throw new NoSuchResourceException(String.format("Cluster with name %s cannot be found", clusterName));
        }
        resources.add(toResource(requestedIds, cluster));
      } else {
        // No name given: return every registered remote cluster.
        for (RemoteAmbariClusterEntity cluster : remoteAmbariClusterDAO.findAll()) {
          Resource resource = toResource(requestedIds, cluster);
          resources.add(resource);
        }
      }
    }
    return resources;
  }

  /**
   * Convert a remote cluster entity into a Resource, copying only the requested
   * property ids. The password is intentionally never exposed.
   *
   * @param requestedIds the property ids requested by the caller
   * @param cluster the persisted remote cluster entity
   * @return the resource representation
   */
  protected Resource toResource(Set<String> requestedIds, RemoteAmbariClusterEntity cluster) {
    Resource resource = new ResourceImpl(Resource.Type.RemoteCluster);
    setResourceProperty(resource, CLUSTER_NAME_PROPERTY_ID, cluster.getName(), requestedIds);
    setResourceProperty(resource, CLUSTER_ID_PROPERTY_ID, cluster.getId(), requestedIds);
    setResourceProperty(resource, CLUSTER_URL_PROPERTY_ID, cluster.getUrl(), requestedIds);
    setResourceProperty(resource, USERNAME_PROPERTY_ID, cluster.getUsername(), requestedIds);
    ArrayList<String> services = new ArrayList<>();
    for (RemoteAmbariClusterServiceEntity remoteClusterServiceEntity : cluster.getServices()) {
      services.add(remoteClusterServiceEntity.getServiceName());
    }
    setResourceProperty(resource, SERVICES_PROPERTY_ID, services, requestedIds);
    return resource;
  }

  @Override
  public RequestStatus updateResourcesAuthorized(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    Iterator<Map<String, Object>> iterator = request.getProperties().iterator();
    if (iterator.hasNext()) {
      for (Map<String, Object> propertyMap : getPropertyMaps(iterator.next(), predicate)) {
        modifyResources(getUpdateCommand(propertyMap));
      }
    }
    notifyUpdate(Resource.Type.RemoteCluster, request, predicate);
    return getRequestStatus(null);
  }

  @Override
  protected RequestStatus deleteResourcesAuthorized(Request request, Predicate predicate)
    throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
    modifyResources(getDeleteCommand(predicate));
    // Fixed: notify listeners for the RemoteCluster resource type. The original
    // notified Resource.Type.ViewInstance, an apparent copy-paste error --
    // create and update in this provider both notify RemoteCluster.
    notifyDelete(Resource.Type.RemoteCluster, predicate);
    return getRequestStatus(null);
  }

  /**
   * Get the command to create the RemoteAmbariCluster
   * @param properties the request properties
   * @return A command to create the RemoteAmbariCluster
   */
  private Command<Void> getCreateCommand(final Map<String, Object> properties) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
        if (StringUtils.isEmpty(name)) {
          // Fixed typo in message: "ne" -> "be".
          throw new IllegalArgumentException("Cluster Name cannot be null or Empty");
        }
        if (remoteAmbariClusterDAO.findByName(name) != null) {
          throw new DuplicateResourceException(String.format("Remote cluster with name %s already exists", name));
        }
        saveOrUpdateRemoteAmbariClusterEntity(properties, false);
        return null;
      }
    };
  }

  /**
   * Get the command to update the RemoteAmbariCluster
   * @param properties the request properties
   * @return A command to update the RemoteAmbariCluster
   */
  private Command<Void> getUpdateCommand(final Map<String, Object> properties) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
        if (StringUtils.isEmpty(name)) {
          throw new IllegalArgumentException("Cluster Name cannot be null or Empty");
        }
        String id = (String) properties.get(CLUSTER_ID_PROPERTY_ID);
        if (StringUtils.isEmpty(id)) {
          throw new IllegalArgumentException("Cluster Id cannot be null or Empty");
        }
        saveOrUpdateRemoteAmbariClusterEntity(properties, true);
        return null;
      }
    };
  }

  /**
   * Save or update Remote Ambari Cluster Entity to database
   *
   * @param properties the request properties (name, url, username, password, id for update)
   * @param update true for an update of an existing cluster, false for a create
   * @throws AmbariException on persistence failure
   */
  private void saveOrUpdateRemoteAmbariClusterEntity(Map<String, Object> properties, boolean update) throws AmbariException {
    String name = (String) properties.get(CLUSTER_NAME_PROPERTY_ID);
    String url = (String) properties.get(CLUSTER_URL_PROPERTY_ID);
    String username = (String) properties.get(USERNAME_PROPERTY_ID);
    String password = (String) properties.get(PASSWORD_PROPERTY_ID);

    // NOTE(review): with "&&" this only rejects the request when BOTH url and
    // username are empty, yet the message reads "Url or username" -- confirm
    // whether either one missing should fail ("||") before changing behavior.
    if (StringUtils.isEmpty(url) && StringUtils.isEmpty(username)) {
      throw new IllegalArgumentException("Url or username cannot be null");
    }

    RemoteAmbariClusterEntity entity;
    if (update) {
      Long id = Long.valueOf((String) properties.get(CLUSTER_ID_PROPERTY_ID));
      entity = remoteAmbariClusterDAO.findById(id);
      if (entity == null) {
        throw new IllegalArgumentException(String.format("Cannot find cluster with Id : \"%s\"", id));
      }
    } else {
      entity = remoteAmbariClusterDAO.findByName(name);
      if (entity != null) {
        throw new DuplicateResourceException(String.format("Cluster with name : \"%s\" already exists", name));
      }
    }

    // Check Password not null for create
    // Check username matches the entity username if password not present
    if (StringUtils.isBlank(password) && !update) {
      throw new IllegalArgumentException("Password cannot be null");
    } else if (StringUtils.isBlank(password) && update && !username.equals(entity.getUsername())) {
      throw new IllegalArgumentException("Failed to update. Username does not match.");
    }

    if (entity == null) {
      entity = new RemoteAmbariClusterEntity();
    }

    entity.setName(name);
    entity.setUrl(url);
    try {
      // Credentials are only replaced when a password was supplied.
      if (password != null) {
        entity.setUsername(username);
        entity.setPassword(password);
      }
    } catch (MaskException e) {
      throw new IllegalArgumentException("Failed to create new Remote Cluster " + name + ". Illegal Password");
    }

    try {
      remoteAmbariClusterRegistry.saveOrUpdate(entity, update);
    } catch (Exception e) {
      throw new IllegalArgumentException("Failed to create new Remote Cluster " + name + ". " + e.getMessage(), e);
    }
  }

  /**
   * Get the command to delete the Cluster
   * @param predicate the predicate identifying the cluster by name
   * @return The delete command
   */
  private Command<Void> getDeleteCommand(final Predicate predicate) {
    return new Command<Void>() {
      @Override
      public Void invoke() throws AmbariException {
        Comparable deletedCluster = ((EqualsPredicate) predicate).getValue();
        String toDelete = deletedCluster.toString();
        RemoteAmbariClusterEntity clusterEntity = remoteAmbariClusterDAO.findByName(toDelete);
        if (clusterEntity == null) {
          throw new IllegalArgumentException("The Cluster " + toDelete + " does not exist");
        }
        remoteAmbariClusterRegistry.delete(clusterEntity);
        return null;
      }
    };
  }

}
| |
/*
* Copyright 2021 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.core.utils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.config.TestObjectMapperConfiguration;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
/**
 * Unit tests for {@link ParametersUtils}, covering ${...} parameter
 * substitution against JSONPath-style documents, escaped $${...} tags,
 * concurrent replacement, non-mutation of the input map, and workflow-input
 * template merging via {@code getWorkflowInput}.
 */
@ContextConfiguration(classes = {TestObjectMapperConfiguration.class})
@RunWith(SpringRunner.class)
@SuppressWarnings("rawtypes")
public class ParametersUtilsTest {
// Object under test; re-created before every test in setup().
private ParametersUtils parametersUtils;
// Helper used by the *Expand tests to inline embedded JSON strings.
private JsonUtils jsonUtils;
@Autowired private ObjectMapper objectMapper;
@Before
public void setup() {
parametersUtils = new ParametersUtils(objectMapper);
jsonUtils = new JsonUtils(objectMapper);
}
// Basic replacement: "${$.path}" JSONPath expressions and bare "${key}"
// references resolve against the given document, preserving value types
// (the Integer 2 stays an Integer).
@Test
public void testReplace() throws Exception {
Map<String, Object> map = new HashMap<>();
map.put("name", "conductor");
map.put("version", 2);
map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");
Map<String, Object> input = new HashMap<>();
input.put("k1", "${$.externalId}");
input.put("k4", "${name}");
input.put("k5", "${version}");
// Round-trip through Jackson so the document is plain Maps/Lists.
Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);
Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
assertNotNull(replaced);
// externalId is a JSON *string*, so it is substituted verbatim.
assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1"));
assertEquals("conductor", replaced.get("k4"));
assertEquals(2, replaced.get("k5"));
}
// After JsonUtils.expand on a List document, the embedded JSON string in
// "externalId" becomes a real array and is addressable with indexed paths.
// The "__json_externalId" form (k3) must NOT resolve and yields null.
@Test
public void testReplaceWithArrayExpand() {
List<Object> list = new LinkedList<>();
Map<String, Object> map = new HashMap<>();
map.put("externalId", "[{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}]");
map.put("name", "conductor");
map.put("version", 2);
list.add(map);
jsonUtils.expand(list);
Map<String, Object> input = new HashMap<>();
input.put("k1", "${$..externalId}");
input.put("k2", "${$[0].externalId[0].taskRefName}");
input.put("k3", "${__json_externalId.taskRefName}");
input.put("k4", "${$[0].name}");
input.put("k5", "${$[0].version}");
Map<String, Object> replaced = parametersUtils.replace(input, list);
assertNotNull(replaced);
assertEquals(replaced.get("k2"), "t001");
assertNull(replaced.get("k3"));
assertEquals(replaced.get("k4"), "conductor");
assertEquals(replaced.get("k5"), 2);
}
// Same as above but with a Map document: after expansion the embedded JSON
// object is addressable both via JSONPath ("$.externalId") and via a bare
// dotted key ("externalId.taskRefName").
@Test
public void testReplaceWithMapExpand() {
Map<String, Object> map = new HashMap<>();
map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");
map.put("name", "conductor");
map.put("version", 2);
jsonUtils.expand(map);
Map<String, Object> input = new HashMap<>();
input.put("k1", "${$.externalId}");
input.put("k2", "${externalId.taskRefName}");
input.put("k4", "${name}");
input.put("k5", "${version}");
Map<String, Object> replaced = parametersUtils.replace(input, map);
assertNotNull(replaced);
assertEquals("t001", replaced.get("k2"));
// k3 was never put into the input map, so it must be absent.
assertNull(replaced.get("k3"));
assertEquals("conductor", replaced.get("k4"));
assertEquals(2, replaced.get("k5"));
}
// Regression test for thread interference: repeatedly mutates the source
// document and replaces on a worker thread, asserting each iteration sees
// its own value. Failures inside the async task surface through get() as
// an ExecutionException.
@Test
public void testReplaceConcurrent() throws ExecutionException, InterruptedException {
ExecutorService executorService = Executors.newFixedThreadPool(2);
AtomicReference<String> generatedId = new AtomicReference<>("test-0");
Map<String, Object> input = new HashMap<>();
Map<String, Object> payload = new HashMap<>();
payload.put("event", "conductor:TEST_EVENT");
payload.put("someId", generatedId);
input.put("payload", payload);
input.put("name", "conductor");
input.put("version", 2);
Map<String, Object> inputParams = new HashMap<>();
inputParams.put("k1", "${payload.someId}");
inputParams.put("k2", "${name}");
CompletableFuture.runAsync(
() -> {
for (int i = 0; i < 10000; i++) {
generatedId.set("test-" + i);
// Overwrite the AtomicReference entry with its current String value
// before each serialization.
payload.put("someId", generatedId.get());
Object jsonObj = null;
try {
jsonObj =
objectMapper.readValue(
objectMapper.writeValueAsString(input),
Object.class);
} catch (JsonProcessingException e) {
e.printStackTrace();
return;
}
Map<String, Object> replaced =
parametersUtils.replace(inputParams, jsonObj);
assertNotNull(replaced);
assertEquals(generatedId.get(), replaced.get("k1"));
assertEquals("conductor", replaced.get("k2"));
assertNull(replaced.get("k3"));
}
},
executorService)
.get();
executorService.shutdown();
}
// Tests ParametersUtils with Map and List input values, and verifies input map is not mutated
// by ParametersUtils.
@Test
public void testReplaceInputWithMapAndList() throws Exception {
Map<String, Object> map = new HashMap<>();
map.put("name", "conductor");
map.put("version", 2);
map.put("externalId", "{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}");
Map<String, Object> input = new HashMap<>();
input.put("k1", "${$.externalId}");
input.put("k2", "${name}");
input.put("k3", "${version}");
// Empty and whitespace-only expressions resolve to the empty string.
input.put("k4", "${}");
input.put("k5", "${ }");
Map<String, String> mapValue = new HashMap<>();
mapValue.put("name", "${name}");
mapValue.put("version", "${version}");
input.put("map", mapValue);
List<String> listValue = new ArrayList<>();
listValue.add("${name}");
listValue.add("${version}");
input.put("list", listValue);
Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);
Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
assertNotNull(replaced);
// Verify that values are replaced correctly.
assertEquals("{\"taskRefName\":\"t001\",\"workflowId\":\"w002\"}", replaced.get("k1"));
assertEquals("conductor", replaced.get("k2"));
assertEquals(2, replaced.get("k3"));
assertEquals("", replaced.get("k4"));
assertEquals("", replaced.get("k5"));
// Replacement recurses into nested Map and List values.
Map replacedMap = (Map) replaced.get("map");
assertEquals("conductor", replacedMap.get("name"));
assertEquals(2, replacedMap.get("version"));
List replacedList = (List) replaced.get("list");
assertEquals(2, replacedList.size());
assertEquals("conductor", replacedList.get(0));
assertEquals(2, replacedList.get(1));
// Verify that input map is not mutated
assertEquals("${$.externalId}", input.get("k1"));
assertEquals("${name}", input.get("k2"));
assertEquals("${version}", input.get("k3"));
Map inputMap = (Map) input.get("map");
assertEquals("${name}", inputMap.get("name"));
assertEquals("${version}", inputMap.get("version"));
List inputList = (List) input.get("list");
assertEquals(2, inputList.size());
assertEquals("${name}", inputList.get(0));
assertEquals("${version}", inputList.get(1));
}
// "$${...}" is the escape form: it is NOT evaluated and is emitted as the
// literal "${...}" text, in top-level strings as well as nested maps/lists.
@Test
public void testReplaceWithEscapedTags() throws Exception {
Map<String, Object> map = new HashMap<>();
map.put("someString", "conductor");
map.put("someNumber", 2);
Map<String, Object> input = new HashMap<>();
input.put(
"k1",
"${$.someString} $${$.someNumber}${$.someNumber} ${$.someNumber}$${$.someString}");
input.put("k2", "$${$.someString}afterText");
input.put("k3", "beforeText$${$.someString}");
input.put("k4", "$${$.someString} afterText");
input.put("k5", "beforeText $${$.someString}");
Map<String, String> mapValue = new HashMap<>();
mapValue.put("a", "${someString}");
mapValue.put("b", "${someNumber}");
mapValue.put("c", "$${someString} ${someNumber}");
input.put("map", mapValue);
List<String> listValue = new ArrayList<>();
listValue.add("${someString}");
listValue.add("${someNumber}");
listValue.add("${someString} $${someNumber}");
input.put("list", listValue);
Object jsonObj = objectMapper.readValue(objectMapper.writeValueAsString(map), Object.class);
Map<String, Object> replaced = parametersUtils.replace(input, jsonObj);
assertNotNull(replaced);
// Verify that values are replaced correctly.
assertEquals("conductor ${$.someNumber}2 2${$.someString}", replaced.get("k1"));
assertEquals("${$.someString}afterText", replaced.get("k2"));
assertEquals("beforeText${$.someString}", replaced.get("k3"));
assertEquals("${$.someString} afterText", replaced.get("k4"));
assertEquals("beforeText ${$.someString}", replaced.get("k5"));
Map replacedMap = (Map) replaced.get("map");
assertEquals("conductor", replacedMap.get("a"));
assertEquals(2, replacedMap.get("b"));
assertEquals("${someString} 2", replacedMap.get("c"));
List replacedList = (List) replaced.get("list");
assertEquals(3, replacedList.size());
assertEquals("conductor", replacedList.get(0));
assertEquals(2, replacedList.get(1));
assertEquals("conductor ${someNumber}", replacedList.get(2));
// Verify that input map is not mutated
Map inputMap = (Map) input.get("map");
assertEquals("${someString}", inputMap.get("a"));
assertEquals("${someNumber}", inputMap.get("b"));
assertEquals("$${someString} ${someNumber}", inputMap.get("c"));
// Verify that input list is not mutated
List inputList = (List) input.get("list");
assertEquals(3, inputList.size());
assertEquals("${someString}", inputList.get(0));
assertEquals("${someNumber}", inputList.get(1));
assertEquals("${someString} $${someNumber}", inputList.get(2));
}
// A WorkflowDef without an input template passes the supplied params through.
@Test
public void getWorkflowInputHandlesNullInputTemplate() {
WorkflowDef workflowDef = new WorkflowDef();
Map<String, Object> inputParams = Map.of("key", "value");
Map<String, Object> workflowInput =
parametersUtils.getWorkflowInput(workflowDef, inputParams);
assertEquals("value", workflowInput.get("key"));
}
// Template keys absent from the supplied params are filled in from the template.
@Test
public void getWorkflowInputFillsInTemplatedFields() {
WorkflowDef workflowDef = new WorkflowDef();
workflowDef.setInputTemplate(Map.of("other_key", "other_value"));
Map<String, Object> inputParams = new HashMap<>(Map.of("key", "value"));
Map<String, Object> workflowInput =
parametersUtils.getWorkflowInput(workflowDef, inputParams);
assertEquals("value", workflowInput.get("key"));
assertEquals("other_value", workflowInput.get("other_key"));
}
// Supplied params win over template defaults for the same key.
@Test
public void getWorkflowInputPreservesExistingFieldsIfPopulated() {
WorkflowDef workflowDef = new WorkflowDef();
String keyName = "key";
workflowDef.setInputTemplate(Map.of(keyName, "templated_value"));
Map<String, Object> inputParams = new HashMap<>(Map.of(keyName, "supplied_value"));
Map<String, Object> workflowInput =
parametersUtils.getWorkflowInput(workflowDef, inputParams);
assertEquals("supplied_value", workflowInput.get(keyName));
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.aggregation;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.aggregation.state.MaxOrMinByState;
import com.facebook.presto.operator.aggregation.state.MaxOrMinByStateFactory;
import com.facebook.presto.operator.aggregation.state.MaxOrMinByStateSerializer;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.block.VariableWidthBlockBuilder;
import com.facebook.presto.spi.type.AbstractFixedWidthType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import com.facebook.presto.type.TypeRegistry;
import io.airlift.slice.Slices;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Set;
import static com.facebook.presto.block.BlockAssertions.createDoublesBlock;
import static com.facebook.presto.block.BlockAssertions.createStringsBlock;
import static com.facebook.presto.operator.aggregation.AggregationTestUtils.assertAggregation;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet;
import static io.airlift.slice.SizeOf.SIZE_OF_DOUBLE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
/**
 * Tests for the {@code max_by} aggregation: registration across all orderable
 * key types, null handling, double/double and double/varchar variants, and
 * round-tripping of the aggregation state through its serializer.
 */
public class TestMaxByAggregation
{
private static final MetadataManager metadata = new MetadataManager();
@BeforeClass
public void setup()
{
// Register a custom orderable type so testAllRegistered also covers
// non-built-in types.
((TypeRegistry) metadata.getTypeManager()).addType(CustomDoubleType.CUSTOM_DOUBLE);
}
// max_by(value, key) must be registered for every (orderable key, any value)
// type combination known to the type manager.
@Test
public void testAllRegistered()
{
Set<Type> orderableTypes = metadata.getTypeManager()
.getTypes().stream()
.filter(Type::isOrderable)
.collect(toImmutableSet());
for (Type keyType : orderableTypes) {
for (Type valueType : metadata.getTypeManager().getTypes()) {
assertNotNull(metadata.getExactFunction(new Signature("max_by", valueType.getTypeSignature(), valueType.getTypeSignature(), keyType.getTypeSignature())));
}
}
}
// The row with the max key (2.0) carries a null value, so the aggregate is null.
@Test
public void testNull()
throws Exception
{
InternalAggregationFunction doubleDouble = metadata.getExactFunction(new Signature("max_by", StandardTypes.DOUBLE, StandardTypes.DOUBLE, StandardTypes.DOUBLE)).getAggregationFunction();
assertAggregation(
doubleDouble,
1.0,
null,
createPage(
new Double[] {1.0, null},
new Double[] {1.0, 2.0}));
}
// All-null pages aggregate to null; otherwise the value paired with the
// largest key wins (here key 1.5 -> value 2.0), across multiple pages.
@Test
public void testDoubleDouble()
throws Exception
{
InternalAggregationFunction doubleDouble = metadata.getExactFunction(new Signature("max_by", StandardTypes.DOUBLE, StandardTypes.DOUBLE, StandardTypes.DOUBLE)).getAggregationFunction();
assertAggregation(
doubleDouble,
1.0,
null,
createPage(
new Double[] {null},
new Double[] {null}),
createPage(
new Double[] {null},
new Double[] {null}));
assertAggregation(
doubleDouble,
1.0,
2.0,
createPage(
new Double[] {3.0, 2.0},
new Double[] {1.0, 1.5}),
createPage(
new Double[] {null},
new Double[] {null}));
}
// Same semantics with varchar values keyed by doubles; null keys are ignored.
@Test
public void testDoubleVarchar()
throws Exception
{
InternalAggregationFunction doubleVarchar = metadata.getExactFunction(new Signature("max_by", StandardTypes.VARCHAR, StandardTypes.VARCHAR, StandardTypes.DOUBLE)).getAggregationFunction();
assertAggregation(
doubleVarchar,
1.0,
"a",
createPage(
new String[] {"z", "a"},
new Double[] {1.0, 2.0}),
createPage(
new String[] {null},
new Double[] {null}));
assertAggregation(
doubleVarchar,
1.0,
"hi",
createPage(
new String[] {"zz", "hi"},
new Double[] {0.0, 1.0}),
createPage(
new String[] {null, "a"},
new Double[] {null, -1.0}));
}
// Serializes two (varchar key, double value) states into a block and checks
// that deserialization reproduces both key and value at each position.
@Test
public void testStateDeserializer()
throws Exception
{
String[] keys = new String[] {"loooooong string", "short string"};
double[] values = new double[] { 3.14, 2.71 };
MaxOrMinByStateSerializer serializer = new MaxOrMinByStateSerializer();
BlockBuilder builder = new VariableWidthBlockBuilder(new BlockBuilderStatus());
for (int i = 0; i < keys.length; i++) {
serializer.serialize(makeState(keys[i], values[i]), builder);
}
Block serialized = builder.build();
for (int i = 0; i < keys.length; i++) {
MaxOrMinByState deserialized = new MaxOrMinByStateFactory(DoubleType.DOUBLE, VarcharType.VARCHAR).createSingleState();
serializer.deserialize(serialized, i, deserialized);
assertEquals(VarcharType.VARCHAR.getSlice(deserialized.getKey(), 0), Slices.utf8Slice(keys[i]));
assertEquals(DoubleType.DOUBLE.getDouble(deserialized.getValue(), 0), values[i]);
}
}
// Builds a single-row (key, value) state from single-element blocks.
private static MaxOrMinByState makeState(String key, double value)
{
MaxOrMinByState result = new MaxOrMinByStateFactory(DoubleType.DOUBLE, VarcharType.VARCHAR).createSingleState();
result.setKey(createStringsBlock(key));
result.setValue(createDoublesBlock(value));
return result;
}
// Note: for both overloads the first array is the VALUE channel and the
// second is the KEY channel, matching max_by(value, key) argument order.
private static Page createPage(Double[] values, Double[] keys)
{
return new Page(createDoublesBlock(values), createDoublesBlock(keys));
}
private static Page createPage(String[] values, Double[] keys)
{
return new Page(createStringsBlock(values), createDoublesBlock(keys));
}
// Minimal fixed-width (8-byte) orderable type used only by testAllRegistered.
private static class CustomDoubleType
extends AbstractFixedWidthType
{
public static final CustomDoubleType CUSTOM_DOUBLE = new CustomDoubleType();
public static final String NAME = "custom_double";
private CustomDoubleType()
{
super(parseTypeSignature(NAME), double.class, SIZE_OF_DOUBLE);
}
@Override
public boolean isComparable()
{
return true;
}
@Override
public boolean isOrderable()
{
return true;
}
@Override
public Object getObjectValue(ConnectorSession session, Block block, int position)
{
if (block.isNull(position)) {
return null;
}
return block.getDouble(position, 0);
}
// NOTE(review): equalTo/hash compare the raw 64-bit representation via
// getLong, while compareTo uses double comparison — so e.g. +0.0 and -0.0
// compare equal but are not equalTo. Presumably acceptable for this test
// fixture; confirm if reused elsewhere.
@Override
public boolean equalTo(Block leftBlock, int leftPosition, Block rightBlock, int rightPosition)
{
long leftValue = leftBlock.getLong(leftPosition, 0);
long rightValue = rightBlock.getLong(rightPosition, 0);
return leftValue == rightValue;
}
@Override
public int hash(Block block, int position)
{
long value = block.getLong(position, 0);
return (int) (value ^ (value >>> 32));
}
@Override
public int compareTo(Block leftBlock, int leftPosition, Block rightBlock, int rightPosition)
{
double leftValue = leftBlock.getDouble(leftPosition, 0);
double rightValue = rightBlock.getDouble(rightPosition, 0);
return Double.compare(leftValue, rightValue);
}
@Override
public void appendTo(Block block, int position, BlockBuilder blockBuilder)
{
if (block.isNull(position)) {
blockBuilder.appendNull();
}
else {
blockBuilder.writeDouble(block.getDouble(position, 0)).closeEntry();
}
}
@Override
public double getDouble(Block block, int position)
{
return block.getDouble(position, 0);
}
@Override
public void writeDouble(BlockBuilder blockBuilder, double value)
{
blockBuilder.writeDouble(value).closeEntry();
}
}
}
| |
package net.maizegenetics.analysis.gbs.pana;
import net.maizegenetics.plugindef.AbstractPlugin;
import net.maizegenetics.plugindef.DataSet;
import net.maizegenetics.util.ArgsEngine;
import net.maizegenetics.util.DirectoryCrawler;
import net.maizegenetics.util.MultiMemberGZIPInputStream;
import org.apache.log4j.Logger;
import javax.swing.*;
import java.awt.*;
import java.io.*;
import java.util.concurrent.TimeUnit;
import net.maizegenetics.dna.map.TagGWASMap;
/**
* Make predictions based on trained machine learning model. Write predicted values into tagMap file
*
* @author Fei Lu
*/
public class PanAPredictionPlugin extends AbstractPlugin {

    static long timePoint1;
    private ArgsEngine engine = null;
    private Logger logger = Logger.getLogger(PanAPredictionPlugin.class);
    /** tagMap (e.g. tagGWASMap) file holding the tags to predict for. */
    String tagMap = null;
    /** Classpath entry of the weka library used to run the trained model. */
    String wekaPath = null;
    /** Trained weka M5Rules model file. */
    String modelFileS = null;
    /** File holding the Box-Cox lambda parameters (one per attribute). */
    String boxcoxParemeterFileS = null;

    public PanAPredictionPlugin() {
        super(null, false);
    }

    public PanAPredictionPlugin(Frame parentFrame) {
        super(parentFrame, false);
    }

    private void printUsage() {
        logger.info(
                "\n\nUsage is as follows:\n"
                + " -t  input tagMap (e.g. tagGWASMap) file\n"
                + " -m  trained machine learning model\n"
                + " -b  boxcox paremeter file\n"
                + " -w  path of weka library\n");
    }

    /**
     * Runs prediction block by block: for each block of the tagMap, writes an
     * ARFF file of Box-Cox transformed attributes, runs the weka model on it,
     * stores the predicted distances back into the map and persists the block.
     *
     * @param input ignored (command-line driven plugin)
     * @return always null
     */
    public DataSet performFunction(DataSet input) {
        // Read the lambda parameters from the second line of the Box-Cox file.
        // Best effort as in the original code: on failure the stack trace is
        // printed and lamdas stays null.
        double[] lamdas = null;
        try (BufferedReader br = new BufferedReader(new FileReader(this.boxcoxParemeterFileS), 65536)) {
            br.readLine(); // skip header line
            String[] temp = br.readLine().split("\t");
            lamdas = new double[temp.length];
            for (int i = 0; i < temp.length; i++) {
                lamdas[i] = Double.parseDouble(temp[i]);
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        TagGWASMap tgm = new TagGWASMap(this.tagMap);
        File arffFile = new File(new File(this.tagMap).getParent(), "block.arff");
        int blockNum = tgm.getBlockNum();
        for (int i = 0; i < blockNum; i++) {
            int startIndex = i * tgm.getBlockSize();
            // The last block may be shorter than the block size.
            int endIndex = startIndex + tgm.getBlockSize();
            if (endIndex > tgm.getTagCount()) endIndex = tgm.getTagCount();
            System.out.println("Start predicting block(Index) " + String.valueOf(i));
            this.generateARFFFileS(tgm, lamdas, startIndex, endIndex, arffFile);
            double[] predictedValue = this.mkPrediction(arffFile.getAbsolutePath(), startIndex, endIndex);
            for (int j = 0; j < predictedValue.length; j++) {
                tgm.getTagGWASMapInfo(j + startIndex).setPredictedDistance(predictedValue[j]);
            }
            tgm.writeBlock(i);
            System.out.println("Predicted distance of block(Index) " + String.valueOf(i) + " is written");
            System.out.println("");
        }
        arffFile.delete();
        System.out.println("Prediction completed in " + this.tagMap);
        return null;
    }

    /**
     * Invokes weka M5Rules as an external process on the given ARFF file and
     * parses one predicted value per instance from its output.
     * Exits the JVM if the prediction count does not match the block size.
     *
     * @param arffFileS  ARFF input file path
     * @param startIndex first tag index of the block (inclusive)
     * @param endIndex   last tag index of the block (exclusive)
     * @return one predicted value per tag in [startIndex, endIndex)
     */
    private double[] mkPrediction(String arffFileS, int startIndex, int endIndex) {
        // NOTE(review): command is built by string concatenation and run via
        // Runtime.exec; paths containing spaces would break. ProcessBuilder with
        // an argument list would be more robust.
        String cmd = "java -Xms500m -Xmx5g -cp " + this.wekaPath.replace("\\", "/") + " weka.classifiers.rules.M5Rules -p 0 -T " + arffFileS.replace("\\", "/") + " -l " + this.modelFileS.replace("\\", "/");
        System.out.println(cmd);
        Runtime rt = Runtime.getRuntime();
        Process p;
        double[] predictedValue = new double[endIndex - startIndex];
        try {
            p = rt.exec(cmd);
            BufferedReader br = new BufferedReader(new InputStreamReader(p.getInputStream()), 65536);
            String temp;
            String[] tem;
            // Skip weka's five-line output header before the per-instance rows.
            for (int i = 0; i < 5; i++) br.readLine();
            int cnt = 0;
            while ((temp = br.readLine()) != null) {
                if (temp.isEmpty()) continue;
                // Row format: <inst#> <actual> <predicted> ... — take column 3.
                tem = temp.trim().split("\\s+");
                predictedValue[cnt] = Double.parseDouble(tem[2]);
                cnt++;
            }
            if (cnt != predictedValue.length) {
                System.out.println("Need to run weka prediction from command line");
                System.out.println(cmd);
                System.exit(1);
            }
            br.close();
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
        return predictedValue;
    }

    /**
     * Writes the ARFF prediction input for the tags in [startIndex, endIndex).
     * Exits the JVM on I/O failure.
     *
     * @param tgm        tag map supplying the attribute values
     * @param lamdas     Box-Cox lambda parameters, one per attribute
     * @param startIndex first tag index of the block (inclusive)
     * @param endIndex   last tag index of the block (exclusive)
     * @param arffFile   output ARFF file
     */
    private void generateARFFFileS(TagGWASMap tgm, double[] lamdas, int startIndex, int endIndex, File arffFile) {
        try {
            BufferedWriter bw = new BufferedWriter(new FileWriter(arffFile), 65536);
            bw.write("@relation predictionTag\n\n");
            String[] temp = "TagCount\tTagTaxaCount\tGBinomP\tLRatioSB\tLRatioMB\tGNumSigChr\tGNumSigSite\tGNumSigSiteBC\tGSigWidthBC\tGDist".split("\t");
            for (int i = 0; i < temp.length; i++) {
                bw.write("@attribute " + temp[i] + " numeric\n");
            }
            bw.write("\n@data\n");
            int cnt = 0;
            // BUG FIX: the original loop ran i over the FULL tag count while
            // indexing i+startIndex, writing tagCount instances per block and
            // reading past the end of the map for every block after the first.
            // Only the tags of this block, [startIndex, endIndex), belong here —
            // mkPrediction expects exactly endIndex-startIndex predictions.
            for (int i = startIndex; i < endIndex; i++) {
                bw.write(tgm.getTagGWASMapInfo(i).getBoxcoxAttributesStr(lamdas, ","));
                bw.newLine();
                if (cnt % 100000 == 0) System.out.println(String.valueOf(cnt + 1) + " transformed instances are written");
                cnt++;
            }
            bw.flush();
            bw.close();
        }
        catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
        System.out.println("Prediction input file with tag index from " + String.valueOf(startIndex) + " to " + String.valueOf(endIndex) + " is written");
    }

    @Override
    public void setParameters(String[] args) {
        if (args.length == 0) {
            printUsage();
            throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n");
        }
        if (engine == null) {
            engine = new ArgsEngine();
            engine.add("-t", "--tagMap-file", true);
            engine.add("-m", "--ml-model", true);
            engine.add("-b", "--boxcox-file", true);
            engine.add("-w", "--weka-path", true);
            engine.parse(args);
        }
        // All four options are mandatory.
        if (engine.getBoolean("-t")) {
            this.tagMap = engine.getString("-t");
        }
        else {
            printUsage();
            throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n");
        }
        if (engine.getBoolean("-m")) {
            modelFileS = engine.getString("-m");
        }
        else {
            printUsage();
            throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n");
        }
        if (engine.getBoolean("-b")) {
            this.boxcoxParemeterFileS = engine.getString("-b");
        }
        else {
            printUsage();
            throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n");
        }
        if (engine.getBoolean("-w")) {
            this.wekaPath = engine.getString("-w");
        }
        else {
            printUsage();
            throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n");
        }
    }

    @Override
    public ImageIcon getIcon() {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getButtonName() {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public String getToolTipText() {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
| |
/**
*============================================================================
* Copyright The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC, and
* Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
*============================================================================
**/
package org.cagrid.mms.service.impl.cadsr;
import gov.nih.nci.cadsr.umlproject.domain.Project;
import gov.nih.nci.cadsr.umlproject.domain.UMLClassMetadata;
import gov.nih.nci.cagrid.metadata.MetadataUtils;
import gov.nih.nci.cagrid.metadata.ServiceMetadata;
import gov.nih.nci.cagrid.metadata.common.SemanticMetadata;
import gov.nih.nci.cagrid.metadata.common.UMLClass;
import gov.nih.nci.cagrid.metadata.service.CaDSRRegistration;
import gov.nih.nci.cagrid.metadata.service.InputParameter;
import gov.nih.nci.cagrid.metadata.service.Operation;
import gov.nih.nci.cagrid.metadata.service.Service;
import gov.nih.nci.cagrid.metadata.service.ServiceContext;
import gov.nih.nci.system.applicationservice.ApplicationException;
import gov.nih.nci.system.applicationservice.ApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.List;
import java.util.Map;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.xml.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* @author oster
*/
public class ServiceMetadataAnnotator {
protected static Log LOG = LogFactory.getLog(ServiceMetadataAnnotator.class.getName());
private ApplicationService defaultcaDSR = null;
private final Map<String, QualifiedProject> uri2ServiceMap;
public ServiceMetadataAnnotator(Map<String, QualifiedProject> uri2ServiceMap, ApplicationService defaultCaDSR) {
if (defaultCaDSR == null) {
throw new IllegalArgumentException("Cannot supply a null default ApplicationService.");
}
this.uri2ServiceMap = uri2ServiceMap;
this.defaultcaDSR = defaultCaDSR;
}
/**
* Add caDSR information to model.
*
* @param metadata
* @throws CaDSRGeneralException
*/
public void annotateServiceMetadata(ServiceMetadata metadata) throws CaDSRGeneralException {
if (metadata == null || metadata.getServiceDescription() == null
|| metadata.getServiceDescription().getService() == null) {
return;
}
Service service = metadata.getServiceDescription().getService();
// TODO: how to set caDSR registration?
CaDSRRegistration caDSRRegistration = service.getCaDSRRegistration();
// TODO: set/edit service semantic metadata once service's are
// registered in caDSR
SemanticMetadata[] semanticMetadatas = service.getSemanticMetadata();
if (service.getServiceContextCollection() == null
|| service.getServiceContextCollection().getServiceContext() == null) {
return;
}
ServiceContext[] serviceContexts = service.getServiceContextCollection().getServiceContext();
for (int i = 0; i < serviceContexts.length; i++) {
annotateServiceContext(serviceContexts[i]);
}
}
protected void annotateServiceContext(ServiceContext context) throws CaDSRGeneralException {
if (context == null || context.getOperationCollection() == null
|| context.getOperationCollection().getOperation() == null) {
return;
}
Operation[] operations = context.getOperationCollection().getOperation();
for (int i = 0; i < operations.length; i++) {
annotateOperation(operations[i]);
}
}
protected void annotateOperation(Operation operation) throws CaDSRGeneralException {
if (operation == null) {
return;
}
// TODO: set/edit operation semantic metadata once services are
// registered in caDSR
SemanticMetadata[] semanticMetadatas = operation.getSemanticMetadata();
// process input
if (operation.getInputParameterCollection() != null
&& operation.getInputParameterCollection().getInputParameter() != null) {
InputParameter[] inputParameters = operation.getInputParameterCollection().getInputParameter();
for (int i = 0; i < inputParameters.length; i++) {
InputParameter in = inputParameters[i];
QName qname = in.getQName();
UMLClass uml = getUMLClassForQName(qname);
if (uml != null) {
LOG.debug("Successfully processed:" + qname);
in.setUMLClass(uml);
}
}
}
// process output
if (operation.getOutput() != null) {
QName qname = operation.getOutput().getQName();
UMLClass uml = getUMLClassForQName(qname);
if (uml != null) {
LOG.debug("Successfully processed:" + qname);
operation.getOutput().setUMLClass(uml);
}
}
}
/**
* @param qname
* @return The UML Class matching the QName
* @throws CaDSRGeneralException
*/
protected UMLClass getUMLClassForQName(QName qname) throws CaDSRGeneralException {
// look up the UMLClassMetadata we are looking for, based on the QName
UMLClassMetadata classMetadata = getUMLClassMetadataForQName(qname);
if (classMetadata == null) {
return null;
}
UMLClass result = null;
try {
String shortName = classMetadata.getProject().getShortName();
String version = classMetadata.getProject().getVersion();
ApplicationService cadsr = defaultcaDSR;
QualifiedProject proj = uri2ServiceMap.get(qname.getNamespaceURI());
if (proj != null) {
cadsr = proj.getSourceAppServ();
}
result = CaDSRUtils.convertClassToUMLClass(cadsr, shortName, version, classMetadata);
} catch (ApplicationException e) {
LOG.error("Problem converting class to metadata", e);
}
return result;
}
/**
* NOTE: we used to qualify the Project with the ClassificationScheme
* Context name, but there's not really a way to get that information now
* (as Project has a unidirectional assoc to ClassificationScheme, so we
* can't use the dataservice to query for the CS of a given Project); now
* one would need to supply the publicID of the Project of interest if there
* were multiple contexts using the same project name and version
*
* @param qname
* @return The UMLClassMetadata matching the qname
* @throws CaDSRGeneralException
*/
protected UMLClassMetadata getUMLClassMetadataForQName(QName qname) throws CaDSRGeneralException {
ApplicationService cadsr = null;
Project projPrototype = null;
// if there are user supplied mappings, we need to try to use the
// applicationservice and project identifiers from those
if (this.uri2ServiceMap != null) {
LOG.debug("Looking for suitable namespace mapping in supplied map.");
QualifiedProject proj = uri2ServiceMap.get(qname.getNamespaceURI());
// the user supplied a project to be used for this namespace, so get
// the appserv from it, as well as the project prototype
if (proj != null) {
LOG.debug("Using supplied Project (" + proj.getProjectPrototype().getShortName() + ") version ("
+ proj.getProjectPrototype().getVersion() + ") for Qname (" + qname + ").");
cadsr = proj.getSourceAppServ();
projPrototype = proj.getProjectPrototype();
}
}
// we never found a suitable mapping, so use default
if (cadsr == null) {
LOG.debug("No suitable namespace mapping found; using default ApplicationService.");
cadsr = this.defaultcaDSR;
}
// create a prototype class
UMLClassMetadata prototype = new UMLClassMetadata();
// the Project qualifier of the class may have been set above using the
// supplied mappings, but if not, the QName is expected to be uniquely
// used by a single Class
if (projPrototype != null) {
prototype.setProject(projPrototype);
} else {
LOG.debug("No suitable namespace to Project mapping found; issuing a non-project-qualified query.");
}
prototype.setGmeNamespace(qname.getNamespaceURI());
prototype.setGmeXMLElement(qname.getLocalPart());
List rList = null;
try {
rList = cadsr.search(UMLClassMetadata.class, prototype);
} catch (ApplicationException e) {
LOG.error(
"Unable to locate UMLClassMetadata for QName (" + qname + "); skipping because:" + e.getMessage(), e);
return null;
}
if (rList == null || rList.size() == 0) {
LOG.error("Unable to locate UMLClassMetadata for QName (" + qname
+ "); skipping because no results were returned from ApplicationService.");
return null;
}
if (rList.size() > 1) {
LOG.info("Processing of UMLClassMetadata for QName (" + qname
+ ") returned more than 1 result, using first.");
}
return (UMLClassMetadata) rList.get(0);
}
/**
 * Stand-alone annotation driver: prompts the user for a serialized
 * <code>ServiceMetadata</code> file, annotates it against the production
 * caDSR application service, and writes the result beside the input as
 * "&lt;name&gt;_annotated". Exits 0 on success, -1 on any failure.
 *
 * @param args Command line arguments; unused.
 */
public static void main(String[] args) {
    try {
        JFrame f = new JFrame();
        f.setVisible(true);
        JFileChooser fc = new JFileChooser(".");
        fc.showOpenDialog(f);
        File selectedFile = fc.getSelectedFile();
        // The user may cancel the dialog, in which case there is no file.
        if (selectedFile == null) {
            System.err.println("No file selected; exiting.");
            System.exit(0);
        }
        // Close the reader once deserialization is done (was leaked before).
        FileReader reader = new FileReader(selectedFile);
        ServiceMetadata model;
        try {
            model = MetadataUtils.deserializeServiceMetadata(reader);
        } finally {
            reader.close();
        }
        ApplicationService appService = ApplicationServiceProvider
            .getApplicationServiceFromUrl("http://cadsrapi-prod2.nci.nih.gov/cadsrapi40/");
        ServiceMetadataAnnotator anno = new ServiceMetadataAnnotator(null, appService);
        anno.annotateServiceMetadata(model);
        File result = new File(".", selectedFile.getName() + "_annotated");
        // Close the writer so the annotated file is fully flushed to disk.
        FileWriter writer = new FileWriter(result);
        try {
            MetadataUtils.serializeServiceMetadata(model, writer);
        } finally {
            writer.close();
        }
        System.exit(0);
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    }
}
}
| |
/*
* $RCSfile: J2KReadState.java,v $
*
*
* Copyright (c) 2005 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistribution of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN MIDROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL
* NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF
* USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR
* ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL,
* CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND
* REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR
* INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for
* use in the design, construction, operation or maintenance of any
* nuclear facility.
*
* $Revision: 1.8 $
* $Date: 2006-10-03 23:40:14 $
* $State: Exp $
*/
package com.sun.media.imageioimpl.plugins.jpeg2000;
import javax.imageio.IIOException;
import javax.imageio.ImageReader;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.spi.ImageReaderSpi;
import javax.imageio.stream.ImageInputStream;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.ComponentSampleModel;
import java.awt.image.DirectColorModel;
import java.awt.image.IndexColorModel;
import java.awt.image.MultiPixelPackedSampleModel;
import java.awt.image.PixelInterleavedSampleModel;
import java.awt.image.Raster;
import java.awt.image.RenderedImage;
import java.awt.image.SampleModel;
import java.awt.image.SinglePixelPackedSampleModel;
import java.awt.image.WritableRaster;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Hashtable;
import java.util.Iterator;
import jj2000.j2k.quantization.dequantizer.*;
import jj2000.j2k.wavelet.synthesis.*;
import jj2000.j2k.image.invcomptransf.*;
import jj2000.j2k.fileformat.reader.*;
import jj2000.j2k.codestream.reader.*;
import jj2000.j2k.entropy.decoder.*;
import jj2000.j2k.codestream.*;
import jj2000.j2k.decoder.*;
import jj2000.j2k.image.*;
import jj2000.j2k.util.*;
import jj2000.j2k.roi.*;
import jj2000.j2k.io.*;
import jj2000.j2k.*;
import com.sun.media.imageioimpl.common.ImageUtil;
/**
 * Holds the decoding state for a single JPEG 2000 image: the JJ2000
 * decoding chain, the source/destination geometry derived from the read
 * parameters, and the per-component range/shift tables used to convert
 * decoded samples into raster values.
 */
public class J2KReadState {
    /** The input stream we read from */
    private ImageInputStream iis = null;

    // JP2 file format and codestream header state.
    private FileFormatReader ff;
    private HeaderInfo hi;
    private HeaderDecoder hd;
    private RandomAccessIO in;

    // JJ2000 decoding chain, in processing order.
    private BitstreamReaderAgent breader;
    private EntropyDecoder entdec;
    private ROIDeScaler roids;
    private Dequantizer deq;
    private InverseWT invWT;
    private InvCompTransf ictransf;
    private ImgDataConverter converter,converter2;
    private DecoderSpecs decSpec = null;

    private J2KImageReadParamJava j2krparam = null;

    // Band selection taken from the read parameters.
    private int[] destinationBands = null;
    private int[] sourceBands = null;

    private int[] levelShift = null; // level shift for each component
    private int[] minValues = null; // The min values
    private int[] maxValues = null; // The max values
    private int[] fracBits = null; // fractional bits for each component
    private DataBlkInt[] dataBlocks = null; // data-blocks to request data from src

    private int[] bandOffsets = null;
    private int maxDepth = 0; // deepest bit depth among the selected components
    private boolean isSigned = false; // true if any selected component is signed

    private ColorModel colorModel = null; // lazily created, see getColorModel()
    private SampleModel sampleModel = null; // lazily created, see getSampleModel()
    private int nComp = 0;
    private int tileWidth = 0;
    private int tileHeight = 0;

    /** Source to destination transform */
    private int scaleX, scaleY, xOffset, yOffset;
    private Rectangle destinationRegion = null;
    private Point sourceOrigin;

    /** Tile grid offsets of the source, also used for destination. */
    private int tileXOffset, tileYOffset;

    private int width; // destination region width
    private int height; // destination region height

    private int[] pixbuf = null; // reusable line buffer for pixel data
    private byte[] bytebuf = null; // reusable line buffer for binary data
    private int[] channelMap = null; // component index per channel

    private boolean noTransform = true; // true when source region == destination region

    /** The resolution level requested. */
    private int resolution;

    /** The subsampling step sizes. */
    private int stepX, stepY;

    /** Tile step sizes. */
    private int tileStepX, tileStepY;

    private J2KMetadata metadata;
    private BufferedImage destImage; // destination image during a read, else null

    /** Cache the <code>J2KImageReader</code> which creates this object. This
     * variable is used to monitor the abortion.
     */
    private J2KImageReader reader;
/**
 * Constructs a <code>J2KReadState</code> that also caches metadata.
 *
 * @param iis The input stream.
 * @param param The reading parameters.
 * @param metadata The <code>J2KMetadata</code> to cache the metadata read
 *        from the input stream.
 * @param reader The <code>J2KImageReader</code> which holds this state;
 *        needed so decoding can honor abort requests.
 * @throws IllegalArgumentException If <code>iis</code>, <code>param</code>
 *         or <code>metadata</code> is <code>null</code>.
 */
public J2KReadState(ImageInputStream iis,
                    J2KImageReadParamJava param,
                    J2KMetadata metadata,
                    J2KImageReader reader) {
    if (iis == null || param == null || metadata == null) {
        throw new IllegalArgumentException(I18N.getString("J2KReadState0"));
    }

    this.iis = iis;
    this.j2krparam = param;
    this.metadata = metadata;
    this.reader = reader;

    initializeRead(0, param, metadata);
}
/**
 * Constructs a <code>J2KReadState</code> without a metadata cache.
 *
 * @param iis The input stream.
 * @param param The reading parameters.
 * @param reader The <code>J2KImageReader</code> which holds this state;
 *        needed so decoding can honor abort requests.
 * @throws IllegalArgumentException If <code>iis</code> or
 *         <code>param</code> is <code>null</code>.
 */
public J2KReadState(ImageInputStream iis,
                    J2KImageReadParamJava param,
                    J2KImageReader reader) {
    if (iis == null || param == null) {
        throw new IllegalArgumentException(I18N.getString("J2KReadState0"));
    }

    this.iis = iis;
    this.j2krparam = param;
    this.reader = reader;

    initializeRead(0, param, null);
}
/** @return The width of the destination region. */
public int getWidth() throws IOException {
    return width;
}

/** @return The height of the destination region. */
public int getHeight() throws IOException {
    return height;
}

/** @return The codestream header decoder created during initialization. */
public HeaderDecoder getHeader() {
    return hd;
}
/**
 * Decodes the tile at the given tile grid position into
 * <code>raster</code>, allocating a compatible raster when none is
 * supplied. Falls back to {@link #readSubsampledRaster} when a
 * source-to-destination transform is in effect.
 *
 * @param tileX The X index of the tile in the tile grid.
 * @param tileY The Y index of the tile in the tile grid.
 * @param raster The raster to fill, or <code>null</code> to allocate one.
 * @return The raster containing the decoded tile samples.
 * @throws IOException If an error occurs reading from the stream.
 * @throws IllegalArgumentException If the tile indices are out of range.
 */
public Raster getTile(int tileX, int tileY,
                      WritableRaster raster) throws IOException {
    Point nT = ictransf.getNumTiles(null);

    if (noTransform) {
        if (tileX >= nT.x || tileY >= nT.y)
            throw new IllegalArgumentException(I18N.getString("J2KImageReader0"));

        ictransf.setTile(tileX*tileStepX, tileY*tileStepY);

        // The offset of the active tiles is the same for all components,
        // since we don't support different component dimensions.
        int tOffx;
        int tOffy;
        int cTileWidth;
        int cTileHeight;

        // BUGFIX: the original condition read
        //   raster != null && (resolution < min) || stepX != 1 || stepY != 1
        // which, because '&&' binds tighter than '||', dereferenced a null
        // raster whenever subsampling was requested. The grouping below is
        // the intended one: the raster bounds are only consulted when a
        // raster was actually supplied.
        if (raster != null &&
            ((this.resolution < hd.getDecoderSpecs().dls.getMin()) ||
             stepX != 1 || stepY != 1)) {
            tOffx = raster.getMinX();
            tOffy = raster.getMinY();
            cTileWidth = Math.min(raster.getWidth(),
                                  ictransf.getTileWidth());
            cTileHeight = Math.min(raster.getHeight(),
                                   ictransf.getTileHeight());
        } else {
            tOffx = ictransf.getCompULX(0) -
                (ictransf.getImgULX() + ictransf.getCompSubsX(0) - 1) /
                ictransf.getCompSubsX(0) + destinationRegion.x;
            tOffy = ictransf.getCompULY(0)-
                (ictransf.getImgULY() + ictransf.getCompSubsY(0) - 1) /
                ictransf.getCompSubsY(0) + destinationRegion.y;
            cTileWidth = ictransf.getTileWidth();
            cTileHeight = ictransf.getTileHeight();
        }

        if (raster == null)
            raster = Raster.createWritableRaster(sampleModel,
                                                 new Point(tOffx, tOffy));

        int numBands = sampleModel.getNumBands();

        // Clamp the effective tile size to the destination region.
        if (tOffx + cTileWidth >=
            destinationRegion.width + destinationRegion.x)
            cTileWidth =
                destinationRegion.width + destinationRegion.x - tOffx;

        if (tOffy + cTileHeight >=
            destinationRegion.height + destinationRegion.y)
            cTileHeight =
                destinationRegion.height + destinationRegion.y - tOffy;

        //create the line buffer for pixel data if it is not large enough
        // or null
        if (pixbuf == null || pixbuf.length < cTileWidth * numBands)
            pixbuf = new int[cTileWidth * numBands];
        boolean prog = false;

        // Deliver in lines to reduce memory usage
        for (int l=0; l < cTileHeight;l++) {
            if (reader.getAbortRequest())
                break;

            // Request line data
            for (int i = 0; i < numBands; i++) {
                if (reader.getAbortRequest())
                    break;

                DataBlkInt db = dataBlocks[i];
                db.ulx = 0;
                db.uly = l;
                db.w = cTileWidth;
                db.h = 1;
                ictransf.getInternCompData(db, channelMap[sourceBands[i]]);
                prog = prog || db.progressive;

                int[] data = db.data;
                int k1 = db.offset + cTileWidth - 1;

                // Per-component conversion from decoded samples to
                // clamped raster values.
                int fracBit = fracBits[i];
                int lS = levelShift[i];
                int min = minValues[i];
                int max = maxValues[i];

                if (ImageUtil.isBinary(sampleModel)) {
                    // Force min max to 0 and 1.
                    min = 0;
                    max = 1;
                    if (bytebuf == null || bytebuf.length < cTileWidth * numBands)
                        bytebuf = new byte[cTileWidth * numBands];
                    for (int j = cTileWidth - 1;
                         j >= 0; j--) {
                        int tmp = (data[k1--] >> fracBit) + lS;
                        bytebuf[j] =
                            (byte)((tmp < min) ? min :
                                   ((tmp > max) ? max : tmp));
                    }

                    ImageUtil.setUnpackedBinaryData(bytebuf,
                                                    raster,
                                                    new Rectangle(tOffx,
                                                                  tOffy + l,
                                                                  cTileWidth,
                                                                  1));
                } else {
                    for (int j = cTileWidth - 1;
                         j >= 0; j--) {
                        int tmp = (data[k1--] >> fracBit) + lS;
                        pixbuf[j] = (tmp < min) ? min :
                            ((tmp > max) ? max : tmp);
                    }
                    raster.setSamples(tOffx,
                                      tOffy + l,
                                      cTileWidth,
                                      1,
                                      destinationBands[i],
                                      pixbuf);
                }
            }
        }
    } else {
        readSubsampledRaster(raster);
    }

    return raster;
}
/** @return The destination region computed from the read parameters. */
public Rectangle getDestinationRegion() {
    return destinationRegion;
}
/**
 * Decodes the requested region into a <code>BufferedImage</code>, using
 * the destination image from the read parameters when one was supplied.
 *
 * @return The decoded image.
 * @throws IOException If an error occurs reading from the stream.
 */
public BufferedImage readBufferedImage() throws IOException {
    colorModel = getColorModel();
    sampleModel = getSampleModel();
    WritableRaster raster = null;
    BufferedImage image = j2krparam.getDestination();
    int x = destinationRegion.x;
    int y = destinationRegion.y;
    // Temporarily shift the destination region to the requested offset;
    // the original location is restored below after the raster is filled.
    destinationRegion.setLocation(j2krparam.getDestinationOffset());
    if (image == null) {
        // If the destination type is specified, use the color model of it.
        ImageTypeSpecifier type = j2krparam.getDestinationType();
        if (type != null)
            colorModel = type.getColorModel();
        raster = Raster.createWritableRaster(
            sampleModel.createCompatibleSampleModel(destinationRegion.x +
                                                    destinationRegion.width,
                                                    destinationRegion.y +
                                                    destinationRegion.height),
            new Point(0, 0));
        image = new BufferedImage(colorModel, raster,
                                  colorModel.isAlphaPremultiplied(),
                                  new Hashtable());
    } else
        raster = image.getWritableTile(0, 0);
    // Publish the destination image so readSubsampledRaster can emit
    // image-update callbacks, then clear it again.
    destImage = image;
    readSubsampledRaster(raster);
    destinationRegion.setLocation(x, y);
    destImage = null;
    return image;
}
/**
 * Decodes the requested region into a <code>Raster</code> without
 * wrapping it in a <code>BufferedImage</code>.
 *
 * @return The raster holding the decoded samples.
 * @throws IOException If an error occurs reading from the stream.
 */
public Raster readAsRaster() throws IOException {
    // Make sure the lazily-created sample model exists before it is used
    // below. readBufferedImage() does this too, but this method may be
    // called first, which previously risked a NullPointerException.
    sampleModel = getSampleModel();

    BufferedImage image = j2krparam.getDestination();
    WritableRaster raster = null;

    if (image == null) {
        raster = Raster.createWritableRaster(
            sampleModel.createCompatibleSampleModel(destinationRegion.x +
                                                    destinationRegion.width,
                                                    destinationRegion.y +
                                                    destinationRegion.height),
            new Point(0, 0));
    } else
        raster = image.getWritableTile(0, 0);

    readSubsampledRaster(raster);

    return raster;
}
/**
 * Reads the JP2 file-format wrapper and codestream main header, then
 * builds the JJ2000 decoding chain (bitstream reader, entropy decoder,
 * ROI de-scaler, dequantizer, inverse wavelet transform, inverse
 * component transform) and derives the source/destination geometry from
 * the read parameters. Any failure is rethrown as a RuntimeException.
 *
 * @param imageIndex The image index; currently always 0.
 * @param param The read parameters; must not be null.
 * @param metadata The metadata object to populate, or null when metadata
 *        was not requested.
 */
private void initializeRead(int imageIndex, J2KImageReadParamJava param,
                            J2KMetadata metadata) {
    try {
        iis.mark();
        in = new IISRandomAccessIO(iis);

        // **** File Format ****
        // If the codestream is wrapped in the jp2 fileformat, Read the
        // file format wrapper
        ff = new FileFormatReader(in, metadata);
        ff.readFileFormat();
        in.seek(ff.getFirstCodeStreamPos());

        hi = new HeaderInfo();
        try{
            hd = new HeaderDecoder(in, j2krparam, hi);
        } catch(EOFException e){
            throw new RuntimeException(I18N.getString("J2KReadState2"));
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }

        this.width = hd.getImgWidth();
        this.height = hd.getImgHeight();

        Rectangle sourceRegion = param.getSourceRegion();
        sourceOrigin = new Point();
        // NOTE(review): any user-supplied source region is discarded here
        // and replaced by the full image bounds -- confirm this is intended
        // (clipping may be handled by computeRegionsWrapper below).
        sourceRegion =
            new Rectangle(hd.getImgULX(), hd.getImgULY(),
                          this.width, this.height);

        // if the subsample rate for components are not consistent
        boolean compConsistent = true;
        stepX = hd.getCompSubsX(0);
        stepY = hd.getCompSubsY(0);
        // NOTE(review): nComp is still 0 at this point (it is assigned from
        // hd.getNumComps() further below), so this consistency check never
        // executes -- verify. 'compConsistent' is also never used.
        for (int i = 1; i < nComp; i++) {
            if (stepX != hd.getCompSubsX(i) || stepY != hd.getCompSubsY(i))
                throw new RuntimeException(I18N.getString("J2KReadState12"));
        }

        // Get minimum number of resolution levels available across
        // all tile-components.
        int minResLevels = hd.getDecoderSpecs().dls.getMin();

        // Set current resolution level.
        this.resolution = param != null ?
            param.getResolution() : minResLevels;
        if(resolution < 0 || resolution > minResLevels) {
            resolution = minResLevels;
        }

        // Convert source region to lower resolution level.
        if(resolution != minResLevels || stepX != 1 || stepY != 1) {
            sourceRegion =
                J2KImageReader.getReducedRect(sourceRegion, minResLevels,
                                              resolution, stepX, stepY);
        }

        destinationRegion = (Rectangle)sourceRegion.clone();

        J2KImageReader.computeRegionsWrapper(param,
                                             false,
                                             this.width,
                                             this.height,
                                             param.getDestination(),
                                             sourceRegion,
                                             destinationRegion);

        sourceOrigin = new Point(sourceRegion.x, sourceRegion.y);
        scaleX = param.getSourceXSubsampling();
        scaleY = param.getSourceYSubsampling();
        xOffset = param.getSubsamplingXOffset();
        yOffset = param.getSubsamplingYOffset();

        this.width = destinationRegion.width;
        this.height = destinationRegion.height;

        Point tileOffset = hd.getTilingOrigin(null);

        this.tileWidth = hd.getNomTileWidth();
        this.tileHeight = hd.getNomTileHeight();

        // Convert tile 0 to lower resolution level.
        if(resolution != minResLevels || stepX != 1 || stepY != 1) {
            Rectangle tileRect = new Rectangle(tileOffset);
            tileRect.width = tileWidth;
            tileRect.height = tileHeight;
            tileRect =
                J2KImageReader.getReducedRect(tileRect, minResLevels,
                                              resolution, stepX, stepY);
            tileOffset = tileRect.getLocation();
            tileWidth = tileRect.width;
            tileHeight = tileRect.height;
        }

        tileXOffset = tileOffset.x;
        tileYOffset = tileOffset.y;

        // Set the tile step sizes. These values are used because it
        // is possible that tiles will be empty. In particular at lower
        // resolution levels when subsampling is used this may be the
        // case. This method of calculation will work at least for
        // Profile-0 images.
        if(tileWidth*(1 << (minResLevels - resolution))*stepX >
           hd.getNomTileWidth()) {
            tileStepX =
                (tileWidth*(1 << (minResLevels - resolution))*stepX +
                 hd.getNomTileWidth() - 1)/hd.getNomTileWidth();
        } else {
            tileStepX = 1;
        }

        if(tileHeight*(1 << (minResLevels - resolution))*stepY >
           hd.getNomTileHeight()) {
            tileStepY =
                (tileHeight*(1 << (minResLevels - resolution))*stepY +
                 hd.getNomTileHeight() - 1)/hd.getNomTileHeight();
        } else {
            tileStepY = 1;
        }

        if (!destinationRegion.equals(sourceRegion))
            noTransform = false;

        // **** Header decoder ****
        // Instantiate header decoder and read main header
        decSpec = hd.getDecoderSpecs();

        // **** Instantiate decoding chain ****
        // Get demixed bitdepths
        nComp = hd.getNumComps();

        int[] depth = new int[nComp];
        for (int i=0; i<nComp;i++)
            depth[i] = hd.getOriginalBitDepth(i);

        //Get channel mapping
        ChannelDefinitionBox cdb = null;
        if (metadata != null)
            cdb = (ChannelDefinitionBox)metadata.getElement("JPEG2000ChannelDefinitionBox");

        channelMap = new int[nComp];
        if (cdb != null &&
            metadata.getElement("JPEG2000PaletteBox") == null) {
            short[] assoc = cdb.getAssociation();
            short[] types = cdb.getTypes();
            short[] channels = cdb.getChannel();

            for (int i = 0; i < types.length; i++)
                if (types[i] == 0)
                    channelMap[channels[i]] = assoc[i] - 1;
                else if (types[i] == 1 || types[i] == 2)
                    channelMap[channels[i]] = channels[i];
        } else {
            // No channel definition (or a palette is present): identity map.
            for (int i = 0; i < nComp; i++)
                channelMap[i] = i;
        }

        // **** Bitstream reader ****
        try {
            boolean logJJ2000Messages =
                Boolean.getBoolean("jj2000.j2k.decoder.log");
            breader =
                BitstreamReaderAgent.createInstance(in, hd,
                                                    j2krparam, decSpec,
                                                    logJJ2000Messages, hi);
        } catch (IOException e) {
            throw new RuntimeException(I18N.getString("J2KReadState3") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(I18N.getString("J2KReadState4") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        }

        // **** Entropy decoder ****
        try {
            entdec = hd.createEntropyDecoder(breader, j2krparam);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(I18N.getString("J2KReadState5") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        }

        // **** ROI de-scaler ****
        try {
            roids = hd.createROIDeScaler(entdec, j2krparam, decSpec);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(I18N.getString("J2KReadState6") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        }

        // **** Dequantizer ****
        try {
            deq = hd.createDequantizer(roids, depth, decSpec);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(I18N.getString("J2KReadState7") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        }

        // **** Inverse wavelet transform ***
        try {
            // full page inverse wavelet transform
            invWT = InverseWT.createInstance(deq,decSpec);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException(I18N.getString("J2KReadState8") + " " +
                                       ((e.getMessage() != null) ?
                                        (":\n"+e.getMessage()) : ""));
        }

        int res = breader.getImgRes();
        int mrl = decSpec.dls.getMin();
        invWT.setImgResLevel(res);

        // **** Data converter **** (after inverse transform module)
        converter = new ImgDataConverter(invWT,0);

        // **** Inverse component transformation ****
        ictransf = new InvCompTransf(converter, decSpec, depth);

        // If the destination band is set used it
        sourceBands = j2krparam.getSourceBands();

        if (sourceBands == null) {
            sourceBands = new int[nComp];
            for (int i = 0; i < nComp; i++)
                sourceBands[i] = i;
        }

        // From here on nComp is the number of SELECTED bands, not the
        // number of codestream components.
        nComp = sourceBands.length;

        destinationBands = j2krparam.getDestinationBands();
        if (destinationBands == null) {
            destinationBands = new int[nComp];
            for (int i = 0; i < nComp; i++)
                destinationBands[i] = i;
        }

        J2KImageReader.checkReadParamBandSettingsWrapper(param,
                                                         hd.getNumComps(),
                                                         destinationBands.length);

        // Per-band conversion tables used while delivering sample lines.
        levelShift = new int[nComp];
        minValues = new int[nComp];
        maxValues = new int[nComp];
        fracBits = new int[nComp];
        dataBlocks = new DataBlkInt[nComp];

        depth = new int[nComp];
        bandOffsets = new int[nComp];
        maxDepth = 0;
        isSigned = false;
        for (int i=0; i<nComp;i++) {
            depth[i] = hd.getOriginalBitDepth(sourceBands[i]);
            if (depth[i] > maxDepth)
                maxDepth = depth[i];
            dataBlocks[i] = new DataBlkInt();

            //XXX: may need to change if ChannelDefinition is used to
            // define the color channels, such as BGR order
            bandOffsets[i] = i;
            if (hd.isOriginalSigned(sourceBands[i]))
                isSigned = true;
            else {
                levelShift[i] =
                    1<<(ictransf.getNomRangeBits(sourceBands[i])-1);
            }

            // Get the number of bits in the image, and decide what the max
            // value should be, depending on whether it is signed or not
            int nomRangeBits = ictransf.getNomRangeBits(sourceBands[i]);
            maxValues[i] = (1 << (isSigned == true ? (nomRangeBits-1) :
                                  nomRangeBits)) - 1;
            minValues[i] = isSigned ? -(maxValues[i]+1) : 0;

            fracBits[i] = ictransf.getFixedPoint(sourceBands[i]);
        }

        iis.reset();
    } catch (IllegalArgumentException e){
        throw new RuntimeException(e.getMessage(), e);
    } catch (Error e) {
        if(e.getMessage()!=null)
            throw new RuntimeException(e.getMessage(), e);
        else {
            throw new RuntimeException(I18N.getString("J2KReadState9"), e);
        }
    } catch (RuntimeException e) {
        if(e.getMessage()!=null)
            throw new RuntimeException(I18N.getString("J2KReadState10") + " " +
                                       e.getMessage(), e);
        else {
            throw new RuntimeException(I18N.getString("J2KReadState10"), e);
        }
    } catch (Throwable e) {
        throw new RuntimeException(I18N.getString("J2KReadState10"), e);
    }
}
/**
 * Decodes the intersection of the destination region with the raster
 * bounds, applying the source subsampling factors (scaleX/scaleY), and
 * delivers the data tile by tile, line by line, to keep memory usage low.
 * Progress and image-update callbacks are forwarded to the owning reader.
 *
 * @param raster The destination raster, or null to allocate one covering
 *        the destination region.
 * @return The filled raster.
 * @throws IOException If an error occurs reading from the stream.
 */
private Raster readSubsampledRaster(WritableRaster raster) throws IOException {
    if (raster == null)
        raster = Raster.createWritableRaster(
            sampleModel.createCompatibleSampleModel(destinationRegion.x +
                                                    destinationRegion.width,
                                                    destinationRegion.y +
                                                    destinationRegion.height),
            new Point(destinationRegion.x, destinationRegion.y));

    // Note: this local deliberately shadows the 'pixbuf' field.
    int pixbuf[] = null; // line buffer for pixel data
    boolean prog = false; // Flag for progressive data

    Point nT = ictransf.getNumTiles(null);
    int numBands = sourceBands.length;

    Rectangle destRect = raster.getBounds().intersection(destinationRegion);

    int offx = destinationRegion.x;
    int offy = destinationRegion.y;

    // Map the destination rectangle back into source coordinates.
    int sourceSX = (destRect.x - offx) * scaleX + sourceOrigin.x;
    int sourceSY = (destRect.y - offy) * scaleY + sourceOrigin.y;
    int sourceEX = (destRect.width - 1)* scaleX + sourceSX;
    int sourceEY = (destRect.height - 1) * scaleY + sourceSY;

    int startXTile = (sourceSX - tileXOffset) / tileWidth;
    int startYTile = (sourceSY - tileYOffset) / tileHeight;
    int endXTile = (sourceEX - tileXOffset) / tileWidth;
    int endYTile = (sourceEY - tileYOffset) / tileHeight;

    startXTile = clip(startXTile, 0, nT.x - 1);
    startYTile = clip(startYTile, 0, nT.y - 1);
    endXTile = clip(endXTile, 0, nT.x - 1);
    endYTile = clip(endYTile, 0, nT.y - 1);

    int totalXTiles = endXTile - startXTile + 1;
    int totalYTiles = endYTile - startYTile + 1;
    int totalTiles = totalXTiles * totalYTiles;

    // Start the data delivery to the cached consumers tile by tile
    for(int y=startYTile; y <= endYTile; y++){
        if (reader.getAbortRequest())
            break;

        // Loop on horizontal tiles
        for(int x=startXTile; x <= endXTile; x++){
            if (reader.getAbortRequest())
                break;

            // BUGFIX: this was computed with pure integer arithmetic, so
            // the division always truncated to 0 and progress restarted
            // from 0% for every tile. Dividing by a float yields the
            // intended per-tile base fraction.
            float initialFraction =
                (x - startXTile + (y - startYTile)*totalXTiles)/
                (float)totalTiles;

            ictransf.setTile(x*tileStepX,y*tileStepY);

            int sx = hd.getCompSubsX(0);
            int cTileWidth = (ictransf.getTileWidth() + sx - 1)/sx;
            int sy = hd.getCompSubsY(0);
            int cTileHeight = (ictransf.getTileHeight() + sy - 1)/sy;

            // Offsets within the tile.
            int tx = 0;
            int ty = 0;

            // The region for this tile
            int startX = tileXOffset + x * tileWidth;
            int startY = tileYOffset + y * tileHeight;

            // sourceSX is guaranteed to be >= startX
            if (sourceSX > startX) {
                if(startX >= hd.getImgULX()) {
                    tx = sourceSX - startX; // Intra-tile offset.
                    cTileWidth -= tx; // Reduce effective width.
                }
                startX = sourceSX; // Absolute position.
            }

            // sourceSY is guaranteed to be >= startY
            if (sourceSY > startY) {
                if(startY >= hd.getImgULY()) {
                    ty = sourceSY - startY; // Intra-tile offset.
                    cTileHeight -= ty; // Reduce effective width.
                }
                startY = sourceSY; // Absolute position.
            }

            // Decrement dimensions if end position is within tile.
            if (sourceEX < startX + cTileWidth - 1) {
                cTileWidth += sourceEX - startX - cTileWidth + 1;
            }
            if (sourceEY < startY + cTileHeight - 1) {
                cTileHeight += sourceEY - startY - cTileHeight + 1;
            }

            // The start X in the destination
            int x1 = (startX + scaleX - 1 - sourceOrigin.x) / scaleX;
            int x2 = (startX + scaleX -1 + cTileWidth - sourceOrigin.x) /
                scaleX;
            int lineLength = x2 - x1;
            if (pixbuf == null || pixbuf.length < lineLength)
                pixbuf = new int[lineLength]; // line buffer for pixel data
            x2 = (x2 - 1) * scaleX + sourceOrigin.x - startX;

            int y1 = (startY + scaleY -1 - sourceOrigin.y) /scaleY;
            x1 += offx;
            y1 += offy;

            // Deliver in lines to reduce memory usage
            for (int l = ty, m = y1;
                 l < ty + cTileHeight;
                 l += scaleY, m++) {
                if (reader.getAbortRequest())
                    break;
                // Request line data
                for (int i = 0; i < numBands; i++) {
                    DataBlkInt db = dataBlocks[i];
                    db.ulx = tx;
                    db.uly = l;
                    db.w = cTileWidth;
                    db.h = 1;

                    ictransf.getInternCompData(db, channelMap[sourceBands[i]]);
                    prog = prog || db.progressive;

                    int[] data = db.data;
                    int k1 = db.offset + x2;

                    int fracBit = fracBits[i];
                    int lS = levelShift[i];
                    int min = minValues[i];
                    int max = maxValues[i];

                    if (ImageUtil.isBinary(sampleModel)) {
                        // Force min max to 0 and 1.
                        min = 0;
                        max = 1;
                        if (bytebuf == null || bytebuf.length < cTileWidth * numBands)
                            bytebuf = new byte[cTileWidth * numBands];
                        for (int j = lineLength - 1; j >= 0; j--, k1-=scaleX) {
                            int tmp = (data[k1] >> fracBit) + lS;
                            bytebuf[j] =
                                (byte)((tmp < min) ? min :
                                       ((tmp > max) ? max : tmp));
                        }

                        ImageUtil.setUnpackedBinaryData(bytebuf,
                                                        raster,
                                                        new Rectangle(x1,
                                                                      m,
                                                                      lineLength,
                                                                      1));
                    } else {
                        for (int j = lineLength - 1; j >= 0; j--, k1-=scaleX) {
                            int tmp = (data[k1] >> fracBit) + lS;
                            pixbuf[j] = (tmp < min) ? min :
                                ((tmp > max) ? max : tmp);
                        }

                        // Send the line data to the BufferedImage
                        raster.setSamples(x1,
                                          m,
                                          lineLength,
                                          1,
                                          destinationBands[i],
                                          pixbuf);
                    }
                }

                if (destImage != null)
                    reader.processImageUpdateWrapper(destImage, x1, m,
                                                     cTileWidth, 1, 1, 1,
                                                     destinationBands);

                float fraction = initialFraction +
                    (l - ty + 1.0F)/cTileHeight/totalTiles;
                reader.processImageProgressWrapper(100.0f*fraction);
            }
        } // End loop on horizontal tiles
    } // End loop on vertical tiles

    return raster;
}
/**
 * Returns an <code>ImageTypeSpecifier</code> built from this image's
 * color model and sample model, creating both lazily if needed.
 *
 * @return The image type specifier.
 * @throws IOException If an error occurs reading from the stream.
 */
public ImageTypeSpecifier getImageType()
    throws IOException {
    // Populate the cached colorModel/sampleModel fields as a side effect.
    getSampleModel();
    getColorModel();

    return new ImageTypeSpecifier(colorModel, sampleModel);
}
/**
 * Lazily builds and caches the <code>SampleModel</code> that matches the
 * decoded data: a packed model for single-component 1/2/4-bit images,
 * otherwise a band-interleaved model whose transfer type is the narrowest
 * one wide enough for the deepest component.
 *
 * @return The sample model for the image.
 * @throws IllegalArgumentException If the bit depth exceeds 32.
 */
public SampleModel getSampleModel() {
    if (sampleModel != null) {
        return sampleModel;
    }

    if (nComp == 1 && (maxDepth == 1 || maxDepth == 2 || maxDepth == 4)) {
        // Single-component images of 1, 2 or 4 bits pack several pixels
        // into each byte.
        sampleModel = new MultiPixelPackedSampleModel(DataBuffer.TYPE_BYTE,
                                                      tileWidth, tileHeight,
                                                      maxDepth);
        return sampleModel;
    }

    // Pick the transfer type for the interleaved case.
    int dataType;
    if (maxDepth <= 8) {
        dataType = DataBuffer.TYPE_BYTE;
    } else if (maxDepth <= 16) {
        dataType = isSigned ? DataBuffer.TYPE_SHORT : DataBuffer.TYPE_USHORT;
    } else if (maxDepth <= 32) {
        dataType = DataBuffer.TYPE_INT;
    } else {
        throw new IllegalArgumentException(I18N.getString("J2KReadState11")
                                           + " " + maxDepth);
    }

    sampleModel = new PixelInterleavedSampleModel(dataType,
                                                  tileWidth, tileHeight,
                                                  nComp, tileWidth * nComp,
                                                  bandOffsets);
    return sampleModel;
}
/**
 * Returns the <code>ColorModel</code> for the image, preferring one
 * derived from the JP2 color-specification boxes, then one synthesized
 * from the SIZ marker segment, and finally one inferred from the sample
 * model.
 *
 * @return The color model, or <code>null</code> if none could be created.
 */
public ColorModel getColorModel() {
    if (colorModel != null)
        return colorModel;

    // Attempt to get the ColorModel from the JP2 boxes.
    colorModel = ff.getColorModel();
    if (colorModel != null)
        return colorModel;

    if(hi.siz.csiz <= 4) {
        // XXX: Code essentially duplicated from FileFormatReader.getColorModel().
        // Create the ColorModel from the SIZ marker segment parameters.
        ColorSpace cs;
        if(hi.siz.csiz > 2) {
            cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
        } else {
            cs = ColorSpace.getInstance(ColorSpace.CS_GRAY);
        }

        int[] bitsPerComponent = new int[hi.siz.csiz];
        boolean isSigned = false;
        int maxBitDepth = -1;
        for(int i = 0; i < hi.siz.csiz; i++) {
            bitsPerComponent[i] = hi.siz.getOrigBitDepth(i);
            if(maxBitDepth < bitsPerComponent[i]) {
                maxBitDepth = bitsPerComponent[i];
            }
            isSigned |= hi.siz.isOrigSigned(i);
        }

        // NOTE(review): an even component count (2 or 4) is treated as
        // "color + alpha". This is a heuristic, not information read from
        // the file -- confirm it holds for the inputs you care about.
        boolean hasAlpha = hi.siz.csiz % 2 == 0;

        int type = -1;

        if (maxBitDepth <= 8) {
            type = DataBuffer.TYPE_BYTE;
        } else if (maxBitDepth <= 16) {
            type = isSigned ? DataBuffer.TYPE_SHORT : DataBuffer.TYPE_USHORT;
        } else if (maxBitDepth <= 32) {
            type = DataBuffer.TYPE_INT;
        }

        if (type != -1) {
            if(hi.siz.csiz == 1 &&
               (maxBitDepth == 1 || maxBitDepth == 2 || maxBitDepth == 4)) {
                colorModel = ImageUtil.createColorModel(getSampleModel());
            } else {
                colorModel = new ComponentColorModel(cs,
                                                     bitsPerComponent,
                                                     hasAlpha,
                                                     false,
                                                     hasAlpha ?
                                                     Transparency.TRANSLUCENT :
                                                     Transparency.OPAQUE ,
                                                     type);
            }

            return colorModel;
        }
    }

    if(sampleModel == null) {
        sampleModel = getSampleModel();
    }

    if (sampleModel == null)
        return null;

    return ImageUtil.createColorModel(null, sampleModel);
}
/**
 * Returns the bounding rectangle of the upper left tile at
 * the current resolution level.
 *
 * @return The tile-0 bounds in destination coordinates.
 */
Rectangle getTile0Rect() {
    return new Rectangle(tileXOffset, tileYOffset, tileWidth, tileHeight);
}
/**
 * Clamps <code>value</code> to the inclusive range
 * [<code>min</code>, <code>max</code>] (lower bound applied first, as in
 * the original, so <code>max</code> wins if the bounds are inverted).
 */
private int clip(int value, int min, int max) {
    return Math.min(Math.max(value, min), max);
}
/**
 * Moves the left/top edges of <code>dest</code> forward so they do not
 * precede the destination offset, shrinking the width/height by the
 * amount clipped away.
 *
 * @param dest The rectangle to clip in place.
 */
private void clipDestination(Rectangle dest) {
    Point offset = j2krparam.getDestinationOffset();

    int dx = offset.x - dest.x;
    if (dx > 0) {
        dest.width -= dx;
        dest.x = offset.x;
    }

    int dy = offset.y - dest.y;
    if (dy > 0) {
        dest.height -= dy;
        dest.y = offset.y;
    }
}
}
| |
package gridwhack.gameobject.character;
import java.awt.*;
import java.io.Console;
import java.util.ArrayList;
import gridwhack.RandomProvider;
import gridwhack.base.BaseObject;
import gridwhack.gameobject.grid.Grid;
import gridwhack.gameobject.loot.Loot;
import gridwhack.gameobject.unit.Unit;
import gridwhack.gui.character.HealthBar;
import gridwhack.path.Path.Step;
import gridwhack.util.Vector2;
public abstract class NPCCharacter extends Character
{
// ----------
// Properties
// ----------
protected HealthBar healthBar;
protected Character target;
// -------
// Methods
// -------
/**
* Creates the object.
*/
public NPCCharacter()
{
super();
// Create a health bar to represent the character health.
healthBar = new HealthBar(0, 0, 30, 2, this);
}
/**
* @return the closest hostile character.
*/
public Character getClosestVisibleHostileCharacter()
{
Character closest = null;
int lowestCost = 0;
ArrayList<Unit> units = grid.getVisibleUnits(this);
// loop through all the visible units.
for (Unit unit : units)
{
if( unit!=this )
{
Character target = (Character) unit;
int cost = getDistanceCost(target);
// make sure the character is hostile.
if (isHostile(target))
{
// check if we have no closest character or if the character is closer
// than the character currently marked as the closest character.
if (closest == null || cost < lowestCost)
{
lowestCost = cost;
closest = target;
}
}
}
}
return closest;
}
/**
* Calculates the distance to a specific character.
* @param target the character to get the distance to.
* @return the distance in grid cells.
*/
public int getDistanceCost(Unit target)
{
Vector2 position = getGridPosition();
Vector2 targetPosition = target.getGridPosition();
// use pythagorean theorem to determine the distance. (a^2 + b^2 = c^2)
// might not be the perfect solution but good enough for now.
int cost = (int) Math.round( Math.sqrt( position.distance(targetPosition) ) );
return cost;
}
/**
* Moves the character across its path.
*/
public void moveAlongPath()
{
// Make sure we have a path.
if (path != null)
{
// Make sure there is a next step.
if (path.hasNextStep())
{
Step step = path.getNextStep();
Vector2 position = getGridPosition();
// Calculate the deltas.
int dgx = step.getX() - (int) position.x;
int dgy = step.getY() - (int) position.y;
grid.moveUnit(dgx, dgy, this);
}
// Clear the path once its moved across.
else
{
path = null;
}
}
}
/**
* Returns whether the current path is valid.
* @return whether the path is valid.
*/
public boolean isPathValid()
{
int pathLength = path.getLength();
Step step = path.getStep(pathLength-1);
Character target = getTarget();
Vector2 targetPosition = target.getGridPosition();
// compare the coordinates of the last step in the path
// to the target coordinates.
// TODO: Improve GridPath to take into account cell size automatically.
if( (step.getX() * grid.getCellSize() == targetPosition.x)
&& (step.getY() * grid.getCellSize() == targetPosition.y))
{
return true;
}
// path is invalid.
return false;
}
/**
 * Performs one movement action for the character: follows the current path
 * when one exists, otherwise wanders one step in a random direction.
 */
public void move()
{
    // Do nothing while movement is disallowed.
    if (!movementAllowed())
    {
        return;
    }
    if (path != null)
    {
        // Follow the planned path and record that the character moved.
        moveAlongPath();
        markMoved();
    }
    else
    {
        // No path to follow: pick a direction uniformly at random
        // (no further logic) and step that way.
        Directions[] choices = Directions.values();
        Directions direction = choices[RandomProvider.getRand().nextInt(choices.length)];
        move(direction);
    }
}
/**
 * Marks the character dead. Drops loot at the character's position first,
 * then delegates the actual death bookkeeping to the superclass.
 */
public synchronized void markDead()
{
    this.createLoot();
    super.markDead();
}
/**
 * Generates random loot and, when the roll produced at least one item,
 * places it on the grid at this character's current position.
 */
protected void createLoot()
{
    Loot drop = new Loot();
    // Roll for a random set of items.
    drop.createRandomItems();
    // Only place a loot container that actually holds something.
    if (drop.getItemCount() > 0)
    {
        drop.setGridPosition(getGridPosition());
        grid.addLoot(drop);
    }
}
/**
 * Updates the character: engages the closest visible hostile character as
 * the target, (re)plans a path towards it when needed, then moves.
 *
 * @param parent The parent object.
 */
public void update(BaseObject parent)
{
    super.update(parent);
    // Always engage the closest visible hostile character, if there is one.
    Character hostile = getClosestVisibleHostileCharacter();
    if (hostile != null)
    {
        setTarget(hostile);
        // Only spawn a new path when there is none, or the existing one
        // no longer leads to the target.
        boolean needsNewPath = (path == null) || !isPathValid();
        if (needsNewPath)
        {
            path = getPath(hostile.getGridX(), hostile.getGridY(), getViewRange());
        }
    }
    // Perform the movement for this update tick.
    move();
}
/**
 * Draws the character together with its health bar. Dead characters are
 * not rendered at all.
 *
 * @param g The graphics context.
 */
@Override
public void draw(Graphics2D g)
{
    // Dead characters are invisible.
    if (dead)
    {
        return;
    }
    super.draw(g);
    healthBar.draw(g);
    // Path rendering intentionally disabled; re-enable for path debugging:
    // if (path != null) { path.draw(g); }
}
/**
 * @return the character to target; may be null when no target has been set.
 */
public Character getTarget()
{
    return target;
}
/**
 * Sets the character to target.
 *
 * @param target character to target.
 */
public void setTarget(Character target)
{
    this.target = target;
}
}
| |
/*
* Copyright (c) 2010 Jeff Schnitzer.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.googlecode.batchfb.impl;
import java.lang.reflect.Constructor;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.fasterxml.jackson.databind.JsonNode;
import com.googlecode.batchfb.Batcher;
import com.googlecode.batchfb.Later;
import com.googlecode.batchfb.err.FacebookException;
import com.googlecode.batchfb.err.OAuthException;
import com.googlecode.batchfb.err.PageMigratedException;
import com.googlecode.batchfb.err.PermissionException;
import com.googlecode.batchfb.err.QueryParseException;
import com.googlecode.batchfb.util.LaterWrapper;
/**
* <p>Detects a Facebook error in the JSON result from a Graph API request and throws
* the correct kind of exception, whatever that happens to be. It tries to match the
* type of the exception with an actual exception class of the correct name.</p>
*
* <p>In addition, this detects the REALLY WEIRD cases where Facebook just spits back
* "false". We translate that into a null.</p>
*
* <p>If there was no error, this wrapper just passes through normally.</p>
*
* <p>Facebook coughs up errors in at least three different formats. This
* detects them all.</p>
*/
public class ErrorDetectingWrapper extends LaterWrapper<JsonNode, JsonNode>
{
    /**
     * @param orig the wrapped result whose JSON will be inspected for errors.
     */
    public ErrorDetectingWrapper(Later<JsonNode> orig) {
        super(orig);
    }

    /**
     * Inspects the JSON result for every known Facebook error shape and throws
     * the most specific {@link FacebookException} subclass that applies;
     * otherwise passes the node through unchanged.
     *
     * @return null when Facebook returned a bare "false", otherwise the node.
     */
    @Override
    protected JsonNode convert(JsonNode node) {
        // Hopefully a simple "false" at the top level is never a legitimate value... it seems that it should be mapped
        // to null. It happens (among other times) when fetching multiple items and you don't have permission on one of them.
        if (node == null || node.isBoolean() && !node.booleanValue())
            return null;

        this.checkForStandardGraphError(node);
        this.checkForBatchError(node);
        this.checkForOldRestStyleError(node);

        return node;
    }

    /**
     * The basic graph error looks like this:
     <pre>
     {
       error: {
         type: "OAuthException"
         message: "Error validating application."
       }
     }
     </pre>
     */
    protected void checkForStandardGraphError(JsonNode node) {
        JsonNode errorNode = node.get("error");
        if (errorNode != null) {
            // If we're missing type or message, it must be some other kind of error.
            // BUGFIX: JsonNode.path(...).asText() returns "" -- never null -- when
            // the field is absent, so the original null checks were dead code and
            // malformed errors fell through to a bogus FacebookException. Passing
            // a null default to asText() restores the intended "field missing" test.
            String type = errorNode.path("type").asText(null);
            if (type == null)
                return;

            String msg = errorNode.path("message").asText(null);
            if (msg == null)
                return;

            // Special case, permission exceptions are poorly structured
            if (msg.startsWith("(#200)"))
                throw new PermissionException(msg);

            // Special case, migration exceptions are poorly structured
            if (msg.startsWith("(#21)"))
                this.throwPageMigratedException(msg);

            // We check to see if we have an exception that matches the type, otherwise
            // we simply throw the base FacebookException
            String proposedExceptionType = Batcher.class.getPackage().getName() + ".err." + type;

            try {
                Class<?> exceptionClass = Class.forName(proposedExceptionType);
                Constructor<?> ctor = exceptionClass.getConstructor(String.class);
                throw (FacebookException)ctor.newInstance(msg);
            } catch (FacebookException e) {
                // Reflective construction succeeded; propagate the specific type.
                throw e;
            } catch (Exception e) {
                // No matching exception class (or it couldn't be instantiated);
                // fall back to the generic base exception.
                throw new FacebookException(type + ": " + msg);
            }
        }
    }

    /** Matches IDs in the error msg */
    private static final Pattern ID_PATTERN = Pattern.compile("ID [0-9]+");

    /**
     * Builds the proper exception and throws it.
     * @throws PageMigratedException always
     */
    private void throwPageMigratedException(String msg)
    {
        // This SUCKS ASS. Messages look like:
        // (#21) Page ID 114267748588304 was migrated to page ID 111013272313096. Please update your API calls to the new ID
        Matcher matcher = ID_PATTERN.matcher(msg);
        long oldId = this.extractNextId(matcher, msg);
        long newId = this.extractNextId(matcher, msg);

        throw new PageMigratedException(msg, oldId, newId);
    }

    /**
     * Gets the next id out of the matcher.
     * @throws IllegalStateException when the message no longer contains an "ID <digits>" token
     */
    private long extractNextId(Matcher matcher, String msg)
    {
        if (!matcher.find())
            throw new IllegalStateException("Facebook changed the error msg for page migration to something unfamiliar. The new msg is: " + msg);

        String idStr = matcher.group().substring("ID ".length());
        return Long.parseLong(idStr);
    }

    /**
     * The batch call itself seems to have a funky error format:
     *
     * {"error":190,"error_description":"Invalid OAuth access token signature."}
     */
    protected void checkForBatchError(JsonNode root) {
        JsonNode errorCode = root.get("error");
        if (errorCode != null) {
            JsonNode errorDescription = root.get("error_description");
            if (errorDescription != null) {
                int code = errorCode.intValue();
                String msg = errorDescription.asText();
                this.throwCodeAndMessage(code, msg);
            }
        }
    }

    /**
     * Old-style calls, including multiquery, have their own wacky error format
     * with top-level "error_code" and "error_msg" fields (plus request_args).
     *
     * The code interpretations rely heavily on http://wiki.developers.facebook.com/index.php/Error_codes
     */
    protected void checkForOldRestStyleError(JsonNode node) {
        JsonNode errorCode = node.get("error_code");
        if (errorCode != null) {
            int code = errorCode.intValue();
            String msg = node.path("error_msg").asText();
            this.throwCodeAndMessage(code, msg);
        }
    }

    /**
     * Throw the appropriate exception for the given legacy code and message.
     * Always throws, never returns.
     */
    protected void throwCodeAndMessage(int code, String msg) {
        switch (code) {
            case 0:
            case 101:
            case 102:
            case 190: throw new OAuthException(msg);

            default:
                // 2xx are permission errors, 6xx are FQL parse errors per the
                // legacy error-code table; everything else is generic.
                if (code >= 200 && code < 300)
                    throw new PermissionException(msg);
                else if (code >= 600 && code < 700)
                    throw new QueryParseException(msg);
                else
                    throw new FacebookException(msg + " (code " + code +")");
        }
    }
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jabarasca.financial_app.utils;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.graphics.Rect;
import android.os.SystemClock;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewPropertyAnimator;
import android.widget.AbsListView;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A {@link View.OnTouchListener} that makes the list items in a {@link ListView}
* dismissable. {@link ListView} is given special treatment because by default it handles touches
* for its list items... i.e. it's in charge of drawing the pressed state (the list selector),
* handling list item clicks, etc.
*
* <p>After creating the listener, the caller should also call
* {@link ListView#setOnScrollListener(AbsListView.OnScrollListener)}, passing
* in the scroll listener returned by {@link #makeScrollListener()}. If a scroll listener is
* already assigned, the caller should still pass scroll changes through to this listener. This will
* ensure that this {@link SwipeDismissListViewTouchListener} is paused during list view
* scrolling.</p>
*
* <p>Example usage:</p>
*
* <pre>
* SwipeDismissListViewTouchListener touchListener =
* new SwipeDismissListViewTouchListener(
* listView,
* new SwipeDismissListViewTouchListener.OnDismissCallback() {
* public void onDismiss(ListView listView, int[] reverseSortedPositions) {
* for (int position : reverseSortedPositions) {
* adapter.remove(adapter.getItem(position));
* }
* adapter.notifyDataSetChanged();
* }
* });
* listView.setOnTouchListener(touchListener);
* listView.setOnScrollListener(touchListener.makeScrollListener());
* </pre>
*
* <p>This class Requires API level 12 or later due to use of {@link
* ViewPropertyAnimator}.</p>
*
* <p>For a generalized {@link View.OnTouchListener} that makes any view dismissable,
* see {@link SwipeDismissTouchListener}.</p>
*
* @see SwipeDismissTouchListener
*/
public class SwipeDismissListViewTouchListener implements View.OnTouchListener {
    // Cached ViewConfiguration and system-wide constant values
    private int mSlop;              // minimum px of movement before a touch counts as a drag
    private int mMinFlingVelocity;  // minimum velocity (px/s) for a fling-dismiss; boosted 16x in ctor
    private int mMaxFlingVelocity;  // maximum velocity considered a deliberate fling
    private long mAnimationTime;    // duration of dismiss/cancel animations

    // Fixed properties
    private ListView mListView;
    private DismissCallbacks mCallbacks;
    private int mViewWidth = 1; // 1 and not 0 to prevent dividing by zero

    // Transient properties
    private List<PendingDismissData> mPendingDismisses = new ArrayList<PendingDismissData>();
    private int mDismissAnimationRefCount = 0; // number of dismiss animations still in flight
    private float mDownX;   // raw coordinates of the initial ACTION_DOWN
    private float mDownY;
    private boolean mSwiping;       // true once the current gesture is being treated as a swipe
    private int mSwipingSlop;       // +/- slop offset so the item doesn't jump when swiping starts
    private VelocityTracker mVelocityTracker; // non-null only while tracking a dismissable touch
    private int mDownPosition;      // list position of the touched item
    private View mDownView;         // child view found under the initial touch
    private boolean mPaused;        // true while dismiss handling is disabled (e.g. during scroll)

    /**
     * The callback interface used by {@link SwipeDismissListViewTouchListener} to inform its client
     * about a successful dismissal of one or more list item positions.
     */
    public interface DismissCallbacks {
        /**
         * Called to determine whether the given position can be dismissed.
         */
        boolean canDismiss(int position);

        /**
         * Called when the user has indicated they she would like to dismiss one or more list item
         * positions.
         *
         * @param listView The originating {@link ListView}.
         * @param reverseSortedPositions An array of positions to dismiss, sorted in descending
         * order for convenience.
         */
        void onDismiss(ListView listView, int[] reverseSortedPositions);
    }

    /**
     * Constructs a new swipe-to-dismiss touch listener for the given list view.
     *
     * @param listView The list view whose items should be dismissable.
     * @param callbacks The callback to trigger when the user has indicated that she would like to
     * dismiss one or more list items.
     */
    public SwipeDismissListViewTouchListener(ListView listView, DismissCallbacks callbacks) {
        ViewConfiguration vc = ViewConfiguration.get(listView.getContext());
        mSlop = vc.getScaledTouchSlop();
        // 16x boost: presumably to require a decisively fast fling before
        // dismissing -- TODO confirm against upstream sample.
        mMinFlingVelocity = vc.getScaledMinimumFlingVelocity() * 16;
        mMaxFlingVelocity = vc.getScaledMaximumFlingVelocity();
        mAnimationTime = listView.getContext().getResources().getInteger(
                android.R.integer.config_shortAnimTime);
        mListView = listView;
        mCallbacks = callbacks;
    }

    /**
     * Enables or disables (pauses or resumes) watching for swipe-to-dismiss gestures.
     *
     * @param enabled Whether or not to watch for gestures.
     */
    public void setEnabled(boolean enabled) {
        mPaused = !enabled;
    }

    /**
     * Returns an {@link AbsListView.OnScrollListener} to be added to the {@link
     * ListView} using {@link ListView#setOnScrollListener(AbsListView.OnScrollListener)}.
     * If a scroll listener is already assigned, the caller should still pass scroll changes through
     * to this listener. This will ensure that this {@link SwipeDismissListViewTouchListener} is
     * paused during list view scrolling.</p>
     *
     * @see SwipeDismissListViewTouchListener
     */
    public AbsListView.OnScrollListener makeScrollListener() {
        return new AbsListView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(AbsListView absListView, int scrollState) {
                // Pause swipe handling while the user is touch-scrolling the list.
                setEnabled(scrollState != AbsListView.OnScrollListener.SCROLL_STATE_TOUCH_SCROLL);
            }

            @Override
            public void onScroll(AbsListView absListView, int i, int i1, int i2) {
            }
        };
    }

    /**
     * Touch-gesture state machine: DOWN captures the candidate child view,
     * MOVE decides whether the gesture is a horizontal swipe and tracks it,
     * UP commits a dismiss (by distance or fling) or animates a cancel, and
     * CANCEL restores the item and resets all transient state.
     */
    @Override
    public boolean onTouch(View view, MotionEvent motionEvent) {
        if (mViewWidth < 2) {
            // Lazily capture the list width once it has been laid out.
            mViewWidth = mListView.getWidth();
        }

        switch (motionEvent.getActionMasked()) {
            case MotionEvent.ACTION_DOWN: {
                if (mPaused) {
                    return false;
                }

                // TODO: ensure this is a finger, and set a flag

                // Find the child view that was touched (perform a hit test)
                Rect rect = new Rect();
                int childCount = mListView.getChildCount();
                int[] listViewCoords = new int[2];
                mListView.getLocationOnScreen(listViewCoords);
                // Convert the raw screen coordinates into list-local coordinates.
                int x = (int) motionEvent.getRawX() - listViewCoords[0];
                int y = (int) motionEvent.getRawY() - listViewCoords[1];
                View child;
                for (int i = 0; i < childCount; i++) {
                    child = mListView.getChildAt(i);
                    child.getHitRect(rect);
                    if (rect.contains(x, y)) {
                        mDownView = child;
                        break;
                    }
                }

                if (mDownView != null) {
                    mDownX = motionEvent.getRawX();
                    mDownY = motionEvent.getRawY();
                    mDownPosition = mListView.getPositionForView(mDownView);
                    if (mCallbacks.canDismiss(mDownPosition)) {
                        // Only track velocity for items the client allows dismissing.
                        mVelocityTracker = VelocityTracker.obtain();
                        mVelocityTracker.addMovement(motionEvent);
                    } else {
                        mDownView = null;
                    }
                }
                return false;
            }

            case MotionEvent.ACTION_CANCEL: {
                if (mVelocityTracker == null) {
                    break;
                }

                if (mDownView != null && mSwiping) {
                    // cancel
                    mDownView.animate()
                            .translationX(0)
                            .alpha(1)
                            .setDuration(mAnimationTime)
                            .setListener(null);
                }
                // Release the tracker and reset all per-gesture state.
                mVelocityTracker.recycle();
                mVelocityTracker = null;
                mDownX = 0;
                mDownY = 0;
                mDownView = null;
                mDownPosition = ListView.INVALID_POSITION;
                mSwiping = false;
                break;
            }

            case MotionEvent.ACTION_UP: {
                if (mVelocityTracker == null) {
                    break;
                }

                float deltaX = motionEvent.getRawX() - mDownX;
                mVelocityTracker.addMovement(motionEvent);
                mVelocityTracker.computeCurrentVelocity(1000);
                float velocityX = mVelocityTracker.getXVelocity();
                float absVelocityX = Math.abs(velocityX);
                float absVelocityY = Math.abs(mVelocityTracker.getYVelocity());
                boolean dismiss = false;
                boolean dismissRight = false;
                // Dismiss when dragged past half the list width...
                if (Math.abs(deltaX) > mViewWidth / 2 && mSwiping) {
                    dismiss = true;
                    dismissRight = deltaX > 0;
                // ...or when flung horizontally within the velocity window.
                } else if (mMinFlingVelocity <= absVelocityX && absVelocityX <= mMaxFlingVelocity
                        && absVelocityY < absVelocityX && mSwiping) {
                    // dismiss only if flinging in the same direction as dragging
                    dismiss = (velocityX < 0) == (deltaX < 0);
                    dismissRight = mVelocityTracker.getXVelocity() > 0;
                }
                if (dismiss && mDownPosition != ListView.INVALID_POSITION) {
                    // dismiss
                    final View downView = mDownView; // mDownView gets null'd before animation ends
                    final int downPosition = mDownPosition;
                    ++mDismissAnimationRefCount;
                    mDownView.animate()
                            .translationX(dismissRight ? mViewWidth : -mViewWidth)
                            .alpha(0)
                            .setDuration(mAnimationTime)
                            .setListener(new AnimatorListenerAdapter() {
                                @Override
                                public void onAnimationEnd(Animator animation) {
                                    performDismiss(downView, downPosition);
                                }
                            });
                } else {
                    // cancel
                    mDownView.animate()
                            .translationX(0)
                            .alpha(1)
                            .setDuration(mAnimationTime)
                            .setListener(null);
                }
                // Release the tracker and reset all per-gesture state.
                mVelocityTracker.recycle();
                mVelocityTracker = null;
                mDownX = 0;
                mDownY = 0;
                mDownView = null;
                mDownPosition = ListView.INVALID_POSITION;
                mSwiping = false;
                break;
            }

            case MotionEvent.ACTION_MOVE: {
                if (mVelocityTracker == null || mPaused) {
                    break;
                }

                mVelocityTracker.addMovement(motionEvent);
                float deltaX = motionEvent.getRawX() - mDownX;
                float deltaY = motionEvent.getRawY() - mDownY;
                // Treat as a swipe once horizontal motion exceeds the slop and
                // is clearly dominant over vertical motion.
                if (Math.abs(deltaX) > mSlop && Math.abs(deltaY) < Math.abs(deltaX) / 2) {
                    mSwiping = true;
                    mSwipingSlop = (deltaX > 0 ? mSlop : -mSlop);
                    mListView.requestDisallowInterceptTouchEvent(true);

                    // Cancel ListView's touch (un-highlighting the item)
                    MotionEvent cancelEvent = MotionEvent.obtain(motionEvent);
                    cancelEvent.setAction(MotionEvent.ACTION_CANCEL |
                            (motionEvent.getActionIndex()
                                    << MotionEvent.ACTION_POINTER_INDEX_SHIFT));
                    mListView.onTouchEvent(cancelEvent);
                    cancelEvent.recycle();
                }

                if (mSwiping) {
                    // Track the finger, fading the item out the further it moves.
                    mDownView.setTranslationX(deltaX - mSwipingSlop);
                    mDownView.setAlpha(Math.max(0f, Math.min(1f,
                            1f - 2f * Math.abs(deltaX) / mViewWidth)));
                    return true;
                }
                break;
            }
        }
        return false;
    }

    // Records an item pending removal so dismissals can be batched and
    // reported to the client in a single callback.
    class PendingDismissData implements Comparable<PendingDismissData> {
        public int position;
        public View view;

        public PendingDismissData(int position, View view) {
            this.position = position;
            this.view = view;
        }

        @Override
        public int compareTo(PendingDismissData other) {
            // Sort by descending position
            return other.position - position;
        }
    }

    private void performDismiss(final View dismissView, final int dismissPosition) {
        // Animate the dismissed list item to zero-height and fire the dismiss callback when
        // all dismissed list item animations have completed. This triggers layout on each animation
        // frame; in the future we may want to do something smarter and more performant.
        final ViewGroup.LayoutParams lp = dismissView.getLayoutParams();
        final int originalHeight = dismissView.getHeight();

        ValueAnimator animator = ValueAnimator.ofInt(originalHeight, 1).setDuration(mAnimationTime);

        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                --mDismissAnimationRefCount;
                if (mDismissAnimationRefCount == 0) {
                    // No active animations, process all pending dismisses.
                    // Sort by descending position
                    Collections.sort(mPendingDismisses);

                    int[] dismissPositions = new int[mPendingDismisses.size()];
                    for (int i = mPendingDismisses.size() - 1; i >= 0; i--) {
                        dismissPositions[i] = mPendingDismisses.get(i).position;
                    }
                    mCallbacks.onDismiss(mListView, dismissPositions);

                    // Reset mDownPosition to avoid MotionEvent.ACTION_UP trying to start a dismiss
                    // animation with a stale position
                    mDownPosition = ListView.INVALID_POSITION;

                    ViewGroup.LayoutParams lp;
                    for (PendingDismissData pendingDismiss : mPendingDismisses) {
                        // Reset view presentation
                        pendingDismiss.view.setAlpha(1f);
                        pendingDismiss.view.setTranslationX(0);
                        lp = pendingDismiss.view.getLayoutParams();
                        lp.height = originalHeight;
                        pendingDismiss.view.setLayoutParams(lp);
                    }

                    // Send a cancel event
                    long time = SystemClock.uptimeMillis();
                    MotionEvent cancelEvent = MotionEvent.obtain(time, time,
                            MotionEvent.ACTION_CANCEL, 0, 0, 0);
                    mListView.dispatchTouchEvent(cancelEvent);

                    mPendingDismisses.clear();
                }
            }
        });

        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator valueAnimator) {
                // Shrink the row's height frame by frame until it collapses.
                lp.height = (Integer) valueAnimator.getAnimatedValue();
                dismissView.setLayoutParams(lp);
            }
        });

        mPendingDismisses.add(new PendingDismissData(dismissPosition, dismissView));
        animator.start();
    }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.query.window;
import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.query.output.callback.QueryCallback;
import io.siddhi.core.stream.input.InputHandler;
import io.siddhi.core.stream.output.StreamCallback;
import io.siddhi.core.util.EventPrinter;
import org.apache.log4j.Logger;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class TimeWindowTestCase {
    private static final Logger log = Logger.getLogger(TimeWindowTestCase.class);
    private int inEventCount;       // count of current events delivered to the callback
    private int removeEventCount;   // count of expired events delivered to the callback
    private boolean eventArrived;   // set once any callback fires

    /** Resets the shared counters before every test method. */
    @BeforeMethod
    public void init() {
        inEventCount = 0;
        removeEventCount = 0;
        eventArrived = false;
    }

    /**
     * Sends two events into a 2-second time window and verifies both arrive
     * as current events and later expire as remove events, with arrivals
     * always preceding expiries.
     */
    @Test
    public void timeWindowTest1() throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();

        String cseEventStream = "" +
                "define stream cseEventStream (symbol string, price float, volume int);";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(2 sec) " +
                "select symbol,price,volume " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(cseEventStream + query);

        siddhiAppRuntime.addCallback("query1", new QueryCallback() {
            @Override
            public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                EventPrinter.print(timestamp, inEvents, removeEvents);
                if (inEvents != null) {
                    inEventCount = inEventCount + inEvents.length;
                }
                if (removeEvents != null) {
                    // Expiry must never outrun arrival in a time window.
                    AssertJUnit.assertTrue("InEvents arrived before RemoveEvents", inEventCount > removeEventCount);
                    removeEventCount = removeEventCount + removeEvents.length;
                }
                eventArrived = true;
            }

        });

        InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
        siddhiAppRuntime.start();
        inputHandler.send(new Object[]{"IBM", 700f, 0});
        inputHandler.send(new Object[]{"WSO2", 60.5f, 1});
        // Wait well past the 2 sec window so both events expire.
        Thread.sleep(4000);
        AssertJUnit.assertEquals(2, inEventCount);
        AssertJUnit.assertEquals(2, removeEventCount);
        AssertJUnit.assertTrue(eventArrived);
        siddhiAppRuntime.shutdown();
    }

    /**
     * Sends three batches of events spaced just over the 1-second window and
     * verifies all six arrive and all six expire. (Previously noted as an
     * intermittently failing test case; timing-sensitive.)
     *
     * @throws InterruptedException throw exception if interrupted the input handler sender.
     */
    @Test
    public void timeWindowTest2() throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();

        String cseEventStream = "define stream cseEventStream (symbol string, price float, volume int);";
        String query = "@info(name = 'query1') from cseEventStream#window.time(1 sec) select symbol,price," +
                "volume insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(cseEventStream + query);

        siddhiAppRuntime.addCallback("query1", new QueryCallback() {
            @Override
            public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                EventPrinter.print(timestamp, inEvents, removeEvents);
                if (inEvents != null) {
                    inEventCount = inEventCount + inEvents.length;
                }
                if (removeEvents != null) {
                    AssertJUnit.assertTrue("InEvents arrived before RemoveEvents", inEventCount > removeEventCount);
                    removeEventCount = removeEventCount + removeEvents.length;
                }
                eventArrived = true;
            }
        });

        InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
        siddhiAppRuntime.start();
        inputHandler.send(new Object[]{"IBM", 700f, 1});
        inputHandler.send(new Object[]{"WSO2", 60.5f, 2});
        // Sleep slightly longer than the 1 sec window between batches.
        Thread.sleep(1100);
        inputHandler.send(new Object[]{"IBM", 700f, 3});
        inputHandler.send(new Object[]{"WSO2", 60.5f, 4});
        Thread.sleep(1100);
        inputHandler.send(new Object[]{"IBM", 700f, 5});
        inputHandler.send(new Object[]{"WSO2", 60.5f, 6});
        Thread.sleep(4000);
        AssertJUnit.assertEquals(6, inEventCount);
        AssertJUnit.assertEquals(6, removeEventCount);
        AssertJUnit.assertTrue(eventArrived);
        siddhiAppRuntime.shutdown();
    }

    /**
     * Verifies that expired events from a time window can feed a downstream
     * query ("insert expired events into" chaining).
     */
    @Test
    public void timeWindowTest3() throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        String queries = "define stream fireAlarmEventStream (deviceID string, sonar double);\n" +
                "@info(name = 'query1')\n" +
                "from fireAlarmEventStream#window.time(30 milliseconds)\n" +
                "select deviceID\n" +
                "insert expired events into analyzeStream;\n" +
                "" +
                "@info(name = 'query2')\n" +
                "from analyzeStream\n" +
                "select deviceID\n" +
                "insert into bulbOnStream;\n";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(queries);
        siddhiAppRuntime.addCallback("analyzeStream", new StreamCallback() {
            @Override
            public void receive(Event[] events) {
                EventPrinter.print(events);
                eventArrived = true;
            }
        });

        InputHandler inputHandler = siddhiAppRuntime.getInputHandler("fireAlarmEventStream");
        siddhiAppRuntime.start();
        inputHandler.send(new Object[]{"id1", 20d});
        inputHandler.send(new Object[]{"id2", 20d});
        Thread.sleep(2000);
        AssertJUnit.assertTrue(eventArrived);
        siddhiAppRuntime.shutdown();
    }

    /**
     * A time window accepts only one parameter; the extra argument must make
     * app creation fail.
     */
    @Test(expectedExceptions = SiddhiAppCreationException.class)
    public void timeWindowTest4() throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();

        String cseEventStream = "" +
                "define stream cseEventStream (symbol string, price float, volume int);";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(2 sec, 5) " +
                "select symbol,price,volume " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(cseEventStream + query);
    }

    /**
     * A time window requires a constant duration; passing a stream attribute
     * must make app creation fail.
     */
    @Test(expectedExceptions = SiddhiAppCreationException.class)
    public void timeWindowTest5() throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();

        String cseEventStream = "" +
                "define stream cseEventStream (symbol string, time long, volume int);";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(time) " +
                "select symbol,price,volume " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(cseEventStream + query);
    }

    /**
     * A time window duration must be an int/long/time constant; a float
     * literal must make app creation fail.
     */
    @Test(expectedExceptions = SiddhiAppCreationException.class)
    public void timeWindowTest6() throws InterruptedException, SiddhiAppCreationException {
        SiddhiManager siddhiManager = new SiddhiManager();

        String cseEventStream = "" +
                "define stream cseEventStream (symbol string, time long, volume int);";
        String query = "" +
                "@info(name = 'query1') " +
                "from cseEventStream#window.time(4.7) " +
                "select symbol,price,volume " +
                "insert all events into outputStream ;";

        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(cseEventStream + query);
    }
}
| |
package com.plusub.lib.util;
import java.lang.reflect.Method;
import android.app.DownloadManager;
import android.app.DownloadManager.Request;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
/**
* DownloadManagerPro
* <ul>
* <strong>Get download info</strong>
* <li>{@link #getStatusById(long)} get download status</li>
* <li>{@link #getDownloadBytes(long)} get downloaded byte, total byte</li>
* <li>{@link #getBytesAndStatus(long)} get downloaded byte, total byte and download status</li>
* <li>{@link #getFileName(long)} get download file name</li>
* <li>{@link #getUri(long)} get download uri</li>
* <li>{@link #getReason(long)} get failed code or paused reason</li>
* <li>{@link #getPausedReason(long)} get paused reason</li>
* <li>{@link #getErrorCode(long)} get failed error code</li>
* </ul>
* <ul>
* <strong>Operate download</strong>
* <li>{@link #isExistPauseAndResumeMethod()} whether exist pauseDownload and resumeDownload method in
* {@link DownloadManager}</li>
* <li>{@link #pauseDownload(long...)} pause download. need pauseDownload(long...) method in {@link DownloadManager}</li>
* <li>{@link #resumeDownload(long...)} resume download. need resumeDownload(long...) method in {@link DownloadManager}</li>
* </ul>
* <ul>
* <strong>RequestPro</strong>
* <li>{@link RequestPro#setNotiClass(String)} set noti class</li>
* <li>{@link RequestPro#setNotiExtras(String)} set noti extras</li>
* </ul>
*
* @author <a href="http://www.trinea.cn" target="_blank">Trinea</a> 2013-5-4
*/
public class DownloadManagerPro {
public static final Uri CONTENT_URI = Uri.parse("content://downloads/my_downloads");
/** represents downloaded file above api 11 **/
public static final String COLUMN_LOCAL_FILENAME = "local_filename";
/** represents downloaded file below api 11 **/
public static final String COLUMN_LOCAL_URI = "local_uri";
public static final String METHOD_NAME_PAUSE_DOWNLOAD = "pauseDownload";
public static final String METHOD_NAME_RESUME_DOWNLOAD = "resumeDownload";
private static boolean isInitPauseDownload = false;
private static boolean isInitResumeDownload = false;
private static Method pauseDownload = null;
private static Method resumeDownload = null;
private DownloadManager downloadManager;
/**
 * Creates a helper wrapping the given system {@link DownloadManager},
 * which is used for all subsequent queries.
 *
 * @param downloadManager the manager to query and operate on.
 */
public DownloadManagerPro(DownloadManager downloadManager) {
    this.downloadManager = downloadManager;
}
/**
 * Gets the download status of a single download.
 *
 * @param downloadId the id of the download to query.
 * @return the value of the {@link DownloadManager#COLUMN_STATUS} column for
 *         that download.
 */
public int getStatusById(long downloadId) {
    return getInt(downloadId, DownloadManager.COLUMN_STATUS);
}
/**
 * Gets the byte progress of a download.
 *
 * @param downloadId the id of the download to query.
 * @return an int array with two elements
 *         <ul>
 *         <li>result[0] represents downloaded bytes, This will initially be -1.</li>
 *         <li>result[1] represents total bytes, This will initially be -1.</li>
 *         </ul>
 */
public int[] getDownloadBytes(long downloadId) {
    int[] full = getBytesAndStatus(downloadId);
    // Keep only the two byte counters, dropping the trailing status element.
    return new int[] {full[0], full[1]};
}
/**
 * Gets downloaded bytes, total bytes and download status in one query.
 *
 * @param downloadId the id of the download to query.
 * @return a int array with three elements
 *         <ul>
 *         <li>result[0] represents downloaded bytes, This will initially be -1.</li>
 *         <li>result[1] represents total bytes, This will initially be -1.</li>
 *         <li>result[2] represents download status, This will initially be 0.</li>
 *         </ul>
 */
public int[] getBytesAndStatus(long downloadId) {
    int[] bytesAndStatus = new int[] {-1, -1, 0};
    DownloadManager.Query query = new DownloadManager.Query().setFilterById(downloadId);
    Cursor c = null;
    try {
        c = downloadManager.query(query);
        if (c != null && c.moveToFirst()) {
            bytesAndStatus[0] = c.getInt(c.getColumnIndexOrThrow(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR));
            bytesAndStatus[1] = c.getInt(c.getColumnIndexOrThrow(DownloadManager.COLUMN_TOTAL_SIZE_BYTES));
            // CONSISTENCY FIX: use getColumnIndexOrThrow like the two reads
            // above. Plain getColumnIndex() returns -1 when the column is
            // missing, which would make getInt(-1) fail with a far less
            // useful error than the explicit IllegalArgumentException.
            bytesAndStatus[2] = c.getInt(c.getColumnIndexOrThrow(DownloadManager.COLUMN_STATUS));
        }
    } finally {
        // Always close the cursor, even when the query throws.
        if (c != null) {
            c.close();
        }
    }
    return bytesAndStatus;
}
/**
* pause download
*
* @param ids the IDs of the downloads to be paused
* @return the number of downloads actually paused, -1 if exception or method not exist
*/
public int pauseDownload(long... ids) {
initPauseMethod();
if (pauseDownload == null) {
return -1;
}
try {
return ((Integer)pauseDownload.invoke(downloadManager, ids)).intValue();
} catch (Exception e) {
/**
* accept all exception, include ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
* NullPointException
*/
e.printStackTrace();
}
return -1;
}
/**
* resume download
*
* @param ids the IDs of the downloads to be resumed
* @return the number of downloads actually resumed, -1 if exception or method not exist
*/
public int resumeDownload(long... ids) {
initResumeMethod();
if (resumeDownload == null) {
return -1;
}
try {
return ((Integer)resumeDownload.invoke(downloadManager, ids)).intValue();
} catch (Exception e) {
/**
* accept all exception, include ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
* NullPointException
*/
e.printStackTrace();
}
return -1;
}
/**
* whether exist pauseDownload and resumeDownload method in {@link DownloadManager}
*
* @return
*/
public static boolean isExistPauseAndResumeMethod() {
initPauseMethod();
initResumeMethod();
return pauseDownload != null && resumeDownload != null;
}
private static void initPauseMethod() {
if (isInitPauseDownload) {
return;
}
isInitPauseDownload = true;
try {
pauseDownload = DownloadManager.class.getMethod(METHOD_NAME_PAUSE_DOWNLOAD, long[].class);
} catch (Exception e) {
// accept all exception
e.printStackTrace();
}
}
private static void initResumeMethod() {
if (isInitResumeDownload) {
return;
}
isInitResumeDownload = true;
try {
resumeDownload = DownloadManager.class.getMethod(METHOD_NAME_RESUME_DOWNLOAD, long[].class);
} catch (Exception e) {
// accept all exception
e.printStackTrace();
}
}
/**
* get download file name
*
* @param downloadId
* @return
*/
public String getFileName(long downloadId) {
return getString(downloadId, (Build.VERSION.SDK_INT < 11 ? COLUMN_LOCAL_URI : COLUMN_LOCAL_FILENAME));
}
/**
* get download uri
*
* @param downloadId
* @return
*/
public String getUri(long downloadId) {
return getString(downloadId, DownloadManager.COLUMN_URI);
}
/**
* get failed code or paused reason
*
* @param downloadId
* @return <ul>
* <li>if status of downloadId is {@link DownloadManager#STATUS_PAUSED}, return
* {@link #getPausedReason(long)}</li>
* <li>if status of downloadId is {@link DownloadManager#STATUS_FAILED}, return {@link #getErrorCode(long)}</li>
* <li>if status of downloadId is neither {@link DownloadManager#STATUS_PAUSED} nor
* {@link DownloadManager#STATUS_FAILED}, return 0</li>
* </ul>
*/
public int getReason(long downloadId) {
return getInt(downloadId, DownloadManager.COLUMN_REASON);
}
/**
* get paused reason
*
* @param downloadId
* @return <ul>
* <li>if status of downloadId is {@link DownloadManager#STATUS_PAUSED}, return one of
* {@link DownloadManager#PAUSED_WAITING_TO_RETRY}<br/>
* {@link DownloadManager#PAUSED_WAITING_FOR_NETWORK}<br/>
* {@link DownloadManager#PAUSED_QUEUED_FOR_WIFI}<br/>
* {@link DownloadManager#PAUSED_UNKNOWN}</li>
* <li>else return {@link DownloadManager#PAUSED_UNKNOWN}</li>
* </ul>
*/
public int getPausedReason(long downloadId) {
return getInt(downloadId, DownloadManager.COLUMN_REASON);
}
/**
* get failed error code
*
* @param downloadId
* @return one of {@link DownloadManager#ERROR_*}
*/
public int getErrorCode(long downloadId) {
return getInt(downloadId, DownloadManager.COLUMN_REASON);
}
public static class RequestPro extends DownloadManager.Request {
public static final String METHOD_NAME_SET_NOTI_CLASS = "setNotiClass";
public static final String METHOD_NAME_SET_NOTI_EXTRAS = "setNotiExtras";
private static boolean isInitNotiClass = false;
private static boolean isInitNotiExtras = false;
private static Method setNotiClass = null;
private static Method setNotiExtras = null;
/**
* @param uri the HTTP URI to download.
*/
public RequestPro(Uri uri) {
super(uri);
}
/**
* set noti class, only init once
*
* @param className full class name
*/
public void setNotiClass(String className) {
synchronized (this) {
if (!isInitNotiClass) {
isInitNotiClass = true;
try {
setNotiClass = Request.class.getMethod(METHOD_NAME_SET_NOTI_CLASS, CharSequence.class);
} catch (Exception e) {
// accept all exception
e.printStackTrace();
}
}
}
if (setNotiClass != null) {
try {
setNotiClass.invoke(this, className);
} catch (Exception e) {
/**
* accept all exception, include ClassNotFoundException, NoSuchMethodException,
* InvocationTargetException, NullPointException
*/
e.printStackTrace();
}
}
}
/**
* set noti extras, only init once
*
* @param extras
*/
public void setNotiExtras(String extras) {
synchronized (this) {
if (!isInitNotiExtras) {
isInitNotiExtras = true;
try {
setNotiExtras = Request.class.getMethod(METHOD_NAME_SET_NOTI_EXTRAS, CharSequence.class);
} catch (Exception e) {
// accept all exception
e.printStackTrace();
}
}
}
if (setNotiExtras != null) {
try {
setNotiExtras.invoke(this, extras);
} catch (Exception e) {
/**
* accept all exception, include ClassNotFoundException, NoSuchMethodException,
* InvocationTargetException, NullPointException
*/
e.printStackTrace();
}
}
}
}
/**
* get string column
*
* @param downloadId
* @param columnName
* @return
*/
private String getString(long downloadId, String columnName) {
DownloadManager.Query query = new DownloadManager.Query().setFilterById(downloadId);
String result = null;
Cursor c = null;
try {
c = downloadManager.query(query);
if (c != null && c.moveToFirst()) {
result = c.getString(c.getColumnIndex(columnName));
}
} finally {
if (c != null) {
c.close();
}
}
return result;
}
/**
* get int column
*
* @param downloadId
* @param columnName
* @return
*/
private int getInt(long downloadId, String columnName) {
DownloadManager.Query query = new DownloadManager.Query().setFilterById(downloadId);
int result = -1;
Cursor c = null;
try {
c = downloadManager.query(query);
if (c != null && c.moveToFirst()) {
result = c.getInt(c.getColumnIndex(columnName));
}
} finally {
if (c != null) {
c.close();
}
}
return result;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdfwriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSBoolean;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSDocument;
import org.apache.pdfbox.cos.COSFloat;
import org.apache.pdfbox.cos.COSInteger;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSNull;
import org.apache.pdfbox.cos.COSNumber;
import org.apache.pdfbox.cos.COSObject;
import org.apache.pdfbox.cos.COSObjectKey;
import org.apache.pdfbox.cos.COSStream;
import org.apache.pdfbox.cos.COSString;
import org.apache.pdfbox.cos.COSUpdateInfo;
import org.apache.pdfbox.cos.ICOSVisitor;
import org.apache.pdfbox.io.IOUtils;
import org.apache.pdfbox.pdfparser.PDFXRefStream;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.encryption.SecurityHandler;
import org.apache.pdfbox.pdmodel.fdf.FDFDocument;
import org.apache.pdfbox.pdmodel.interactive.digitalsignature.SignatureInterface;
import org.apache.pdfbox.util.Charsets;
import org.apache.pdfbox.util.Hex;
/**
* This class acts on a in-memory representation of a PDF document.
*
* @author Michael Traut
* @author Ben Litchfield
*/
public class COSWriter implements ICOSVisitor, Closeable
{
private static final Log LOG = LogFactory.getLog(COSWriter.class);
/**
 * The dictionary open token.
 */
public static final byte[] DICT_OPEN = "<<".getBytes(Charsets.US_ASCII);
/**
 * The dictionary close token.
 */
public static final byte[] DICT_CLOSE = ">>".getBytes(Charsets.US_ASCII);
/**
 * space character.
 */
public static final byte[] SPACE = { ' ' };
/**
 * The start to a PDF comment.
 */
public static final byte[] COMMENT = { '%' };
/**
 * The output version of the PDF.
 */
public static final byte[] VERSION = "PDF-1.4".getBytes(Charsets.US_ASCII);
/**
 * Garbage bytes used to create the PDF header: high-bit bytes in a comment
 * line mark the file as binary for transfer programs.
 */
public static final byte[] GARBAGE = new byte[] {(byte)0xf6, (byte)0xe4, (byte)0xfc, (byte)0xdf};
/**
 * The EOF constant.
 */
public static final byte[] EOF = "%%EOF".getBytes(Charsets.US_ASCII);
// pdf tokens
/**
 * The reference token.
 */
public static final byte[] REFERENCE = "R".getBytes(Charsets.US_ASCII);
/**
 * The XREF token.
 */
public static final byte[] XREF = "xref".getBytes(Charsets.US_ASCII);
/**
 * The xref free token.
 */
public static final byte[] XREF_FREE = "f".getBytes(Charsets.US_ASCII);
/**
 * The xref used token.
 */
public static final byte[] XREF_USED = "n".getBytes(Charsets.US_ASCII);
/**
 * The trailer token.
 */
public static final byte[] TRAILER = "trailer".getBytes(Charsets.US_ASCII);
/**
 * The start xref token.
 */
public static final byte[] STARTXREF = "startxref".getBytes(Charsets.US_ASCII);
/**
 * The starting object token.
 */
public static final byte[] OBJ = "obj".getBytes(Charsets.US_ASCII);
/**
 * The end object token.
 */
public static final byte[] ENDOBJ = "endobj".getBytes(Charsets.US_ASCII);
/**
 * The array open token.
 */
public static final byte[] ARRAY_OPEN = "[".getBytes(Charsets.US_ASCII);
/**
 * The array close token.
 */
public static final byte[] ARRAY_CLOSE = "]".getBytes(Charsets.US_ASCII);
/**
 * The open stream token.
 */
public static final byte[] STREAM = "stream".getBytes(Charsets.US_ASCII);
/**
 * The close stream token.
 */
public static final byte[] ENDSTREAM = "endstream".getBytes(Charsets.US_ASCII);
// the decimal format for the xref offset data (10 digits, zero padded)
private final NumberFormat formatXrefOffset = new DecimalFormat("0000000000");
// the decimal format for the xref object generation number data
private final NumberFormat formatXrefGeneration = new DecimalFormat("00000");
private final NumberFormat formatDecimal = NumberFormat.getNumberInstance( Locale.US );
// the stream where we create the pdf output
private OutputStream output;
// the stream used to write standard cos data
private COSStandardOutputStream standardOutput;
// the start position of the x ref section
private long startxref = 0;
// the current object number
private long number = 0;
// maps the object to the keys generated in the writer
// these are used for indirect references in other objects
//A hashtable is used on purpose over a hashmap
//so that null entries will not get added.
private final Map<COSBase,COSObjectKey> objectKeys = new Hashtable<COSBase,COSObjectKey>();
// reverse mapping of objectKeys, used by addObjectToWrite for incremental saves
private final Map<COSObjectKey,COSBase> keyObject = new Hashtable<COSObjectKey,COSBase>();
// the list of x ref entries to be made so far
private final List<COSWriterXRefEntry> xRefEntries = new ArrayList<COSWriterXRefEntry>();
// fast membership test mirroring the objectsToWrite deque
private final Set<COSBase> objectsToWriteSet = new HashSet<COSBase>();
//A list of objects to write.
private final Deque<COSBase> objectsToWrite = new LinkedList<COSBase>();
//a list of objects already written
private final Set<COSBase> writtenObjects = new HashSet<COSBase>();
//An 'actual' is any COSBase that is not a COSObject.
//need to keep a list of the actuals that are added
//as well as the objects because there is a problem
//when adding a COSObject and then later adding
//the actual for that object, so we will track
//actuals separately.
private final Set<COSBase> actualsAdded = new HashSet<COSBase>();
// key of the object currently being written by doWriteObject
private COSObjectKey currentObjectKey = null;
private PDDocument pdDocument = null;
private FDFDocument fdfDocument = null;
// true while body objects must be encrypted; reset before the encryption dict is written
private boolean willEncrypt = false;
// signing
private boolean incrementalUpdate = false;
// set once a /Sig or /DocTimeStamp dictionary has been written; enables offset capture
private boolean reachedSignature = false;
// byte positions/lengths of the signature /Contents and /ByteRange values in the buffered output
private long signatureOffset, signatureLength;
private long byteRangeOffset, byteRangeLength;
private InputStream incrementalInput;
private OutputStream incrementalOutput;
private SignatureInterface signatureInterface;
/**
 * Creates a writer that emits a complete PDF to the given stream.
 *
 * @param os The wrapped output stream.
 */
public COSWriter(OutputStream os)
{
    setOutput(os);
    setStandardOutput(new COSStandardOutputStream(output));
    formatDecimal.setMaximumFractionDigits(10);
    formatDecimal.setGroupingUsed(false);
}
/**
 * COSWriter constructor for incremental updates.
 *
 * @param outputStream output stream where the new PDF data will be written
 * @param inputStream input stream containing source PDF data
 *
 * @throws IOException if something went wrong
 */
public COSWriter(OutputStream outputStream, InputStream inputStream) throws IOException
{
    // New data is first buffered in memory; it is merged with the original
    // bytes and flushed to outputStream when the signature is written.
    setOutput(new ByteArrayOutputStream());
    setStandardOutput(new COSStandardOutputStream(output, inputStream.available()));
    incrementalInput = inputStream;
    incrementalOutput = outputStream;
    incrementalUpdate = true;
    formatDecimal.setMaximumFractionDigits(10);
    formatDecimal.setGroupingUsed(false);
}
/**
 * Seeds the writer's object/key maps from an existing document so that an
 * incremental save reuses the original object numbers, and advances the
 * object-number counter past the highest number already in use.
 *
 * @param doc the document being updated incrementally; ignored if null
 */
private void prepareIncrement(PDDocument doc)
{
    try
    {
        if (doc == null)
        {
            return;
        }
        COSDocument cosDoc = doc.getDocument();
        long highestNumber = 0;
        for (COSObjectKey cosObjectKey : cosDoc.getXrefTable().keySet())
        {
            COSBase object = cosDoc.getObjectFromPool(cosObjectKey).getObject();
            if (cosObjectKey != null)
            {
                // COSNumber instances are always written inline, so they
                // never need a reusable key
                if (object != null && !(object instanceof COSNumber))
                {
                    objectKeys.put(object, cosObjectKey);
                    keyObject.put(cosObjectKey, object);
                }
                highestNumber = Math.max(highestNumber, cosObjectKey.getNumber());
            }
        }
        setNumber(highestNumber);
    }
    catch (IOException e)
    {
        LOG.error(e, e);
    }
}
/**
 * Queues an entry for the cross-reference section emitted at the end of the file.
 *
 * @param entry The new entry to add.
 */
protected void addXRefEntry(COSWriterXRefEntry entry)
{
    List<COSWriterXRefEntry> entries = getXRefEntries();
    entries.add(entry);
}
/**
 * This will close the stream.
 *
 * @throws IOException If the underlying stream throws an exception.
 */
@Override
public void close() throws IOException
{
    // close the COS-level stream first, then the raw output, then the
    // incremental target stream if one was supplied
    COSStandardOutputStream stdOut = getStandardOutput();
    if (stdOut != null)
    {
        stdOut.close();
    }
    OutputStream rawOut = getOutput();
    if (rawOut != null)
    {
        rawOut.close();
    }
    if (incrementalOutput != null)
    {
        incrementalOutput.close();
    }
}
/**
 * This will get the current object number, i.e. the highest object number
 * handed out by {@link #getObjectKey} so far.
 *
 * @return The current object number.
 */
protected long getNumber()
{
    return number;
}
/**
 * This will get all available object keys. The returned map is the writer's
 * live internal map, not a copy.
 *
 * @return A map of all object keys.
 */
public Map<COSBase,COSObjectKey> getObjectKeys()
{
    return objectKeys;
}
/**
 * This will get the output stream (a ByteArrayOutputStream buffer when
 * writing incrementally).
 *
 * @return The output stream.
 */
protected java.io.OutputStream getOutput()
{
    return output;
}
/**
 * This will get the standard output stream used to write COS tokens; it
 * tracks the current byte position for xref generation.
 *
 * @return The standard output stream.
 */
protected COSStandardOutputStream getStandardOutput()
{
    return standardOutput;
}
/**
 * This will get the current start xref, i.e. the byte offset of the most
 * recently written xref section.
 *
 * @return The current start xref.
 */
protected long getStartxref()
{
    return startxref;
}
/**
 * This will get the xref entries collected so far. The returned list is the
 * writer's live internal list, not a copy.
 *
 * @return All available xref entries.
 */
protected List<COSWriterXRefEntry> getXRefEntries()
{
    return xRefEntries;
}
/**
 * This will set the current object number.
 *
 * @param newNumber The new object number.
 */
protected void setNumber(long newNumber)
{
    number = newNumber;
}
/**
 * This will set the output stream.
 *
 * @param newOutput The new output stream.
 */
private void setOutput( OutputStream newOutput )
{
    output = newOutput;
}
/**
 * This will set the standard output stream.
 *
 * @param newStandardOutput The new standard output stream.
 */
private void setStandardOutput(COSStandardOutputStream newStandardOutput)
{
    standardOutput = newStandardOutput;
}
/**
 * This will set the start xref.
 *
 * @param newStartxref The new start xref attribute.
 */
protected void setStartxref(long newStartxref)
{
    startxref = newStartxref;
}
/**
 * This will write the body of the document.
 *
 * @param doc The document to write the body for.
 *
 * @throws IOException If there is an error writing the data.
 */
protected void doWriteBody(COSDocument doc) throws IOException
{
    COSDictionary trailer = doc.getTrailer();
    COSDictionary root = (COSDictionary) trailer.getDictionaryObject(COSName.ROOT);
    COSDictionary info = (COSDictionary) trailer.getDictionaryObject(COSName.INFO);
    COSDictionary encrypt = (COSDictionary) trailer.getDictionaryObject(COSName.ENCRYPT);
    if (root != null)
    {
        addObjectToWrite(root);
    }
    if (info != null)
    {
        addObjectToWrite(info);
    }
    // write everything reachable from the catalog and info dictionaries
    drainObjectsToWrite();
    // the encryption dictionary itself must not be encrypted, so it is
    // written in a second pass with encryption switched off
    willEncrypt = false;
    if (encrypt != null)
    {
        addObjectToWrite(encrypt);
    }
    drainObjectsToWrite();
}

// Writes every queued object; visiting an object may queue further ones.
private void drainObjectsToWrite() throws IOException
{
    while (!objectsToWrite.isEmpty())
    {
        COSBase nextObject = objectsToWrite.removeFirst();
        objectsToWriteSet.remove(nextObject);
        doWriteObject(nextObject);
    }
}
// Queues an object for writing unless it has already been written, is already
// queued, or its dereferenced value was queued under another wrapper. For
// incremental saves, unchanged objects that already have a key are skipped.
private void addObjectToWrite( COSBase object )
{
    COSBase actual = object;
    if( actual instanceof COSObject )
    {
        // dedupe on the dereferenced value, not the COSObject wrapper
        actual = ((COSObject)actual).getObject();
    }
    if( !writtenObjects.contains( object ) &&
        !objectsToWriteSet.contains( object ) &&
        !actualsAdded.contains( actual ) )
    {
        COSBase cosBase=null;
        COSObjectKey cosObjectKey = null;
        if(actual != null)
        {
            cosObjectKey= objectKeys.get(actual);
        }
        if(cosObjectKey!=null)
        {
            cosBase = keyObject.get(cosObjectKey);
        }
        // incremental update: objects carried over from the original document
        // that are not flagged as updated do not need to be rewritten
        if (actual != null && objectKeys.containsKey(actual)
            && object instanceof COSUpdateInfo && !((COSUpdateInfo)object).isNeedToBeUpdated()
            && cosBase instanceof COSUpdateInfo && !((COSUpdateInfo)cosBase).isNeedToBeUpdated() )
        {
            return;
        }
        objectsToWrite.add( object );
        objectsToWriteSet.add( object );
        if( actual != null )
        {
            actualsAdded.add( actual );
        }
    }
}
/**
 * This will write a COS object as an indirect object:
 * "&lt;num&gt; &lt;gen&gt; obj ... endobj", recording its byte offset in the
 * xref entries.
 *
 * @param obj The object to write.
 *
 * @throws IOException if the output cannot be written
 */
public void doWriteObject( COSBase obj ) throws IOException
{
    writtenObjects.add( obj );
    if(obj instanceof COSDictionary)
    {
        COSDictionary dict = (COSDictionary)obj;
        COSBase itemType = dict.getItem(COSName.TYPE);
        if (itemType instanceof COSName)
        {
            COSName item = (COSName) itemType;
            // once a signature/timestamp dictionary is seen,
            // visitFromDictionary starts recording /Contents and /ByteRange offsets
            if (COSName.SIG.equals(item) || COSName.DOC_TIME_STAMP.equals(item))
            {
                reachedSignature = true;
            }
        }
    }
    // find the physical reference
    currentObjectKey = getObjectKey( obj );
    // add a x ref entry at the current byte position
    addXRefEntry( new COSWriterXRefEntry(getStandardOutput().getPos(), obj, currentObjectKey));
    // write the object header: "<num> <gen> obj"
    getStandardOutput().write(String.valueOf(currentObjectKey.getNumber()).getBytes(Charsets.ISO_8859_1));
    getStandardOutput().write(SPACE);
    getStandardOutput().write(String.valueOf(currentObjectKey.getGeneration()).getBytes(Charsets.ISO_8859_1));
    getStandardOutput().write(SPACE);
    getStandardOutput().write(OBJ);
    getStandardOutput().writeEOL();
    // write the object body via the visitor, then the closing token
    obj.accept( this );
    getStandardOutput().writeEOL();
    getStandardOutput().write(ENDOBJ);
    getStandardOutput().writeEOL();
}
/**
 * This will write the header to the PDF document.
 *
 * @param doc The document to get the data from.
 *
 * @throws IOException If there is an error writing to the stream.
 */
protected void doWriteHeader(COSDocument doc) throws IOException
{
    // FDF and PDF headers share the same layout; only the keyword differs
    String headerString = fdfDocument != null
            ? "%FDF-" + fdfDocument.getDocument().getVersion()
            : "%PDF-" + pdDocument.getDocument().getVersion();
    getStandardOutput().write(headerString.getBytes(Charsets.ISO_8859_1));
    getStandardOutput().writeEOL();
    // a comment line containing high-bit bytes marks the file as binary
    getStandardOutput().write(COMMENT);
    getStandardOutput().write(GARBAGE);
    getStandardOutput().writeEOL();
}
/**
 * This will write the trailer to the PDF document.
 *
 * @param doc The document to create the trailer for.
 *
 * @throws IOException If there is an IOError while writing the document.
 */
protected void doWriteTrailer(COSDocument doc) throws IOException
{
    getStandardOutput().write(TRAILER);
    getStandardOutput().writeEOL();
    COSDictionary trailer = doc.getTrailer();
    //sort xref, needed only if object keys not regenerated
    Collections.sort(getXRefEntries());
    // /Size must be the highest object number + 1
    COSWriterXRefEntry lastEntry = getXRefEntries().get( getXRefEntries().size()-1);
    trailer.setLong(COSName.SIZE, lastEntry.getKey().getNumber()+1);
    // Only need to stay, if an incremental update will be performed
    if (!incrementalUpdate)
    {
        trailer.removeItem( COSName.PREV );
    }
    if (!doc.isXRefStream())
    {
        trailer.removeItem( COSName.XREF_STM );
    }
    // Remove a checksum if present
    trailer.removeItem( COSName.DOC_CHECKSUM );
    // write the trailer dictionary itself via the visitor
    trailer.accept(this);
}
// Writes the cross-reference information for an incremental update or an
// xref-stream document. hybridPrev is the original /XRefStm value, or -1 if
// the source document is not a hybrid-reference file; hybrid files get both
// an xref stream and a classic xref table.
private void doWriteXRefInc(COSDocument doc, long hybridPrev) throws IOException
{
    if (doc.isXRefStream() || hybridPrev != -1)
    {
        // the file uses XrefStreams, so we need to update
        // it with an xref stream. We create a new one and fill it
        // with data available here
        // create a new XRefStrema object
        PDFXRefStream pdfxRefStream = new PDFXRefStream();
        // add all entries from the incremental update.
        List<COSWriterXRefEntry> xRefEntries2 = getXRefEntries();
        for ( COSWriterXRefEntry cosWriterXRefEntry : xRefEntries2 )
        {
            pdfxRefStream.addEntry(cosWriterXRefEntry);
        }
        COSDictionary trailer = doc.getTrailer();
        if (incrementalUpdate)
        {
            // use previous startXref value as new PREV value
            trailer.setLong(COSName.PREV, doc.getStartXref());
        }
        else
        {
            trailer.removeItem(COSName.PREV);
        }
        pdfxRefStream.addTrailerInfo(trailer);
        // the size is the highest object number+1. we add one more
        // for the xref stream object we are going to write
        pdfxRefStream.setSize(getNumber() + 2);
        setStartxref(getStandardOutput().getPos());
        COSStream stream2 = pdfxRefStream.getStream();
        doWriteObject(stream2);
    }
    if (!doc.isXRefStream() || hybridPrev != -1)
    {
        COSDictionary trailer = doc.getTrailer();
        trailer.setLong(COSName.PREV, doc.getStartXref());
        if (hybridPrev != -1)
        {
            // point /XRefStm at the xref stream written above
            COSName xrefStm = COSName.XREF_STM;
            trailer.removeItem(xrefStm);
            trailer.setLong(xrefStm, getStartxref());
        }
        doWriteXRefTable();
        doWriteTrailer(doc);
    }
}
/**
 * Writes the classic "xref" table: the null (free) entry plus one subsection
 * per contiguous run of object numbers, and records the table's byte offset
 * via {@link #setStartxref(long)}.
 *
 * @throws IOException if the output cannot be written
 */
private void doWriteXRefTable() throws IOException
{
    addXRefEntry(COSWriterXRefEntry.getNullEntry());
    // sort xref, needed only if object keys not regenerated
    Collections.sort(getXRefEntries());
    // remember the position where x ref was written
    setStartxref(getStandardOutput().getPos());
    getStandardOutput().write(XREF);
    getStandardOutput().writeEOL();
    // write start object number and object count for this x ref section
    // we assume starting from scratch
    Long[] xRefRanges = getXRefRanges(getXRefEntries());
    // ranges come in (start, count) pairs; an odd-length array is malformed
    // and nothing is written. The parity test is loop-invariant, so it is
    // checked once up front instead of on every iteration of the loop.
    if (xRefRanges.length % 2 == 0)
    {
        int j = 0;
        for (int x = 0; x < xRefRanges.length; x += 2)
        {
            long count = xRefRanges[x + 1];
            writeXrefRange(xRefRanges[x], count);
            for (long i = 0; i < count; ++i)
            {
                writeXrefEntry(xRefEntries.get(j++));
            }
        }
    }
}
// Finalizes an incremental signing pass: patches the /ByteRange placeholder in
// the buffered incremental data, lets the SignatureInterface sign the original
// bytes plus the incremental bytes (minus the /Contents gap), writes the hex
// signature into the /Contents placeholder, and flushes everything to the
// incremental output stream. No-op if no signature offsets were recorded.
private void doWriteSignature() throws IOException
{
    if (signatureOffset == 0 || byteRangeOffset == 0)
    {
        return;
    }
    // calculate the ByteRange values
    // NOTE(review): assumes available() returns the full remaining length of
    // the original PDF input — holds for the in-memory streams used here
    long inLength = incrementalInput.available();
    long beforeLength = signatureOffset;
    long afterOffset = signatureOffset + signatureLength;
    long afterLength = getStandardOutput().getPos() - (inLength + signatureLength) - (signatureOffset - inLength);
    String byteRange = "0 " + beforeLength + " " + afterOffset + " " + afterLength + "]";
    if (byteRangeLength - byteRange.length() < 0)
    {
        throw new IOException("Can't write new ByteRange, not enough space");
    }
    // copy the new incremental data into a buffer (e.g. signature dict, trailer)
    ByteArrayOutputStream byteOut = (ByteArrayOutputStream) output;
    byteOut.flush();
    byte[] buffer = byteOut.toByteArray();
    // overwrite the ByteRange in the buffer, padding with spaces
    byte[] byteRangeBytes = byteRange.getBytes(Charsets.ISO_8859_1);
    for (int i = 0; i < byteRangeLength; i++)
    {
        if (i >= byteRangeBytes.length)
        {
            buffer[(int)(byteRangeOffset + i - inLength)] = 0x20; // SPACE
        }
        else
        {
            buffer[(int)(byteRangeOffset + i - inLength)] = byteRangeBytes[i];
        }
    }
    // get the input PDF bytes
    byte[] inputBytes = IOUtils.toByteArray(incrementalInput);
    // get only the incremental bytes to be signed (includes /ByteRange but not /Contents)
    byte[] signBuffer = new byte[buffer.length - (int)signatureLength];
    int bufSignatureOffset = (int)(signatureOffset - inLength);
    System.arraycopy(buffer, 0, signBuffer, 0, bufSignatureOffset);
    System.arraycopy(buffer, bufSignatureOffset + (int)signatureLength,
        signBuffer, bufSignatureOffset, buffer.length - bufSignatureOffset - (int)signatureLength);
    SequenceInputStream signStream = new SequenceInputStream(new ByteArrayInputStream(inputBytes),
        new ByteArrayInputStream(signBuffer));
    // sign the bytes
    byte[] sign = signatureInterface.sign(signStream);
    String signature = new COSString(sign).toHexString();
    // substract 2 bytes because of the enclosing "<>"
    if (signature.length() > signatureLength - 2)
    {
        throw new IOException("Can't write signature, not enough space");
    }
    // overwrite the signature Contents in the buffer; +1 skips the opening "<"
    byte[] signatureBytes = signature.getBytes(Charsets.ISO_8859_1);
    System.arraycopy(signatureBytes, 0, buffer, bufSignatureOffset + 1, signatureBytes.length);
    // write the data to the incremental output stream
    incrementalOutput.write(inputBytes);
    incrementalOutput.write(buffer);
}
// Writes one xref subsection header: "<first object number> <entry count>".
private void writeXrefRange(long x, long y) throws IOException
{
    COSStandardOutputStream out = getStandardOutput();
    out.write(String.valueOf(x).getBytes(Charsets.ISO_8859_1));
    out.write(SPACE);
    out.write(String.valueOf(y).getBytes(Charsets.ISO_8859_1));
    out.writeEOL();
}
// Writes one 20-byte xref entry: zero-padded offset, generation, f/n flag, CRLF.
private void writeXrefEntry(COSWriterXRefEntry entry) throws IOException
{
    COSStandardOutputStream out = getStandardOutput();
    out.write(formatXrefOffset.format(entry.getOffset()).getBytes(Charsets.ISO_8859_1));
    out.write(SPACE);
    out.write(formatXrefGeneration.format(entry.getKey().getGeneration()).getBytes(Charsets.ISO_8859_1));
    out.write(SPACE);
    out.write(entry.isFree() ? XREF_FREE : XREF_USED);
    // the PDF spec mandates a fixed 20-byte entry, hence CRLF rather than EOL
    out.writeCRLF();
}
/**
 * check the xref entries and write out the ranges. The format of the
 * returned array is exactly the same as the pdf specification. See section
 * 7.5.4 of ISO32000-1:2008, example 1 (page 40) for reference.
 * <p>
 * example: 0 1 2 5 6 7 8 10
 * <p>
 * will create a array with follow ranges
 * <p>
 * 0 3 5 4 10 1
 * <p>
 * this mean that the element 0 is followed by two other related numbers
 * that represent a cluster of the size 3. 5 is follow by three other
 * related numbers and create a cluster of size 4. etc.
 *
 * @param xRefEntriesList list with the xRef entries that was written, assumed
 *                        to be sorted by object number
 * @return an array of (start, count) pairs describing the contiguous runs
 */
protected Long[] getXRefRanges(List<COSWriterXRefEntry> xRefEntriesList)
{
    long last = -2;
    long count = 1;
    List<Long> list = new ArrayList<Long>();
    for (COSWriterXRefEntry entry : xRefEntriesList)
    {
        // use the full long object number; the previous (int) cast silently
        // truncated object numbers larger than Integer.MAX_VALUE
        long nr = entry.getKey().getNumber();
        if (nr == last + 1)
        {
            // still inside the current contiguous run
            ++count;
            last = nr;
        }
        else if (last == -2)
        {
            // first entry starts the first run
            last = nr;
        }
        else
        {
            // gap found: close the previous run and start a new one
            list.add(last - count + 1);
            list.add(count);
            last = nr;
            count = 1;
        }
    }
    // flush the final (still open) run
    if (xRefEntriesList.size() > 0)
    {
        list.add(last - count + 1);
        list.add(count);
    }
    return list.toArray(new Long[list.size()]);
}
/**
 * This will get the object key for the object, allocating a fresh key (with
 * the next object number and generation 0) if neither the wrapper nor its
 * dereferenced value has one yet.
 *
 * @param obj The object to get the key for.
 *
 * @return The object key for the object.
 */
private COSObjectKey getObjectKey( COSBase obj )
{
    COSBase actual = obj;
    if( actual instanceof COSObject )
    {
        actual = ((COSObject)obj).getObject();
    }
    // look up by dereferenced value first, then by the wrapper itself
    COSObjectKey key = null;
    if( actual != null )
    {
        key = objectKeys.get(actual);
    }
    if( key == null )
    {
        key = objectKeys.get(obj);
    }
    if (key == null)
    {
        // no key yet: allocate the next object number and register the key
        // under both the wrapper and the dereferenced value
        setNumber(getNumber()+1);
        key = new COSObjectKey(getNumber(),0);
        objectKeys.put(obj, key);
        if( actual != null )
        {
            objectKeys.put(actual, key);
        }
    }
    return key;
}
/**
 * Writes a COS array: dictionaries and references are written as indirect
 * references, everything else inline; a line break is emitted after every
 * tenth element to keep lines short.
 *
 * @param obj the array to write
 * @return always null
 * @throws IOException if the output cannot be written
 */
@Override
public Object visitFromArray( COSArray obj ) throws IOException
{
    int count = 0;
    getStandardOutput().write(ARRAY_OPEN);
    for (Iterator<COSBase> i = obj.iterator(); i.hasNext();)
    {
        COSBase current = i.next();
        if( current instanceof COSDictionary )
        {
            if (current.isDirect())
            {
                visitFromDictionary((COSDictionary)current);
            }
            else
            {
                // queue the dictionary as its own indirect object
                addObjectToWrite( current );
                writeReference( current );
            }
        }
        else if( current instanceof COSObject )
        {
            COSBase subValue = ((COSObject)current).getObject();
            if( subValue instanceof COSDictionary || subValue == null )
            {
                addObjectToWrite( current );
                writeReference( current );
            }
            else
            {
                // non-dictionary wrapped values are written inline
                subValue.accept( this );
            }
        }
        else if( current == null )
        {
            COSNull.NULL.accept( this );
        }
        else
        {
            current.accept(this);
        }
        count++;
        if (i.hasNext())
        {
            if (count % 10 == 0)
            {
                getStandardOutput().writeEOL();
            }
            else
            {
                getStandardOutput().write(SPACE);
            }
        }
    }
    getStandardOutput().write(ARRAY_CLOSE);
    getStandardOutput().writeEOL();
    return null;
}
/**
 * Writes a PDF boolean token.
 *
 * @param obj the boolean to write
 * @return always null
 * @throws IOException if the output cannot be written
 */
@Override
public Object visitFromBoolean(COSBoolean obj) throws IOException
{
    obj.writePDF( getStandardOutput() );
    return null;
}
/**
 * Writes a COS dictionary. Nested dictionaries become indirect references
 * unless flagged direct; while a signature dictionary is being written, the
 * byte offsets of the /Contents and /ByteRange values are recorded so
 * doWriteSignature() can patch them later.
 *
 * @param obj the dictionary to write
 * @return always null
 * @throws IOException if the output cannot be written
 */
@Override
public Object visitFromDictionary(COSDictionary obj) throws IOException
{
    getStandardOutput().write(DICT_OPEN);
    getStandardOutput().writeEOL();
    for (Map.Entry<COSName, COSBase> entry : obj.entrySet())
    {
        COSBase value = entry.getValue();
        if (value != null)
        {
            entry.getKey().accept(this);
            getStandardOutput().write(SPACE);
            if( value instanceof COSDictionary )
            {
                COSDictionary dict = (COSDictionary)value;
                // write all XObjects as direct objects, this will save some size
                COSBase item = dict.getItem(COSName.XOBJECT);
                if(item!=null)
                {
                    item.setDirect(true);
                }
                item = dict.getItem(COSName.RESOURCES);
                if(item!=null)
                {
                    item.setDirect(true);
                }
                if(dict.isDirect())
                {
                    // If the object should be written direct, we need
                    // to pass the dictionary to the visitor again.
                    visitFromDictionary(dict);
                }
                else
                {
                    addObjectToWrite( dict );
                    writeReference( dict );
                }
            }
            else if( value instanceof COSObject )
            {
                COSBase subValue = ((COSObject)value).getObject();
                if( subValue instanceof COSDictionary || subValue == null )
                {
                    addObjectToWrite( value );
                    writeReference( value );
                }
                else
                {
                    subValue.accept( this );
                }
            }
            else
            {
                // If we reach the pdf signature, we need to determinate the position of the
                // content and byterange
                if(reachedSignature && COSName.CONTENTS.equals(entry.getKey()))
                {
                    signatureOffset = getStandardOutput().getPos();
                    value.accept(this);
                    signatureLength = getStandardOutput().getPos()- signatureOffset;
                }
                else if(reachedSignature && COSName.BYTERANGE.equals(entry.getKey()))
                {
                    // +1 / -1 exclude the enclosing "[" and "]" of the array
                    byteRangeOffset = getStandardOutput().getPos() + 1;
                    value.accept(this);
                    byteRangeLength = getStandardOutput().getPos() - 1 - byteRangeOffset;
                    reachedSignature = false;
                }
                else
                {
                    value.accept(this);
                }
            }
            getStandardOutput().writeEOL();
        }
        else
        {
            //then we won't write anything, there are a couple cases
            //were the value of an entry in the COSDictionary will
            //be a dangling reference that points to nothing
            //so we will just not write out the entry if that is the case
        }
    }
    getStandardOutput().write(DICT_CLOSE);
    getStandardOutput().writeEOL();
    return null;
}
/**
 * Writes a whole COS document: header (or, for incremental updates, just a
 * separating newline), the body, the cross reference (stream or classic
 * table + trailer), and the startxref/%%EOF epilogue. For incremental
 * updates the signature placeholder is filled in last.
 *
 * @param doc the document to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromDocument(COSDocument doc) throws IOException
{
    if(!incrementalUpdate)
    {
        doWriteHeader(doc);
    }
    else
    {
        // Sometimes the original file will be missing a newline at the end
        // In order to avoid having %%EOF the first object on the same line
        // as the %%EOF, we put a newline here. If there's already one at
        // the end of the file, an extra one won't hurt. PDFBOX-1051
        getStandardOutput().writeCRLF();
    }
    doWriteBody(doc);
    // get the previous trailer
    COSDictionary trailer = doc.getTrailer();
    long hybridPrev = -1;
    if (trailer != null)
    {
        // hybrid-reference files carry an /XRefStm pointer in the trailer
        hybridPrev = trailer.getLong(COSName.XREF_STM);
    }
    if(incrementalUpdate || doc.isXRefStream())
    {
        doWriteXRefInc(doc, hybridPrev);
    }
    else
    {
        doWriteXRefTable();
        doWriteTrailer(doc);
    }
    // write endof
    getStandardOutput().write(STARTXREF);
    getStandardOutput().writeEOL();
    getStandardOutput().write(String.valueOf(getStartxref()).getBytes(Charsets.ISO_8859_1));
    getStandardOutput().writeEOL();
    getStandardOutput().write(EOF);
    getStandardOutput().writeEOL();
    if(incrementalUpdate)
    {
        // patch the recorded /Contents and /ByteRange positions with real data
        doWriteSignature();
    }
    return null;
}
/**
 * Writes a real-number token.
 *
 * @param obj the float to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromFloat(COSFloat obj) throws IOException
{
    // The COS object knows its own PDF text form; delegate to it.
    obj.writePDF(getStandardOutput());
    return null;
}
/**
 * Writes an integer token.
 *
 * @param obj the integer to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromInt(COSInteger obj) throws IOException
{
    // The COS object knows its own PDF text form; delegate to it.
    obj.writePDF(getStandardOutput());
    return null;
}
/**
 * Writes a name token ("/Name", with escaping handled by the object itself).
 *
 * @param obj the name to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromName(COSName obj) throws IOException
{
    // The COS object knows its own PDF text form; delegate to it.
    obj.writePDF(getStandardOutput());
    return null;
}
/**
 * Writes the "null" token.
 *
 * @param obj the null object to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromNull(COSNull obj) throws IOException
{
    // The COS object knows its own PDF text form; delegate to it.
    obj.writePDF(getStandardOutput());
    return null;
}
/**
 * Writes an indirect reference ("&lt;number&gt; &lt;generation&gt; R") to the given object.
 *
 * @param obj The object being referenced; its key is looked up via getObjectKey.
 *
 * @throws IOException If there is an exception while writing the reference.
 */
public void writeReference(COSBase obj) throws IOException
{
    COSObjectKey key = getObjectKey(obj);
    // object number, space, generation number, space, "R"
    getStandardOutput().write(String.valueOf(key.getNumber()).getBytes(Charsets.ISO_8859_1));
    getStandardOutput().write(SPACE);
    getStandardOutput().write(String.valueOf(key.getGeneration()).getBytes(Charsets.ISO_8859_1));
    getStandardOutput().write(SPACE);
    getStandardOutput().write(REFERENCE);
}
/**
 * Writes a stream object: its dictionary, the "stream" keyword, the filtered
 * stream bytes, and "endstream". The /Length entry is written as a direct
 * value when required (e.g. cross reference streams), otherwise as an
 * indirect object whose value is filled in after the bytes are counted.
 *
 * @param obj the stream to serialize
 * @return always null
 * @throws IOException if reading the stream or writing the output fails
 */
@Override
public Object visitFromStream(COSStream obj) throws IOException
{
    if (willEncrypt)
    {
        // encrypt the stream data in place using the enclosing object's key
        pdDocument.getEncryption().getSecurityHandler()
            .encryptStream(obj, currentObjectKey.getNumber(),
                currentObjectKey.getGeneration());
    }
    COSObject lengthObject = null;
    // check if the length object is required to be direct, like in
    // a cross reference stream dictionary
    COSBase lengthEntry = obj.getDictionaryObject(COSName.LENGTH);
    String type = obj.getNameAsString(COSName.TYPE);
    if (lengthEntry != null && lengthEntry.isDirect() || "XRef".equals(type))
    {
        // the length might be the non encoded length,
        // set the real one as direct object
        COSInteger cosInteger = COSInteger.get(obj.getFilteredLength());
        cosInteger.setDirect(true);
        obj.setItem(COSName.LENGTH, cosInteger);
    }
    else
    {
        // make the length an implicit indirect object
        // set the length of the stream and write stream dictionary
        lengthObject = new COSObject(null);
        obj.setItem(COSName.LENGTH, lengthObject);
    }
    InputStream input = null;
    try
    {
        input = obj.getFilteredStream();
        //obj.accept(this);
        // write the stream content
        visitFromDictionary(obj);
        getStandardOutput().write(STREAM);
        getStandardOutput().writeCRLF();
        byte[] buffer = new byte[1024];
        int amountRead;
        int totalAmountWritten = 0;
        while ((amountRead = input.read(buffer, 0, 1024)) != -1)
        {
            getStandardOutput().write(buffer, 0, amountRead);
            totalAmountWritten += amountRead;
        }
        // set the length as an indirect object, now that the byte count is known
        if (lengthObject != null)
        {
            lengthObject.setObject(COSInteger.get(totalAmountWritten));
        }
        getStandardOutput().writeCRLF();
        getStandardOutput().write(ENDSTREAM);
        getStandardOutput().writeEOL();
        return null;
    }
    finally
    {
        // always release the filtered stream, even if writing failed
        if (input != null)
        {
            input.close();
        }
    }
}
/**
 * Writes a string object, encrypting it first when encryption is active.
 *
 * @param obj the string to serialize
 * @return always null
 * @throws IOException if writing to the output stream fails
 */
@Override
public Object visitFromString(COSString obj) throws IOException
{
    if (willEncrypt)
    {
        // encrypt the string in place using the enclosing object's key
        pdDocument.getEncryption().getSecurityHandler().encryptString(
            obj, currentObjectKey.getNumber(), currentObjectKey.getGeneration());
    }
    COSWriter.writeString(obj, getStandardOutput());
    return null;
}
/**
 * Writes the given low-level COS document by wrapping it in a PDDocument.
 *
 * @param doc The document to write.
 * @throws IOException If an error occurs while generating the data.
 */
public void write(COSDocument doc) throws IOException
{
    // Wrap and delegate to the PDDocument overload.
    write(new PDDocument(doc));
}
/**
 * Writes the pdf document without signing it.
 *
 * @param doc The document to write.
 * @throws IOException If an error occurs while generating the data.
 */
public void write(PDDocument doc) throws IOException
{
    // No signing requested, so no SignatureInterface is supplied.
    write(doc, null);
}
/**
 * This will write the pdf document.
 *
 * @param doc The document to write.
 * @param signInterface class to be used for signing; may be null when no signing is wanted
 *
 * @throws IOException If an error occurs while generating the data.
 * @throws IllegalStateException If the document has an encryption dictionary but no protection
 * policy.
 */
public void write(PDDocument doc, SignatureInterface signInterface) throws IOException
{
    // seed for the document ID: reuse the stored id time if present, otherwise "now"
    Long idTime = doc.getDocumentId() == null ? System.currentTimeMillis() :
        doc.getDocumentId();
    pdDocument = doc;
    signatureInterface = signInterface;
    if(incrementalUpdate)
    {
        prepareIncrement(doc);
    }
    // if the document says we should remove encryption, then we shouldn't encrypt
    if(doc.isAllSecurityToBeRemoved())
    {
        willEncrypt = false;
        // also need to get rid of the "Encrypt" in the trailer so readers
        // don't try to decrypt a document which is not encrypted
        COSDocument cosDoc = doc.getDocument();
        COSDictionary trailer = cosDoc.getTrailer();
        trailer.removeItem(COSName.ENCRYPT);
    }
    else
    {
        if (pdDocument.getEncryption() != null)
        {
            SecurityHandler securityHandler = pdDocument.getEncryption().getSecurityHandler();
            if (!securityHandler.hasProtectionPolicy())
            {
                throw new IllegalStateException("PDF contains an encryption dictionary, please remove it with "
                    + "setAllSecurityToBeRemoved() or set a protection policy with protect()");
            }
            securityHandler.prepareDocumentForEncryption(pdDocument);
            willEncrypt = true;
        }
        else
        {
            willEncrypt = false;
        }
    }
    COSDocument cosDoc = pdDocument.getDocument();
    COSDictionary trailer = cosDoc.getTrailer();
    COSArray idArray = (COSArray)trailer.getDictionaryObject( COSName.ID );
    boolean missingID = true;
    // check for an existing documentID
    if (idArray != null && idArray.size() == 2)
    {
        missingID = false;
    }
    // generate a fresh /ID when none exists; incremental updates always refresh the second part
    if( missingID || incrementalUpdate)
    {
        MessageDigest md5;
        try
        {
            md5 = MessageDigest.getInstance("MD5");
        }
        catch (NoSuchAlgorithmException e)
        {
            // should never happen
            throw new RuntimeException(e);
        }
        // algorithm says to use time/path/size/values in doc to generate the id.
        // we don't have path or size, so do the best we can
        md5.update( Long.toString(idTime).getBytes(Charsets.ISO_8859_1) );
        COSDictionary info = (COSDictionary)trailer.getDictionaryObject( COSName.INFO );
        if( info != null )
        {
            // mix the Info dictionary values into the digest
            Iterator<COSBase> values = info.getValues().iterator();
            while( values.hasNext() )
            {
                md5.update(values.next().toString().getBytes(Charsets.ISO_8859_1));
            }
        }
        // reuse origin documentID if available as first value
        COSString firstID = missingID ? new COSString( md5.digest() ) : (COSString)idArray.get(0);
        // it's ok to use the same ID for the second part if the ID is created for the first time
        COSString secondID = missingID ? firstID : new COSString( md5.digest() );
        idArray = new COSArray();
        idArray.add( firstID );
        idArray.add( secondID );
        trailer.setItem( COSName.ID, idArray );
    }
    // serializing the COS document drives all the visitFrom* callbacks
    cosDoc.accept(this);
}
/**
 * Writes the fdf document.
 *
 * @param doc The document to write.
 * @throws IOException If an error occurs while generating the data.
 */
public void write(FDFDocument doc) throws IOException
{
    fdfDocument = doc;
    // FDF output is never encrypted
    willEncrypt = false;
    fdfDocument.getDocument().accept(this);
}
/**
 * Writes the given COSString as a PDF string object, honoring its hex-form flag.
 *
 * @param string COSString to be written
 * @param output The stream to write to.
 * @throws IOException If there is an error writing to the stream.
 */
public static void writeString(COSString string, OutputStream output) throws IOException
{
    boolean forceHex = string.getForceHexForm();
    writeString(string.getBytes(), forceHex, output);
}
/**
 * Writes the given bytes as a PDF string object, letting the content decide
 * between literal and hex form.
 *
 * @param bytes byte array representation of a string to be written
 * @param output The stream to write to.
 * @throws IOException If there is an error writing to the stream.
 */
public static void writeString(byte[] bytes, OutputStream output) throws IOException
{
    // hex form is not forced; it is still chosen when the bytes are not ASCII
    writeString(bytes, false, output);
}
/**
 * Writes the bytes either as a literal string "(...)" — escaping the three
 * delimiter characters — when every byte is 7-bit ASCII and hex is not
 * forced, or as a hex string "&lt;...&gt;" otherwise.
 *
 * @param bytes byte array representation of a string to be written
 * @param forceHex true to always use the hex form
 * @param output The stream to write to.
 * @throws IOException If there is an error writing to the stream.
 */
private static void writeString(byte[] bytes, boolean forceHex, OutputStream output)
    throws IOException
{
    // scan for non-ASCII content; a negative byte has its high bit set and is
    // therefore outside the ASCII range
    boolean isAscii = true;
    for (byte b : bytes)
    {
        if (b < 0)
        {
            isAscii = false;
            break;
        }
    }
    if (isAscii && !forceHex)
    {
        // literal string form
        output.write('(');
        for (byte b : bytes)
        {
            // the string delimiters and the escape character itself must be escaped
            if (b == '(' || b == ')' || b == '\\')
            {
                output.write('\\');
            }
            output.write(b);
        }
        output.write(')');
    }
    else
    {
        // hex string form: two hex digits per byte
        output.write('<');
        for (byte b : bytes)
        {
            output.write(Hex.getBytes(b));
        }
        output.write('>');
    }
}
}
| |
/*
* Copyright 2002-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.araneaframework.core;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
/**
* <p>Assists in validating arguments.</p>
*
* <p>The class is based along the lines of JUnit. If an argument value is
* deemed invalid, an IllegalArgumentException is thrown. For example:</p>
*
* <pre>
* Assert.isTrue( i > 0, "The value must be greater than zero: ", i);
* Assert.notNull( surname, "The surname must not be null");
* </pre>
*
* <p>Copied from Jakarta Commons Lang for framework internal use.
* Please use the original from <a href="http://jakarta.apache.org/commons/lang/">http://jakarta.apache.org/commons/lang/</a>.</p>
*
* @author Ola Berg
* @author Stephen Colebourne
* @author Gary Gregory
* @author Norm Deane
* @author Jevgeni Kabanov (ekabanov@araneaframework.org)
*/
/**
 * Argument-validation helpers: each method throws {@link IllegalArgumentException}
 * when its condition fails. The {@code that}-taking overloads append a suffix
 * identifying the caller's class to the message.
 */
public abstract class Assert {

    /**
     * Builds a suffix like " (in 'com.example.Foo')" identifying the class of
     * {@code that}, used to point assertion messages at the offending component.
     */
    public static String thisToString(Object that) {
        notNull(that, "'this' can never be null, check what you passed to Assert!");
        return " (in '" + that.getClass().getName() + "')";
    }

    /** Fails with {@code message} plus the owner suffix when {@code expression} is false. */
    public static void isTrue(Object that, boolean expression, String message) {
        if (!expression) {
            throw new IllegalArgumentException(message + thisToString(that));
        }
    }

    /** Fails with {@code message} when {@code expression} is false. */
    public static void isTrue(boolean expression, String message) {
        if (!expression) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails with a generic message when {@code object} is null. */
    public static void notNull(Object object) {
        if (object == null) {
            throw new IllegalArgumentException("The object under assertion was null!");
        }
    }

    /** Fails with {@code message} plus the owner suffix when {@code object} is null. */
    public static void notNull(Object that, Object object, String message) {
        if (object == null) {
            throw new IllegalArgumentException(message + thisToString(that));
        }
    }

    /** Fails with {@code message} when {@code object} is null. */
    public static void notNull(Object object, String message) {
        if (object == null) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails, naming the parameter, when {@code object} is null. */
    public static void notNullParam(Object object, String parameterName) {
        if (object == null) {
            throw new IllegalArgumentException("The parameter '" + parameterName + "' must not be null!");
        }
    }

    /** Fails, naming the parameter and the owner, when {@code object} is null. */
    public static void notNullParam(Object that, Object object, String parameterName) {
        if (object == null) {
            // message wording kept consistent with the two-argument variant ("The parameter")
            throw new IllegalArgumentException("The parameter '" + parameterName + "' must not be null!" + thisToString(that));
        }
    }

    /** Fails when a non-null {@code object} is not an instance of {@code klass}. */
    public static void isInstanceOf(Object that, Class<?> klass, Object object, String message) {
        if (object == null) {
            return;
        }
        if (!klass.isAssignableFrom(object.getClass())) {
            throw new IllegalArgumentException(message + thisToString(that));
        }
    }

    /** Fails when a non-null {@code object} is not an instance of {@code klass}. */
    public static void isInstanceOf(Class<?> klass, Object object, String message) {
        if (object == null) {
            return;
        }
        if (!klass.isAssignableFrom(object.getClass())) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails, naming parameter, expected and actual type, when the type check fails. */
    public static void isInstanceOfParam(Object that, Class<?> klass, Object object, String parameterName) {
        if (object == null) {
            return;
        }
        if (!klass.isAssignableFrom(object.getClass())) {
            throw new IllegalArgumentException("Parameter '" + parameterName + "' must be of type '" + klass.getName()
                + "' but is of type '" + object.getClass().getName() + "'!" + thisToString(that));
        }
    }

    /** Fails, naming parameter, expected and actual type, when the type check fails. */
    public static void isInstanceOfParam(Class<?> klass, Object object, String parameterName) {
        if (object == null) {
            return;
        }
        if (!klass.isAssignableFrom(object.getClass())) {
            throw new IllegalArgumentException("Parameter '" + parameterName + "' must be of type '" + klass.getName()
                + "' but is of type '" + object.getClass().getName() + "'!");
        }
    }

    /** Fails when {@code string} is null or zero-length. */
    public static void notEmptyParam(String string, String parameterName) {
        if (string == null || string.length() == 0) {
            throw new IllegalArgumentException("Parameter '" + parameterName + "' must not be empty!");
        }
    }

    /** Fails, with the owner suffix, when {@code string} is null or zero-length. */
    public static void notEmptyParam(Object that, String string, String parameterName) {
        if (string == null || string.length() == 0) {
            throw new IllegalArgumentException("Parameter '" + parameterName + "' must not be empty!" + thisToString(that));
        }
    }

    /** Fails when {@code array} is null or has no elements. */
    public static void notEmpty(Object[] array, String message) {
        if (array == null || array.length == 0) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails when {@code collection} is null or has no elements. */
    public static void notEmpty(Collection<?> collection, String message) {
        if (collection == null || collection.isEmpty()) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails when {@code map} is null or has no entries. */
    public static void notEmpty(Map<?, ?> map, String message) {
        if (map == null || map.isEmpty()) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails when {@code string} is null or zero-length. */
    public static void notEmpty(String string, String message) {
        if (string == null || string.length() == 0) {
            throw new IllegalArgumentException(message);
        }
    }

    /** Fails when the collection is null or contains a null element (index is reported). */
    public static void noNullElementsParam(Collection<?> collection, String param) {
        notNullParam(collection, param);
        int i = 0;
        for (Iterator<?> it = collection.iterator(); it.hasNext();) {
            if (it.next() == null) {
                throw new IllegalArgumentException("The validated collection parameter '" + param
                    + "' contains null element at index: '" + i + "'!");
            }
            i++;
        }
    }

    /** Fails with {@code message} when the collection is null or contains a null element. */
    public static void noNullElements(Collection<?> collection, String message) {
        notNull(collection);
        // the message does not report an index, so no counter is kept
        for (Object element : collection) {
            if (element == null) {
                throw new IllegalArgumentException(message);
            }
        }
    }

    /** Fails, with owner suffix, when the collection is null or contains a null element (index is reported). */
    public static void noNullElementsParam(Object that, Collection<?> collection, String param) {
        notNullParam(collection, param);
        int i = 0;
        for (Object element : collection) {
            if (element == null) {
                throw new IllegalArgumentException("The validated collection parameter '" + param
                    + "' contains null element at index: '" + i + "'!" + thisToString(that));
            }
            i++;
        }
    }

    /** Fails with {@code message} plus the owner suffix when the collection contains a null element. */
    public static void noNullElements(Object that, Collection<?> collection, String message) {
        notNull(collection);
        // the message does not report an index, so no counter is kept
        for (Object element : collection) {
            if (element == null) {
                throw new IllegalArgumentException(message + thisToString(that));
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import org.apache.lucene.util.LuceneTestCase;
import org.bouncycastle.bcpg.ArmoredOutputStream;
import org.bouncycastle.bcpg.BCPGOutputStream;
import org.bouncycastle.bcpg.HashAlgorithmTags;
import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider;
import org.bouncycastle.openpgp.PGPEncryptedData;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPKeyPair;
import org.bouncycastle.openpgp.PGPPrivateKey;
import org.bouncycastle.openpgp.PGPPublicKey;
import org.bouncycastle.openpgp.PGPSecretKey;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureGenerator;
import org.bouncycastle.openpgp.operator.PGPDigestCalculator;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPDigestCalculatorProviderBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPKeyPair;
import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyEncryptorBuilder;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.PosixPermissionsResetter;
import org.junit.After;
import org.junit.Before;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystem;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.GroupPrincipal;
import java.nio.file.attribute.PosixFileAttributeView;
import java.nio.file.attribute.PosixFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.UserPrincipal;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.startsWith;
@LuceneTestCase.SuppressFileSystems("*")
public class InstallPluginCommandTests extends ESTestCase {
private InstallPluginCommand skipJarHellCommand;
private InstallPluginCommand defaultCommand;
private final Function<String, Path> temp;
private final MockTerminal terminal = new MockTerminal();
private final FileSystem fs;
private final boolean isPosix;
private final boolean isReal;
private final String javaIoTmpdir;
@SuppressForbidden(reason = "sets java.io.tmpdir")
public InstallPluginCommandTests(FileSystem fs, Function<String, Path> temp) {
    this.fs = fs;
    this.temp = temp;
    // POSIX-permission assertions only make sense on file systems exposing the "posix" view
    this.isPosix = fs.supportedFileAttributeViews().contains("posix");
    this.isReal = fs == PathUtils.getDefaultFileSystem();
    // route PathUtils-based lookups through the (possibly in-memory) test file system
    PathUtilsForTesting.installMock(fs);
    // remember the real tmpdir so tearDown can restore it after redirecting it into the test fs
    javaIoTmpdir = System.getProperty("java.io.tmpdir");
    System.setProperty("java.io.tmpdir", temp.apply("tmpdir").toString());
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // command variant whose jar-hell check is a no-op; most tests use this to
    // avoid scanning the real classpath
    skipJarHellCommand = new InstallPluginCommand() {
        @Override
        void jarHellCheck(PluginInfo candidateInfo, Path candidate, Path pluginsDir, Path modulesDir) throws Exception {
            // no jarhell check
        }
    };
    defaultCommand = new InstallPluginCommand();
    // discard output captured by earlier tests on the shared terminal
    terminal.reset();
}
@Override
@After
@SuppressForbidden(reason = "resets java.io.tmpdir")
public void tearDown() throws Exception {
    defaultCommand.close();
    skipJarHellCommand.close();
    // undo the global state mutated in the constructor: tmpdir and the mocked file system
    System.setProperty("java.io.tmpdir", javaIoTmpdir);
    PathUtilsForTesting.teardown();
    super.tearDown();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
    // pairs a file system with a factory for temp directories rooted inside it
    class Parameter {
        private final FileSystem fileSystem;
        private final Function<String, Path> temp;
        Parameter(FileSystem fileSystem, String root) {
            this(fileSystem, s -> {
                try {
                    return Files.createTempDirectory(fileSystem.getPath(root), s);
                } catch (IOException e) {
                    // Function cannot throw a checked exception, so wrap it
                    throw new RuntimeException(e);
                }
            });
        }
        Parameter(FileSystem fileSystem, Function<String, Path> temp) {
            this.fileSystem = fileSystem;
            this.temp = temp;
        }
    }
    List<Parameter> parameters = new ArrayList<>();
    // in-memory file systems emulating Windows, macOS and generic Unix, plus the real default fs
    parameters.add(new Parameter(Jimfs.newFileSystem(Configuration.windows()), "c:\\"));
    parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.osX())), "/"));
    parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.unix())), "/"));
    parameters.add(new Parameter(PathUtils.getDefaultFileSystem(), LuceneTestCase::createTempDir));
    return parameters.stream().map(p -> new Object[] { p.fileSystem, p.temp }).collect(Collectors.toList());
}
/** Adds the attribute views required for POSIX permission/owner checks to a Jimfs configuration. */
private static Configuration toPosix(Configuration configuration) {
    return configuration.toBuilder()
        .setAttributeViews("basic", "owner", "posix", "unix")
        .build();
}
/**
 * Creates a test environment with bin, config and plugins directories.
 *
 * @param fs the test file system (directories are created via {@code temp}, which is rooted in it)
 * @param temp factory for temp directories
 * @return the home path paired with an Environment pointing at it
 */
static Tuple<Path, Environment> createEnv(FileSystem fs, Function<String, Path> temp) throws IOException {
    Path home = temp.apply("install-plugin-command-tests");
    Files.createDirectories(home.resolve("bin"));
    // a launcher script must exist for bin-directory handling during install
    Files.createFile(home.resolve("bin").resolve("elasticsearch"));
    Files.createDirectories(home.resolve("config"));
    Files.createFile(home.resolve("config").resolve("elasticsearch.yml"));
    Path plugins = Files.createDirectories(home.resolve("plugins"));
    assertTrue(Files.exists(plugins));
    Settings settings = Settings.builder().put("path.home", home).build();
    return Tuple.tuple(home, TestEnvironment.newEnvironment(settings));
}
/** Allocates a scratch directory in which a fake plugin's files are assembled. */
static Path createPluginDir(Function<String, Path> temp) throws IOException {
    return temp.apply("pluginDir");
}
/** Creates a fake jar containing one empty entry per named class (no package names). */
static void writeJar(Path jar, String... classes) throws IOException {
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(jar))) {
        for (int i = 0; i < classes.length; i++) {
            // entry content is irrelevant; only the ".class" name matters to the tests
            stream.putNextEntry(new ZipEntry(classes[i] + ".class"));
        }
    }
}
/**
 * Zips the directory tree under {@code structure} into a fresh temp .zip file.
 *
 * @param structure root of the tree to package
 * @param prefix entry-name prefix inside the zip, or null for entries at the zip root
 * @return the path of the created zip file
 */
static Path writeZip(Path structure, String prefix) throws IOException {
    Path zip = createTempDir().resolve(structure.getFileName() + ".zip");
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) {
        Files.walkFileTree(structure, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // store each file relative to the tree root, optionally below the prefix
                String target = (prefix == null ? "" : prefix + "/") + structure.relativize(file).toString();
                stream.putNextEntry(new ZipEntry(target));
                Files.copy(file, stream);
                return FileVisitResult.CONTINUE;
            }
        });
    }
    return zip;
}
/** Creates a plugin .zip and returns its URL string for passing to the install command. */
static String createPluginUrl(String name, Path structure, String... additionalProps) throws IOException {
    Path zip = createPlugin(name, structure, additionalProps);
    return zip.toUri().toURL().toString();
}
/**
 * Writes a fake plugin's descriptor properties and a stub jar into {@code structure}.
 *
 * @param name plugin name; also used to derive the fake class name (e.g. "fake" -> "FakePlugin")
 * @param structure directory receiving the descriptor and jar
 * @param additionalProps extra key/value pairs appended to the descriptor
 */
static void writePlugin(String name, Path structure, String... additionalProps) throws IOException {
    // mandatory descriptor entries first, then any test-specific extras
    String[] properties = Stream.concat(
        Stream.of(
            "description",
            "fake desc",
            "name",
            name,
            "version",
            "1.0",
            "elasticsearch.version",
            Version.CURRENT.toString(),
            "java.version",
            System.getProperty("java.specification.version"),
            "classname",
            "FakePlugin"
        ),
        Arrays.stream(additionalProps)
    ).toArray(String[]::new);
    PluginTestUtil.writePluginProperties(structure, properties);
    // the jar only needs a class file whose name matches the capitalized plugin name
    String className = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1) + "Plugin";
    writeJar(structure.resolve("plugin.jar"), className);
}
/** Writes a plugin-security.policy file granting the given RuntimePermission names. */
static void writePluginSecurityPolicy(Path pluginDir, String... permissions) throws IOException {
    StringBuilder policy = new StringBuilder("grant {\n ");
    for (String permission : permissions) {
        policy.append("permission java.lang.RuntimePermission \"")
            .append(permission)
            .append("\";");
    }
    policy.append("\n};\n");
    Files.write(pluginDir.resolve("plugin-security.policy"), policy.toString().getBytes(StandardCharsets.UTF_8));
}
/** Materializes a fake plugin's files, then packages the directory at the zip root. */
static Path createPlugin(String name, Path structure, String... additionalProps) throws IOException {
    writePlugin(name, structure, additionalProps);
    // null prefix: entries go at the root of the zip
    return writeZip(structure, null);
}
/** Installs a single plugin using the jar-hell-skipping command (the common case in tests). */
void installPlugin(String pluginUrl, Path home) throws Exception {
    installPlugin(pluginUrl, home, skipJarHellCommand);
}
/** Installs several plugins in one invocation using the jar-hell-skipping command. */
void installPlugins(final List<String> pluginUrls, final Path home) throws Exception {
    installPlugins(pluginUrls, home, skipJarHellCommand);
}
/** Installs a single plugin with the given command; a null url means "no plugins given". */
void installPlugin(String pluginUrl, Path home, InstallPluginCommand command) throws Exception {
    final List<String> urls = pluginUrl == null ? List.of() : List.of(pluginUrl);
    installPlugins(urls, home, command);
}
/** Runs the given install command against a fresh Environment rooted at {@code home}. */
void installPlugins(final List<String> pluginUrls, final Path home, final InstallPluginCommand command) throws Exception {
    Settings settings = Settings.builder().put("path.home", home).build();
    final Environment env = TestEnvironment.newEnvironment(settings);
    // batch == false: plugins are confirmed interactively via the mock terminal
    command.execute(terminal, pluginUrls, false, env);
}
/**
 * Full post-install verification: plugin directory contents, bin/config side
 * effects, and that no staging (".installing-*") directories were left behind.
 */
void assertPlugin(String name, Path original, Environment env) throws IOException {
    assertPluginInternal(name, env.pluginsFile());
    assertConfigAndBin(name, original, env);
    assertInstallCleaned(env);
}
/**
 * Verifies the installed plugin directory: it exists, carries the expected
 * POSIX permissions (when the fs supports them), contains the jar, and that
 * bin/config subdirectories were NOT copied into it.
 */
void assertPluginInternal(String name, Path pluginsFile) throws IOException {
    Path got = pluginsFile.resolve(name);
    assertTrue("dir " + name + " exists", Files.exists(got));
    if (isPosix) {
        // world-readable/executable, owner-writable — the expected install permissions
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(got);
        assertThat(
            perms,
            containsInAnyOrder(
                PosixFilePermission.OWNER_READ,
                PosixFilePermission.OWNER_WRITE,
                PosixFilePermission.OWNER_EXECUTE,
                PosixFilePermission.GROUP_READ,
                PosixFilePermission.GROUP_EXECUTE,
                PosixFilePermission.OTHERS_READ,
                PosixFilePermission.OTHERS_EXECUTE
            )
        );
    }
    assertTrue("jar was copied", Files.exists(got.resolve("plugin.jar")));
    // bin and config are installed beside the plugin dir, never inside it
    assertFalse("bin was not copied", Files.exists(got.resolve("bin")));
    assertFalse("config was not copied", Files.exists(got.resolve("config")));
}
/**
 * When the original plugin carried bin/ or config/ directories, verifies they
 * were installed to the environment's bin/config locations with the expected
 * permissions (bin) and owner/group (config) on POSIX file systems.
 */
void assertConfigAndBin(String name, Path original, Environment env) throws IOException {
    if (Files.exists(original.resolve("bin"))) {
        Path binDir = env.binFile().resolve(name);
        assertTrue("bin dir exists", Files.exists(binDir));
        assertTrue("bin is a dir", Files.isDirectory(binDir));
        PosixFileAttributes binAttributes = null;
        if (isPosix) {
            binAttributes = Files.readAttributes(env.binFile(), PosixFileAttributes.class);
        }
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) {
            for (Path file : stream) {
                // bin entries must be plain files with the executable permission set
                assertFalse("not a dir", Files.isDirectory(file));
                if (isPosix) {
                    PosixFileAttributes attributes = Files.readAttributes(file, PosixFileAttributes.class);
                    assertEquals(InstallPluginCommand.BIN_FILES_PERMS, attributes.permissions());
                }
            }
        }
    }
    if (Files.exists(original.resolve("config"))) {
        Path configDir = env.configFile().resolve(name);
        assertTrue("config dir exists", Files.exists(configDir));
        assertTrue("config is a dir", Files.isDirectory(configDir));
        UserPrincipal user = null;
        GroupPrincipal group = null;
        if (isPosix) {
            // installed config must inherit owner/group from the existing config dir
            PosixFileAttributes configAttributes = Files.getFileAttributeView(env.configFile(), PosixFileAttributeView.class)
                .readAttributes();
            user = configAttributes.owner();
            group = configAttributes.group();
            PosixFileAttributes attributes = Files.getFileAttributeView(configDir, PosixFileAttributeView.class).readAttributes();
            assertThat(attributes.owner(), equalTo(user));
            assertThat(attributes.group(), equalTo(group));
        }
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(configDir)) {
            for (Path file : stream) {
                assertFalse("not a dir", Files.isDirectory(file));
                if (isPosix) {
                    PosixFileAttributes attributes = Files.readAttributes(file, PosixFileAttributes.class);
                    if (user != null) {
                        assertThat(attributes.owner(), equalTo(user));
                    }
                    if (group != null) {
                        assertThat(attributes.group(), equalTo(group));
                    }
                }
            }
        }
    }
}
/** Fails if a leftover ".installing-*" staging directory indicates an uncleaned install. */
void assertInstallCleaned(Environment env) throws IOException {
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
        for (Path file : stream) {
            String fileName = file.getFileName().toString();
            if (fileName.startsWith(".installing")) {
                fail("Installation dir still exists, " + file);
            }
        }
    }
}
/** Installing with no plugin ids at all must be rejected up front. */
public void testMissingPluginId() throws IOException {
    final Tuple<Path, Environment> testEnv = createEnv(fs, temp);
    final UserException failure = expectThrows(UserException.class, () -> installPlugin(null, testEnv.v1()));
    assertTrue(failure.getMessage(), failure.getMessage().contains("at least one plugin id is required"));
}
/** Happy path: a single well-formed plugin zip installs cleanly. */
public void testSomethingWorks() throws Exception {
    Tuple<Path, Environment> testEnv = createEnv(fs, temp);
    Path sourceDir = createPluginDir(temp);
    String zipUrl = createPluginUrl("fake", sourceDir);
    installPlugin(zipUrl, testEnv.v1());
    assertPlugin("fake", sourceDir, testEnv.v2());
}
// Several distinct plugins can be installed in a single invocation.
public void testMultipleWorks() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String firstZipUrl = createPluginUrl("fake1", pluginDirectory);
    final String secondZipUrl = createPluginUrl("fake2", pluginDirectory);
    installPlugins(List.of(firstZipUrl, secondZipUrl), environment.v1());
    assertPlugin("fake1", pluginDirectory, environment.v2());
    assertPlugin("fake2", pluginDirectory, environment.v2());
}
// The same plugin id may not be listed twice in one install invocation.
public void testDuplicateInstall() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugins(List.of(zipUrl, zipUrl), environment.v1()));
    assertThat(exception, hasToString(containsString("duplicate plugin id [" + zipUrl + "]")));
}
// When one plugin in a multi-install fails, previously installed plugins are rolled back.
public void testTransaction() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final FileNotFoundException exception = expectThrows(
        FileNotFoundException.class,
        () -> installPlugins(List.of(zipUrl, zipUrl + "does-not-exist"), environment.v1())
    );
    assertThat(exception, hasToString(containsString("does-not-exist")));
    // fake should have been removed when the file not found exception occurred
    final Path fakeInstallPath = environment.v2().pluginsFile().resolve("fake");
    assertFalse(Files.exists(fakeInstallPath));
    assertInstallCleaned(environment.v2());
}
// A stale ".removing-*" marker left by a failed uninstall must block new
// installs until the user finishes removing the broken plugin.
public void testInstallFailsIfPreviouslyRemovedPluginFailed() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPluginUrl("fake", pluginDir);
    // simulate the marker directory a failed removal of plugin "failed" leaves behind
    final Path removing = env.v2().pluginsFile().resolve(".removing-failed");
    Files.createDirectory(removing);
    final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, env.v1()));
    final String expected = String.format(
        Locale.ROOT,
        "found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]",
        removing
    );
    assertThat(e, hasToString(containsString(expected)));
}
// A plugin URL whose path contains spaces must still download and install.
public void testSpaceInUrl() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final Path zipWithSpaces = createTempFile("foo bar", ".zip");
    try (InputStream in = FileSystemUtils.openFileURLStream(new URL(zipUrl))) {
        Files.copy(in, zipWithSpaces, StandardCopyOption.REPLACE_EXISTING);
    }
    installPlugin(zipWithSpaces.toUri().toURL().toString(), environment.v1());
    assertPlugin("fake", pluginDirectory, environment.v2());
}
// A malformed URL that merely resembles maven coordinates must fail URL parsing.
public void testMalformedUrlNotMaven() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    // has two colons, so it appears similar to maven coordinates
    final MalformedURLException exception = expectThrows(
        MalformedURLException.class,
        () -> installPlugin("://host:1234", environment.v1())
    );
    assertTrue(exception.getMessage(), exception.getMessage().contains("no protocol"));
}
// A windows-style file path with colons must not be mistaken for maven coordinates.
public void testFileNotMaven() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final String dir = randomAlphaOfLength(10) + ":" + randomAlphaOfLength(5) + "\\" + randomAlphaOfLength(5);
    final Exception exception = expectThrows(
        Exception.class,
        // has two colons, so it appears similar to maven coordinates
        () -> installPlugin("file:" + dir, environment.v1())
    );
    assertFalse(exception.getMessage(), exception.getMessage().contains("maven.org"));
    assertTrue(exception.getMessage(), exception.getMessage().contains(dir));
}
// An id that is neither a URL, maven coordinates, nor an official name is rejected.
public void testUnknownPlugin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin("foo", environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("Unknown plugin foo"));
}
// When the plugins directory is not writable, the install must fail with an
// IOException naming that directory, and leave no partial install behind.
public void testPluginsDirReadOnly() throws Exception {
    assumeTrue("posix and filesystem", isPosix && isReal);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    // resetter restores the original permissions when the try block exits
    try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) {
        // strip all permissions so the plugins dir cannot be written to
        pluginsAttrs.setPermissions(new HashSet<>());
        String pluginZip = createPluginUrl("fake", pluginDir);
        IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1()));
        assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString()));
    }
    assertInstallCleaned(env.v2());
}
// A plugin whose name collides with a built-in module must be rejected.
public void testBuiltinModule() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("lang-painless", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("is a system module"));
    assertInstallCleaned(environment.v2());
}
// "x-pack" is a built-in module and may not be installed as a plugin zip.
public void testBuiltinXpackModule() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("x-pack", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("is a system module"));
    assertInstallCleaned(environment.v2());
}
// A plugin whose jars duplicate a class already present must fail the jar
// hell check and be cleaned up.
public void testJarHell() throws Exception {
    // jar hell test needs a real filesystem
    assumeTrue("real filesystem", isReal);
    Tuple<Path, Environment> environment = createEnv(fs, temp);
    Path pluginDirectory = createPluginDir(temp);
    // other.jar carries the same FakePlugin class that plugin.jar will contain
    writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin");
    String pluginZip = createPluginUrl("fake", pluginDirectory); // adds plugin.jar with FakePlugin
    // defaultCommand performs the jar hell check (unlike skipJarHellCommand)
    IllegalStateException e = expectThrows(
        IllegalStateException.class,
        () -> installPlugin(pluginZip, environment.v1(), defaultCommand)
    );
    assertTrue(e.getMessage(), e.getMessage().contains("jar hell"));
    assertInstallCleaned(environment.v2());
}
// Two plugins packaging the same class install into isolated directories
// without conflicting.
public void testIsolatedPlugins() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    // these both share the same FakePlugin class
    final Path firstPluginDir = createPluginDir(temp);
    final String firstZipUrl = createPluginUrl("fake1", firstPluginDir);
    installPlugin(firstZipUrl, environment.v1());
    final Path secondPluginDir = createPluginDir(temp);
    final String secondZipUrl = createPluginUrl("fake2", secondPluginDir);
    installPlugin(secondZipUrl, environment.v1());
    assertPlugin("fake1", firstPluginDir, environment.v2());
    assertPlugin("fake2", secondPluginDir, environment.v2());
}
// Installing on top of an already installed plugin must fail cleanly.
public void testExistingPlugin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    installPlugin(zipUrl, environment.v1());
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("already exists"));
    assertInstallCleaned(environment.v2());
}
// A plugin's bin/ scripts are installed alongside the plugin.
public void testBin() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path binDirectory = pluginDirectory.resolve("bin");
    Files.createDirectory(binDirectory);
    Files.createFile(binDirectory.resolve("somescript"));
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    installPlugin(zipUrl, environment.v1());
    assertPlugin("fake", pluginDirectory, environment.v2());
}
// A "bin" entry that is a regular file rather than a directory is rejected.
public void testBinNotDir() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path binEntry = pluginDirectory.resolve("bin");
    Files.createFile(binEntry);
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("not a directory"));
    assertInstallCleaned(environment.v2());
}
// Subdirectories inside a plugin's bin/ are not allowed.
public void testBinContainsDir() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path nestedBinDir = pluginDirectory.resolve("bin").resolve("foo");
    Files.createDirectories(nestedBinDir);
    Files.createFile(nestedBinDir.resolve("somescript"));
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("Directories not allowed in bin dir for plugin"));
    assertInstallCleaned(environment.v2());
}
// A plugin bin script must not clobber an existing file in the environment's bin dir.
public void testBinConflict() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path binDirectory = pluginDirectory.resolve("bin");
    Files.createDirectory(binDirectory);
    Files.createFile(binDirectory.resolve("somescript"));
    // "elasticsearch" collides with the existing launcher script name
    final String zipUrl = createPluginUrl("elasticsearch", pluginDirectory);
    final FileAlreadyExistsException exception = expectThrows(
        FileAlreadyExistsException.class,
        () -> installPlugin(zipUrl, environment.v1())
    );
    assertTrue(exception.getMessage(), exception.getMessage().contains(environment.v2().binFile().resolve("elasticsearch").toString()));
    assertInstallCleaned(environment.v2());
}
// Installed bin scripts must be forced executable even when the environment's
// bin directory was missing an execute bit beforehand.
public void testBinPermissions() throws Exception {
    assumeTrue("posix filesystem", isPosix);
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path binDir = pluginDir.resolve("bin");
    Files.createDirectory(binDir);
    Files.createFile(binDir.resolve("somescript"));
    String pluginZip = createPluginUrl("fake", pluginDir);
    // resetter restores the original bin permissions when the try block exits
    try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) {
        Set<PosixFilePermission> perms = binAttrs.getCopyPermissions();
        // make sure at least one execute perm is missing, so we know we forced it during installation
        perms.remove(PosixFilePermission.GROUP_EXECUTE);
        binAttrs.setPermissions(perms);
        installPlugin(pluginZip, env.v1());
        assertPlugin("fake", pluginDir, env.v2());
    }
}
// Verifies permissions on installed plugin files: directories and platform
// executables get 755, regular files (jars, resources) get 644.
public void testPluginPermissions() throws Exception {
    assumeTrue("posix filesystem", isPosix);
    final Tuple<Path, Environment> env = createEnv(fs, temp);
    final Path pluginDir = createPluginDir(temp);
    // build a plugin layout with a jar, a resources dir, and a platform-specific binary
    final Path resourcesDir = pluginDir.resolve("resources");
    final Path platformDir = pluginDir.resolve("platform");
    final Path platformNameDir = platformDir.resolve("linux-x86_64");
    final Path platformBinDir = platformNameDir.resolve("bin");
    Files.createDirectories(platformBinDir);
    Files.createFile(pluginDir.resolve("fake-" + Version.CURRENT.toString() + ".jar"));
    Files.createFile(platformBinDir.resolve("fake_executable"));
    Files.createDirectory(resourcesDir);
    Files.createFile(resourcesDir.resolve("resource"));
    final String pluginZip = createPluginUrl("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
    // resolve the installed copies and check each one's mode
    final Path fake = env.v2().pluginsFile().resolve("fake");
    final Path resources = fake.resolve("resources");
    final Path platform = fake.resolve("platform");
    final Path platformName = platform.resolve("linux-x86_64");
    final Path bin = platformName.resolve("bin");
    assert755(fake);
    assert644(fake.resolve("fake-" + Version.CURRENT + ".jar"));
    assert755(resources);
    assert644(resources.resolve("resource"));
    assert755(platform);
    assert755(platformName);
    // files under platform/*/bin are executables and must be 755
    assert755(bin.resolve("fake_executable"));
}
/**
 * Asserts the file at {@code path} has exactly mode 644 (rw-r--r--): read for
 * everyone, write for owner only, no execute bits anywhere.
 */
private void assert644(final Path path) throws IOException {
    final Set<PosixFilePermission> permissions = Files.getPosixFilePermissions(path);
    for (final PosixFilePermission permission : PosixFilePermission.values()) {
        final boolean shouldBeSet;
        switch (permission) {
            case OWNER_READ:
            case OWNER_WRITE:
            case GROUP_READ:
            case OTHERS_READ:
                shouldBeSet = true;
                break;
            default:
                shouldBeSet = false;
                break;
        }
        if (shouldBeSet) {
            assertTrue(permissions.contains(permission));
        } else {
            assertFalse(permissions.contains(permission));
        }
    }
}
/**
 * Asserts the file at {@code path} has exactly mode 755 (rwxr-xr-x): full
 * access for owner, read and execute for group and others.
 */
private void assert755(final Path path) throws IOException {
    final Set<PosixFilePermission> permissions = Files.getPosixFilePermissions(path);
    for (final PosixFilePermission permission : PosixFilePermission.values()) {
        final boolean shouldBeSet;
        switch (permission) {
            case GROUP_WRITE:
            case OTHERS_WRITE:
                shouldBeSet = false;
                break;
            default:
                shouldBeSet = true;
                break;
        }
        if (shouldBeSet) {
            assertTrue(permissions.contains(permission));
        } else {
            assertFalse(permissions.contains(permission));
        }
    }
}
// A plugin's config/ files are installed into the environment's config directory.
public void testConfig() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path configDirectory = pluginDirectory.resolve("config");
    Files.createDirectory(configDirectory);
    Files.createFile(configDirectory.resolve("custom.yml"));
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    installPlugin(zipUrl, environment.v1());
    assertPlugin("fake", pluginDirectory, environment.v2());
}
// Existing user config must survive an install: custom.yml keeps its prior
// contents while new files (other.yml) are added alongside it.
public void testExistingConfig() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    // pre-populate the environment's config dir for the plugin
    Path envConfigDir = env.v2().configFile().resolve("fake");
    Files.createDirectories(envConfigDir);
    Files.write(envConfigDir.resolve("custom.yml"), "existing config".getBytes(StandardCharsets.UTF_8));
    // the plugin ships a conflicting custom.yml and a brand-new other.yml
    Path pluginDir = createPluginDir(temp);
    Path configDir = pluginDir.resolve("config");
    Files.createDirectory(configDir);
    Files.write(configDir.resolve("custom.yml"), "new config".getBytes(StandardCharsets.UTF_8));
    Files.createFile(configDir.resolve("other.yml"));
    String pluginZip = createPluginUrl("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    assertPlugin("fake", pluginDir, env.v2());
    // the pre-existing custom.yml must not have been overwritten ...
    List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yml"), StandardCharsets.UTF_8);
    assertEquals(1, configLines.size());
    assertEquals("existing config", configLines.get(0));
    // ... but the new file from the plugin was copied in
    assertTrue(Files.exists(envConfigDir.resolve("other.yml")));
}
// A "config" entry that is a regular file rather than a directory is rejected.
public void testConfigNotDir() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    // note: createPluginDir already created pluginDir; the redundant
    // Files.createDirectories(pluginDir) call was removed
    Path configDir = pluginDir.resolve("config");
    // create "config" as a plain file so the installer sees a non-directory entry
    Files.createFile(configDir);
    String pluginZip = createPluginUrl("fake", pluginDir);
    UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
    assertTrue(e.getMessage(), e.getMessage().contains("not a directory"));
    assertInstallCleaned(env.v2());
}
// Subdirectories inside a plugin's config/ are not allowed.
public void testConfigContainsDir() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    final Path nestedConfigDir = pluginDirectory.resolve("config").resolve("foo");
    Files.createDirectories(nestedConfigDir);
    Files.createFile(nestedConfigDir.resolve("myconfig.yml"));
    final String zipUrl = createPluginUrl("fake", pluginDirectory);
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("Directories not allowed in config dir for plugin"));
    assertInstallCleaned(environment.v2());
}
// A zip without plugin-descriptor.properties must fail with NoSuchFileException.
public void testMissingDescriptor() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    Files.createFile(pluginDirectory.resolve("fake.yml"));
    final String zipUrl = writeZip(pluginDirectory, null).toUri().toURL().toString();
    final NoSuchFileException exception = expectThrows(NoSuchFileException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("plugin-descriptor.properties"));
    assertInstallCleaned(environment.v2());
}
// Old-style zips that wrap the plugin in an "elasticsearch" directory are rejected.
public void testContainsIntermediateDirectory() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDirectory = createPluginDir(temp);
    Files.createFile(pluginDirectory.resolve(PluginInfo.ES_PLUGIN_PROPERTIES));
    final String zipUrl = writeZip(pluginDirectory, "elasticsearch").toUri().toURL().toString();
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertThat(exception.getMessage(), containsString("This plugin was built with an older plugin structure"));
    assertInstallCleaned(environment.v2());
}
// A zip entry resolving outside the plugin directory (zip-slip) must be rejected.
public void testZipRelativeOutsideEntryName() throws Exception {
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path brokenZip = createTempDir().resolve("broken.zip");
    try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(brokenZip))) {
        stream.putNextEntry(new ZipEntry("../blah"));
    }
    final String zipUrl = brokenZip.toUri().toURL().toString();
    final UserException exception = expectThrows(UserException.class, () -> installPlugin(zipUrl, environment.v1()));
    assertTrue(exception.getMessage(), exception.getMessage().contains("resolving outside of plugin directory"));
    assertInstallCleaned(environment.v2());
}
// The --help output must list official plugins in sorted order and must not
// leak non-plugin directories such as "qa" or example plugins.
public void testOfficialPluginsHelpSortedAndMissingObviouslyWrongPlugins() throws Exception {
    MockTerminal terminal = new MockTerminal();
    new InstallPluginCommand() {
        @Override
        protected boolean addShutdownHook() {
            return false;
        }
    }.main(new String[] { "--help" }, terminal);
    try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) {
        // first find the beginning of our list of official plugins;
        // fail fast instead of NPEing if the marker is missing from the output
        String line = reader.readLine();
        while (line != null && line.endsWith("may be installed by name:") == false) {
            line = reader.readLine();
        }
        assertNotNull("official plugin list header not found in help output", line);
        // the first listed plugin must also be checked against the qa/example filter
        // (previously it escaped the check because the check ran only after advancing)
        String prev = reader.readLine();
        assertNotNull("expected at least one official plugin in help output", prev);
        assertThat(prev, not(endsWith("qa")));
        assertThat(prev, not(endsWith("example")));
        // now check each line compares greater than the last, until we reach an empty line
        line = reader.readLine();
        while (line != null && line.trim().isEmpty() == false) {
            assertTrue(prev + " < " + line, prev.compareTo(line) < 0);
            // qa is not really a plugin and it shouldn't sneak in
            assertThat(line, not(endsWith("qa")));
            assertThat(line, not(endsWith("example")));
            prev = line;
            line = reader.readLine();
        }
    }
}
// Installing "x-pack" as a plugin is always rejected; the exception type and
// message depend on the distribution flavor.
public void testInstallXPack() throws IOException {
    runInstallXPackTest(Build.Flavor.DEFAULT, UserException.class, "this distribution of Elasticsearch contains X-Pack by default");
    runInstallXPackTest(
        Build.Flavor.OSS,
        UserException.class,
        "X-Pack is not available with the oss distribution; to use X-Pack features use the default distribution"
    );
    // an unknown flavor indicates a broken build and is an internal error
    runInstallXPackTest(Build.Flavor.UNKNOWN, IllegalStateException.class, "your distribution is broken");
}
/**
 * Runs "install x-pack" against a command stubbed to report the given build
 * flavor, and asserts the expected exception type and message.
 *
 * @param flavor          build flavor the stubbed command reports
 * @param clazz           exception type expected from the install attempt
 * @param expectedMessage substring expected in the exception's toString
 */
private <T extends Exception> void runInstallXPackTest(final Build.Flavor flavor, final Class<T> clazz, final String expectedMessage)
    throws IOException {
    final InstallPluginCommand flavorCommand = new InstallPluginCommand() {
        @Override
        Build.Flavor buildFlavor() {
            return flavor;
        }
    };
    final Environment environment = createEnv(fs, temp).v2();
    final T exception = expectThrows(clazz, () -> flavorCommand.execute(terminal, List.of("x-pack"), false, environment));
    assertThat(exception, hasToString(containsString(expectedMessage)));
}
// Misspelled official plugin names should produce "did you mean" suggestions.
public void testInstallMisspelledOfficialPlugins() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    // single close match: one suggestion
    UserException e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1()));
    assertThat(e.getMessage(), containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?"));
    // several close matches: all are suggested
    e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1()));
    assertThat(e.getMessage(), containsString("Unknown plugin repository, did you mean any of [repository-s3, repository-gcs]?"));
    // nothing close: no suggestion at all
    e = expectThrows(UserException.class, () -> installPlugin("unknown_plugin", env.v1()));
    assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin"));
}
// Batch mode auto-confirms the security-permissions prompt (the warning is
// still printed) and suppresses the download progress bar.
public void testBatchFlag() throws Exception {
    MockTerminal terminal = new MockTerminal();
    installPlugin(terminal, true);
    assertThat(terminal.getErrorOutput(), containsString("WARNING: plugin requires additional permissions"));
    assertThat(terminal.getOutput(), containsString("-> Downloading"));
    // No progress bar in batch mode
    assertThat(terminal.getOutput(), not(containsString("100%")));
}
// At NORMAL or VERBOSE verbosity the download progress bar ("100%") is shown.
public void testQuietFlagDisabled() throws Exception {
    MockTerminal terminal = new MockTerminal();
    terminal.setVerbosity(randomFrom(Terminal.Verbosity.NORMAL, Terminal.Verbosity.VERBOSE));
    installPlugin(terminal, false);
    assertThat(terminal.getOutput(), containsString("100%"));
}
// At SILENT verbosity the download progress bar is suppressed.
public void testQuietFlagEnabled() throws Exception {
    MockTerminal terminal = new MockTerminal();
    terminal.setVerbosity(Terminal.Verbosity.SILENT);
    installPlugin(terminal, false);
    assertThat(terminal.getOutput(), not(containsString("100%")));
}
// Re-installing an already installed plugin fails with guidance to remove the
// existing installation first.
public void testPluginAlreadyInstalled() throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    String pluginZip = createPluginUrl("fake", pluginDir);
    installPlugin(pluginZip, env.v1());
    // the jar-hell-checking and non-checking commands must behave identically here
    final UserException e = expectThrows(
        UserException.class,
        () -> installPlugin(pluginZip, env.v1(), randomFrom(skipJarHellCommand, defaultCommand))
    );
    assertThat(
        e.getMessage(),
        equalTo(
            "plugin directory ["
                + env.v2().pluginsFile().resolve("fake")
                + "] already exists; "
                + "if you need to update the plugin, uninstall it first using command 'remove fake'"
        )
    );
}
/**
 * Installs a generated "fake" plugin through {@code skipJarHellCommand} using
 * the given terminal; in batch mode a security policy is added so the
 * additional-permissions prompt is exercised.
 */
private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    // if batch is enabled, we also want to add a security policy
    if (isBatch) {
        writePluginSecurityPolicy(pluginDir, "setFactory");
    }
    String pluginZip = createPlugin("fake", pluginDir).toUri().toURL().toString();
    skipJarHellCommand.execute(terminal, List.of(pluginZip), isBatch, env.v2());
}
/**
 * Drives a full install through {@link InstallPluginCommand} with the network
 * layer stubbed out: the "download" copies a locally built plugin zip, and the
 * checksum and signature endpoints are served from locally generated files.
 *
 * @param pluginId      id passed to the install command (official name, maven coordinates, or URL)
 * @param name          expected name of the installed plugin
 * @param url           download URL the command is expected to compute
 * @param stagingHash   staging build hash the stubbed command reports, or null
 * @param isSnapshot    whether the stubbed command behaves as a snapshot build
 * @param shaExtension  extension of the checksum file to serve (e.g. ".sha512")
 * @param shaCalculator computes the checksum-file contents from the zip bytes
 * @param secretKey     PGP key whose public half the stubbed command trusts
 * @param signature     produces the detached .asc signature from zip bytes and key
 */
void assertInstallPluginFromUrl(
    final String pluginId,
    final String name,
    final String url,
    final String stagingHash,
    final boolean isSnapshot,
    final String shaExtension,
    final Function<byte[], String> shaCalculator,
    final PGPSecretKey secretKey,
    final BiFunction<byte[], PGPSecretKey, String> signature
) throws Exception {
    Tuple<Path, Environment> env = createEnv(fs, temp);
    Path pluginDir = createPluginDir(temp);
    Path pluginZip = createPlugin(name, pluginDir);
    InstallPluginCommand command = new InstallPluginCommand() {
        @Override
        Path downloadZip(Terminal terminal, String urlString, Path tmpDir, boolean isBatch) throws IOException {
            // verify the command computed the expected URL, then "download" by copying the local zip
            assertEquals(url, urlString);
            Path downloadedPath = tmpDir.resolve("downloaded.zip");
            Files.copy(pluginZip, downloadedPath);
            return downloadedPath;
        }

        @Override
        URL openUrl(String urlString) throws IOException {
            if ((url + shaExtension).equals(urlString)) {
                // calc sha and return file URL to it
                Path shaFile = temp.apply("shas").resolve("downloaded.zip" + shaExtension);
                byte[] zipbytes = Files.readAllBytes(pluginZip);
                String checksum = shaCalculator.apply(zipbytes);
                Files.write(shaFile, checksum.getBytes(StandardCharsets.UTF_8));
                return shaFile.toUri().toURL();
            } else if ((url + ".asc").equals(urlString)) {
                // serve a freshly generated detached signature over the zip bytes
                final Path ascFile = temp.apply("asc").resolve("downloaded.zip" + ".asc");
                final byte[] zipBytes = Files.readAllBytes(pluginZip);
                final String asc = signature.apply(zipBytes, secretKey);
                Files.write(ascFile, asc.getBytes(StandardCharsets.UTF_8));
                return ascFile.toUri().toURL();
            }
            // any other URL is treated as nonexistent
            return null;
        }

        @Override
        void verifySignature(Path zip, String urlString) throws IOException, PGPException {
            // only official plugins get signature verification
            if (InstallPluginCommand.OFFICIAL_PLUGINS.contains(name)) {
                super.verifySignature(zip, urlString);
            } else {
                throw new UnsupportedOperationException("verify signature should not be called for unofficial plugins");
            }
        }

        @Override
        InputStream pluginZipInputStream(Path zip) throws IOException {
            return new ByteArrayInputStream(Files.readAllBytes(zip));
        }

        @Override
        String getPublicKeyId() {
            return Long.toHexString(secretKey.getKeyID()).toUpperCase(Locale.ROOT);
        }

        @Override
        InputStream getPublicKey() {
            try {
                // export the ASCII-armored public half of the test key
                final ByteArrayOutputStream output = new ByteArrayOutputStream();
                final ArmoredOutputStream armored = new ArmoredOutputStream(output);
                secretKey.getPublicKey().encode(armored);
                armored.close();
                return new ByteArrayInputStream(output.toByteArray());
            } catch (final IOException e) {
                throw new AssertionError(e);
            }
        }

        @Override
        boolean urlExists(Terminal terminal, String urlString) throws IOException {
            // only the expected download URL "exists"
            return urlString.equals(url);
        }

        @Override
        String getStagingHash() {
            return stagingHash;
        }

        @Override
        boolean isSnapshot() {
            return isSnapshot;
        }

        @Override
        void jarHellCheck(PluginInfo candidateInfo, Path candidate, Path pluginsDir, Path modulesDir) throws Exception {
            // no jarhell check
        }
    };
    installPlugin(pluginId, env.v1(), command);
    assertPlugin(name, pluginDir, env.v2());
}
/**
 * Convenience overload: installs from a URL using a SHA-512 checksum file that
 * includes the filename, and a freshly generated signing key.
 */
public void assertInstallPluginFromUrl(
    final String pluginId,
    final String name,
    final String url,
    final String stagingHash,
    boolean isSnapshot
) throws Exception {
    final MessageDigest digest = MessageDigest.getInstance("SHA-512");
    assertInstallPluginFromUrl(
        pluginId,
        name,
        url,
        stagingHash,
        isSnapshot,
        ".sha512",
        checksumAndFilename(digest, url),
        newSecretKey(),
        this::signature
    );
}
// Release builds resolve official plugins to artifacts.elastic.co.
public void testOfficialPlugin() throws Exception {
    final String expectedUrl = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, null, false);
}
// Snapshot builds resolve official plugins to snapshots.elastic.co using the staging hash.
public void testOfficialPluginSnapshot() throws Exception {
    final String expectedUrl = String.format(
        Locale.ROOT,
        "https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s.zip",
        Version.CURRENT,
        Build.CURRENT.getQualifiedVersion()
    );
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, "abc123", true);
}
// A release (non-staging) official plugin cannot be installed on a snapshot
// build of Elasticsearch.
public void testInstallReleaseBuildOfPluginOnSnapshotBuild() {
    String url = String.format(
        Locale.ROOT,
        "https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s.zip",
        Version.CURRENT,
        Build.CURRENT.getQualifiedVersion()
    );
    // attempting to install a release build of a plugin (no staging ID) on a snapshot build should throw a user exception
    final UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, true)
    );
    assertThat(e.exitCode, equalTo(ExitCodes.CONFIG));
    assertThat(
        e,
        hasToString(containsString("attempted to install release build of official plugin on snapshot build of Elasticsearch"))
    );
}
// Staging builds resolve official plugins to staging.elastic.co using the staging hash.
public void testOfficialPluginStaging() throws Exception {
    final String expectedUrl = "https://staging.elastic.co/"
        + Version.CURRENT
        + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, "abc123", false);
}
// Platform-specific official plugins embed the platform name in the artifact URL.
public void testOfficialPlatformPlugin() throws Exception {
    final String expectedUrl = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Platforms.PLATFORM_NAME
        + "-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, null, false);
}
// Snapshot builds of platform-specific plugins come from snapshots.elastic.co.
public void testOfficialPlatformPluginSnapshot() throws Exception {
    final String expectedUrl = String.format(
        Locale.ROOT,
        "https://snapshots.elastic.co/%s-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-%s-%s.zip",
        Version.CURRENT,
        Platforms.PLATFORM_NAME,
        Build.CURRENT.getQualifiedVersion()
    );
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, "abc123", true);
}
// Staging builds of platform-specific plugins come from staging.elastic.co.
public void testOfficialPlatformPluginStaging() throws Exception {
    final String expectedUrl = "https://staging.elastic.co/"
        + Version.CURRENT
        + "-abc123/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Platforms.PLATFORM_NAME
        + "-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    assertInstallPluginFromUrl("analysis-icu", "analysis-icu", expectedUrl, "abc123", false);
}
// Maven coordinates resolve to repo1.maven.org.
public void testMavenPlugin() throws Exception {
    final String expectedUrl = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
    assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", expectedUrl, null, false);
}
// Platform-specific maven artifacts include the platform name in the filename.
public void testMavenPlatformPlugin() throws Exception {
    final String expectedUrl = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-" + Platforms.PLATFORM_NAME + "-1.0.0.zip";
    assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", expectedUrl, null, false);
}
// Maven plugins may fall back to a .sha1 checksum when no .sha512 is published.
public void testMavenSha1Backcompat() throws Exception {
    String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-1");
    assertInstallPluginFromUrl("mygroup:myplugin:1.0.0", "myplugin", url, null, false, ".sha1", checksum(digest), null, (b, p) -> null);
    // the fallback is surfaced to the user as a notice
    assertTrue(terminal.getOutput(), terminal.getOutput().contains("sha512 not found, falling back to sha1"));
}
// For maven plugins a bare checksum (digest only, no filename column) is acceptable.
public void testMavenChecksumWithoutFilename() throws Exception {
    String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-512");
    assertInstallPluginFromUrl(
        "mygroup:myplugin:1.0.0",
        "myplugin",
        url,
        null,
        false,
        ".sha512",
        checksum(digest),
        null,
        (b, p) -> null
    );
}
// Official plugins require the checksum file to include the filename; a bare
// digest is treated as an invalid checksum file.
public void testOfficialChecksumWithoutFilename() throws Exception {
    String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-512");
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            url,
            null,
            false,
            ".sha512",
            checksum(digest),
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertThat(e.getMessage(), startsWith("Invalid checksum file"));
}
// Official plugins require a .sha512 checksum; offering only .sha1 counts as missing.
public void testOfficialShaMissing() throws Exception {
    String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-1");
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            url,
            null,
            false,
            ".sha1",
            checksum(digest),
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertEquals("Plugin checksum missing: " + url + ".sha512", e.getMessage());
}
// Maven plugins require at least a .sha1 checksum to be available.
public void testMavenShaMissing() throws Exception {
    String url = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "mygroup:myplugin:1.0.0",
            "myplugin",
            url,
            null,
            false,
            ".dne",
            bytes -> null,
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertEquals("Plugin checksum missing: " + url + ".sha1", e.getMessage());
}
// An official-plugin checksum file lacking the filename column is invalid.
public void testInvalidShaFileMissingFilename() throws Exception {
    String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-512");
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            url,
            null,
            false,
            ".sha512",
            checksum(digest),
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
}
// A checksum file naming a different artifact must be rejected.
public void testInvalidShaFileMismatchFilename() throws Exception {
    String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-512");
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            url,
            null,
            false,
            ".sha512",
            // checksum file claims to be for repository-s3, not analysis-icu
            checksumAndString(digest, " repository-s3-" + Build.CURRENT.getQualifiedVersion() + ".zip"),
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertThat(e, hasToString(matches("checksum file at \\[.*\\] is not for this plugin")));
}
// A checksum file with trailing extra lines is invalid.
public void testInvalidShaFileContainingExtraLine() throws Exception {
    String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    MessageDigest digest = MessageDigest.getInstance("SHA-512");
    UserException e = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            url,
            null,
            false,
            ".sha512",
            // correct digest and filename, but followed by a second junk line
            checksumAndString(digest, " analysis-icu-" + Build.CURRENT.getQualifiedVersion() + ".zip\nfoobar"),
            null,
            (b, p) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, e.exitCode);
    assertTrue(e.getMessage(), e.getMessage().startsWith("Invalid checksum file"));
}
public void testSha512Mismatch() throws Exception {
    // A well-formed .sha512 file whose digest does not match the archive must fail.
    final String pluginUrl = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/analysis-icu-"
        + Build.CURRENT.getQualifiedVersion()
        + ".zip";
    final UserException failure = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "analysis-icu",
            "analysis-icu",
            pluginUrl,
            null,
            false,
            ".sha512",
            zipBytes -> "foobar analysis-icu-" + Build.CURRENT.getQualifiedVersion() + ".zip",
            null,
            (zip, key) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, failure.exitCode);
    assertTrue(failure.getMessage(), failure.getMessage().contains("SHA-512 mismatch, expected foobar"));
}
public void testSha1Mismatch() throws Exception {
    // Maven-coordinate plugins are verified with SHA-1; a digest mismatch must fail.
    final String pluginUrl = "https://repo1.maven.org/maven2/mygroup/myplugin/1.0.0/myplugin-1.0.0.zip";
    final UserException failure = expectThrows(
        UserException.class,
        () -> assertInstallPluginFromUrl(
            "mygroup:myplugin:1.0.0",
            "myplugin",
            pluginUrl,
            null,
            false,
            ".sha1",
            zipBytes -> "foobar",
            null,
            (zip, key) -> null
        )
    );
    assertEquals(ExitCodes.IO_ERROR, failure.exitCode);
    assertTrue(failure.getMessage(), failure.getMessage().contains("SHA-1 mismatch, expected foobar"));
}
public void testPublicKeyIdMismatchToExpectedPublicKeyId() throws Exception {
    final String icu = "analysis-icu";
    final String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Build.CURRENT.getQualifiedVersion() + ".zip";
    final MessageDigest sha512 = MessageDigest.getInstance("SHA-512");
    /*
     * Sign with a freshly generated key but hand the installer a different,
     * independently generated key to verify against, so the key id embedded in the
     * signature cannot match the expected one.
     */
    final PGPSecretKey signingKey = newSecretKey(); // the key actually used for signing
    final String signingKeyId = Long.toHexString(signingKey.getKeyID()).toUpperCase(Locale.ROOT);
    final BiFunction<byte[], PGPSecretKey, String> signer = (data, ignored) -> signature(data, signingKey);
    final PGPSecretKey expectedKey = newSecretKey(); // the key the installer is told to trust
    final String expectedKeyId = Long.toHexString(expectedKey.getKeyID()).toUpperCase(Locale.ROOT);
    final IllegalStateException failure = expectThrows(
        IllegalStateException.class,
        () -> assertInstallPluginFromUrl(icu, icu, url, null, false, ".sha512", checksumAndFilename(sha512, url), expectedKey, signer)
    );
    assertThat(failure, hasToString(containsString("key id [" + signingKeyId + "] does not match expected key id [" + expectedKeyId + "]")));
}
public void testFailedSignatureVerification() throws Exception {
    final String icu = "analysis-icu";
    final String url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/analysis-icu/" + icu + "-" + Build.CURRENT.getQualifiedVersion() + ".zip";
    final MessageDigest sha512 = MessageDigest.getInstance("SHA-512");
    /*
     * Flip a single byte of the content before signing: the resulting signature is
     * over different bytes than the archive actually downloaded, which is enough to
     * make verification fail.
     */
    final BiFunction<byte[], PGPSecretKey, String> corruptingSigner = (data, key) -> {
        final byte[] corrupted = Arrays.copyOf(data, data.length);
        corrupted[0] = randomValueOtherThan(data[0], ESTestCase::randomByte);
        return signature(corrupted, key);
    };
    final IllegalStateException failure = expectThrows(
        IllegalStateException.class,
        () -> assertInstallPluginFromUrl(icu, icu, url, null, false, ".sha512", checksumAndFilename(sha512, url), newSecretKey(), corruptingSigner)
    );
    assertThat(failure, hasToString(equalTo("java.lang.IllegalStateException: signature verification for [" + url + "] failed")));
}
/**
 * Generates a throwaway 2048-bit RSA PGP secret key for signing tests. The key is
 * certified for "example@example.com" and its private material is protected with
 * AES-192 under the fixed passphrase "passphrase" (BC-FIPS provider), matching the
 * passphrase used by {@code signature}.
 */
public PGPSecretKey newSecretKey() throws NoSuchAlgorithmException, NoSuchProviderException, PGPException {
    final KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
    kpg.initialize(2048);
    final KeyPair pair = kpg.generateKeyPair();
    // NOTE(review): SHA-1 here is the digest calculator handed to the PGPSecretKey
    // constructor/encryptor builder, not the signature hash (that is SHA-256 below).
    final PGPDigestCalculator sha1Calc = new JcaPGPDigestCalculatorProviderBuilder().build().get(HashAlgorithmTags.SHA1);
    final PGPKeyPair pkp = new JcaPGPKeyPair(PGPPublicKey.RSA_GENERAL, pair, new Date());
    return new PGPSecretKey(
        PGPSignature.DEFAULT_CERTIFICATION,
        pkp,
        "example@example.com",
        sha1Calc,
        null,
        null,
        new JcaPGPContentSignerBuilder(pkp.getPublicKey().getAlgorithm(), HashAlgorithmTags.SHA256),
        new JcePBESecretKeyEncryptorBuilder(PGPEncryptedData.AES_192, sha1Calc).setProvider(new BouncyCastleFipsProvider())
            .build("passphrase".toCharArray())
    );
}
/** Returns a checksum function producing only the hex digest, with no trailing file name. */
private Function<byte[], String> checksum(final MessageDigest digest) {
    return checksumAndString(digest, "");
}
/** Returns a checksum function producing "&lt;hex digest&gt; &lt;last path segment of url&gt;". */
private Function<byte[], String> checksumAndFilename(final MessageDigest digest, final String url) throws MalformedURLException {
    final String path = URI.create(url).getPath();
    final String[] parts = path.split("/");
    final String filename = parts[parts.length - 1];
    return checksumAndString(digest, " " + filename);
}
/** Returns a checksum function producing the hex digest of the input followed by {@code s}. */
private Function<byte[], String> checksumAndString(final MessageDigest digest, final String s) {
    return bytes -> MessageDigests.toHexString(digest.digest(bytes)) + s;
}
/**
 * Signs {@code bytes} with the given PGP secret key and returns the detached,
 * ASCII-armored signature text.
 *
 * @param bytes     the content to sign
 * @param secretKey the signing key; its private material must be protected with the
 *                  passphrase "passphrase" (as produced by {@code newSecretKey})
 * @return the armored signature
 * @throws RuntimeException wrapping any {@link IOException} or {@link PGPException}
 */
private String signature(final byte[] bytes, final PGPSecretKey secretKey) {
    try {
        // Unlock the private key with the fixed test passphrase.
        final PGPPrivateKey privateKey = secretKey.extractPrivateKey(
            new JcePBESecretKeyDecryptorBuilder(new JcaPGPDigestCalculatorProviderBuilder().build()).build("passphrase".toCharArray())
        );
        final PGPSignatureGenerator generator = new PGPSignatureGenerator(
            new JcaPGPContentSignerBuilder(privateKey.getPublicKeyPacket().getAlgorithm(), HashAlgorithmTags.SHA512)
        );
        generator.init(PGPSignature.BINARY_DOCUMENT, privateKey);
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        try (
            BCPGOutputStream pout = new BCPGOutputStream(new ArmoredOutputStream(output));
            InputStream is = new ByteArrayInputStream(bytes)
        ) {
            final byte[] buffer = new byte[1024];
            int read;
            while ((read = is.read(buffer)) != -1) {
                generator.update(buffer, 0, read);
            }
            generator.generate().encode(pout);
        }
        // Use the StandardCharsets constant rather than the charset-name lookup
        // new String(..., "UTF-8"): the constant cannot throw
        // UnsupportedEncodingException and avoids a typo-prone string literal.
        return new String(output.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
    } catch (IOException | PGPException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Checks that installing {@code pluginZip} prompts for each of the given security
 * warnings and aborts (leaving the plugins directory empty) when the user declines,
 * both via the default empty answer and via an explicit "n". After all the
 * rejection rounds, every prompt is answered "y" and the plugin IS installed, so it
 * exists once this method returns.
 */
private void assertPolicyConfirmation(Tuple<Path, Environment> env, String pluginZip, String... warnings) throws Exception {
    for (int i = 0; i < warnings.length; ++i) {
        String warning = warnings[i];
        for (int j = 0; j < i; ++j) {
            terminal.addTextInput("y"); // accept warnings we have already tested
        }
        // default answer (empty input): does not install
        terminal.addTextInput("");
        UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
        assertEquals("installation aborted by user", e.getMessage());
        assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning));
        // nothing may have been written to the plugins directory
        try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
            assertThat(fileStream.collect(Collectors.toList()), empty());
        }
        // explicitly answer "n": also does not install
        terminal.reset();
        for (int j = 0; j < i; ++j) {
            terminal.addTextInput("y"); // accept warnings we have already tested
        }
        terminal.addTextInput("n");
        e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1()));
        assertEquals("installation aborted by user", e.getMessage());
        assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning));
        try (Stream<Path> fileStream = Files.list(env.v2().pluginsFile())) {
            assertThat(fileStream.collect(Collectors.toList()), empty());
        }
    }
    // finally accept every warning: the installation must succeed
    terminal.reset();
    for (int j = 0; j < warnings.length; ++j) {
        terminal.addTextInput("y");
    }
    installPlugin(pluginZip, env.v1());
    for (String warning : warnings) {
        assertThat(terminal.getErrorOutput(), containsString("WARNING: " + warning));
    }
}
public void testPolicyConfirmation() throws Exception {
    // A plugin whose security policy requests extra permissions requires user confirmation.
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDir = createPluginDir(temp);
    writePluginSecurityPolicy(pluginDir, "setAccessible", "setFactory");
    final String zip = createPluginUrl("fake", pluginDir);
    assertPolicyConfirmation(environment, zip, "plugin requires additional permissions");
    assertPlugin("fake", pluginDir, environment.v2());
}
public void testPluginWithNativeController() throws Exception {
    // Plugins declaring a native controller must be refused outright.
    final Tuple<Path, Environment> environment = createEnv(fs, temp);
    final Path pluginDir = createPluginDir(temp);
    final String zip = createPluginUrl("fake", pluginDir, "has.native.controller", "true");
    final IllegalStateException failure = expectThrows(IllegalStateException.class, () -> installPlugin(zip, environment.v1()));
    assertThat(failure, hasToString(containsString("plugins can not have native controllers")));
}
}
| |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.javascript.tests;
import java.io.File;
import java.io.FileFilter;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mozilla.javascript.drivers.ShellTest;
import org.mozilla.javascript.drivers.StandardTests;
import org.mozilla.javascript.drivers.TestUtils;
import org.mozilla.javascript.tools.shell.ShellContextFactory;
/**
* This JUnit suite runs the Mozilla test suite (in mozilla.org CVS
* at /mozilla/js/tests).
*
* Not all tests in the suite are run. Since the mozilla.org tests are
* designed and maintained for the SpiderMonkey engine, tests in the
* suite may not pass due to feature set differences and known bugs.
* To make sure that this unit test is stable in the midst of changes
* to the mozilla.org suite, we maintain a list of passing tests in
* files opt-1.tests, opt0.tests, and opt9.tests. This class also
* implements the ability to run skipped tests, see if any pass, and
* print out a script to modify the *.tests files.
* (This approach doesn't handle breaking changes to existing passing
* tests, but in practice that has been very rare.)
*/
@RunWith(Parameterized.class)
public class MozillaSuiteTest {
    /** JavaScript source file this parameterized case runs. */
    private final File jsFile;
    /** Rhino optimization level the script is run under. */
    private final int optimizationLevel;

    /** Optimization levels exercised by the suite; each has its own opt<N>.tests list. */
    static final int[] OPT_LEVELS = { -1, 0, 9 };

    public MozillaSuiteTest(File jsFile, int optimizationLevel) {
        this.jsFile = jsFile;
        this.optimizationLevel = optimizationLevel;
    }

    /**
     * Locates the mozilla.org test directory: either the directory named by the
     * "mozilla.js.tests" system property, or the "tests" directory inferred from the
     * standard mozilla/js source layout relative to this class's resources.
     *
     * @throws FileNotFoundException if the resolved path is not a directory
     * @throws IllegalStateException if the resource path does not contain "/js"
     */
    public static File getTestDir() throws IOException {
        File testDir = null;
        if (System.getProperty("mozilla.js.tests") != null) {
            testDir = new File(System.getProperty("mozilla.js.tests"));
        } else {
            URL url = StandardTests.class.getResource(".");
            String path = url.getFile();
            int jsIndex = path.lastIndexOf("/js");
            if (jsIndex == -1) {
                throw new IllegalStateException("You aren't running the tests "+
                    "from within the standard mozilla/js directory structure");
            }
            path = path.substring(0, jsIndex + 3).replace('/', File.separatorChar);
            // resource URLs percent-encode spaces; undo that for the filesystem path
            path = path.replace("%20", " ");
            testDir = new File(path, "tests");
        }
        if (!testDir.isDirectory()) {
            throw new FileNotFoundException(testDir + " is not a directory");
        }
        return testDir;
    }

    /** Name of the passing-test list for an optimization level, e.g. "opt0.tests". */
    public static String getTestFilename(int optimizationLevel) {
        return "opt" + optimizationLevel + ".tests";
    }

    /** Loads the sorted list of test files expected to pass at the given optimization level. */
    public static File[] getTestFiles(int optimizationLevel) throws IOException {
        File testDir = getTestDir();
        String[] tests = TestUtils.loadTestsFromResource(
            "/" + getTestFilename(optimizationLevel), null);
        Arrays.sort(tests);
        File[] files = new File[tests.length];
        for (int i=0; i < files.length; i++) {
            files[i] = new File(testDir, tests[i]);
        }
        return files;
    }

    /**
     * Reads the whole file as text using the platform default charset (preserved
     * from the original behavior).
     *
     * <p>Fix: the previous implementation never closed the FileReader, assumed a
     * single read() call would fill the buffer (the Reader contract does not
     * guarantee that), and sized a char buffer from the file's byte length.
     */
    public static String loadFile(File f) throws IOException {
        StringBuilder contents = new StringBuilder((int) f.length());
        try (FileReader reader = new FileReader(f)) {
            char[] buf = new char[4096];
            int read;
            while ((read = reader.read(buf)) != -1) {
                contents.append(buf, 0, read);
            }
        }
        return contents.toString();
    }

    /** Builds the (file, optimization level) pairs for all levels in OPT_LEVELS. */
    @Parameters
    public static Collection<Object[]> mozillaSuiteValues() throws IOException {
        List<Object[]> result = new ArrayList<Object[]>();
        int[] optLevels = OPT_LEVELS;
        for (int i=0; i < optLevels.length; i++) {
            File[] tests = getTestFiles(optLevels[i]);
            for (File f : tests) {
                result.add(new Object[] { f, optLevels[i] });
            }
        }
        return result;
    }

    // move "@Parameters" to this method to test a single Mozilla test
    public static Collection<Object[]> singleDoctest() throws IOException {
        final String SINGLE_TEST_FILE = "e4x/Expressions/11.1.1.js";
        final int SINGLE_TEST_OPTIMIZATION_LEVEL = -1;
        List<Object[]> result = new ArrayList<Object[]>();
        File f = new File(getTestDir(), SINGLE_TEST_FILE);
        result.add(new Object[] { f, SINGLE_TEST_OPTIMIZATION_LEVEL });
        return result;
    }

    /** Shell parameters with a timeout overridable via "mozilla.js.tests.timeout" (ms). */
    private static class ShellTestParameters extends ShellTest.Parameters {
        @Override
        public int getTimeoutMilliseconds() {
            if (System.getProperty("mozilla.js.tests.timeout") != null) {
                return Integer.parseInt(System.getProperty(
                    "mozilla.js.tests.timeout"));
            }
            return 10000;
        }
    }

    /** Adapts ShellTest callbacks to JUnit assertions. */
    private static class JunitStatus extends ShellTest.Status {
        File file;

        @Override
        public final void running(File jsFile) {
            // remember file in case we fail
            file = jsFile;
        }

        @Override
        public final void failed(String s) {
            // Include test source in message, this is the only way
            // to locate the test in a Parameterized JUnit test
            String msg = "In \"" + file + "\":" +
                System.getProperty("line.separator") + s;
            System.out.println(msg);
            Assert.fail(msg);
        }

        @Override
        public final void exitCodesWere(int expected, int actual) {
            Assert.assertEquals("Unexpected exit code", expected, actual);
        }

        @Override
        public final void outputWas(String s) {
            // Do nothing; we don't want to see the output when running JUnit
            // tests.
        }

        @Override
        public final void threw(Throwable t) {
            Assert.fail(ShellTest.getStackTrace(t));
        }

        @Override
        public final void timedOut() {
            failed("Timed out.");
        }
    }

    /** Runs this case's script under this case's optimization level. */
    @Test
    public void runMozillaTest() throws Exception {
        //System.out.println("Test \"" + jsFile + "\" running under optimization level " + optimizationLevel);
        final ShellContextFactory shellContextFactory =
            new ShellContextFactory();
        shellContextFactory.setOptimizationLevel(optimizationLevel);
        ShellTestParameters params = new ShellTestParameters();
        JunitStatus status = new JunitStatus();
        ShellTest.run(shellContextFactory, jsFile, params, status);
    }

    /**
     * The main class will run all the test files that are *not* covered in
     * the *.tests files, and print out a list of all the tests that pass.
     */
    public static void main(String[] args) throws IOException {
        PrintStream out = new PrintStream("fix-tests-files.sh");
        try {
            for (int i=0; i < OPT_LEVELS.length; i++) {
                int optLevel = OPT_LEVELS[i];
                File testDir = getTestDir();
                File[] allTests =
                    TestUtils.recursiveListFiles(testDir,
                        new FileFilter() {
                            public boolean accept(File pathname)
                            {
                                return ShellTest.DIRECTORY_FILTER.accept(pathname) ||
                                    ShellTest.TEST_FILTER.accept(pathname);
                            }
                        });
                // all tests minus the ones already listed as passing
                HashSet<File> diff = new HashSet<File>(Arrays.asList(allTests));
                File testFiles[] = getTestFiles(optLevel);
                diff.removeAll(Arrays.asList(testFiles));
                ArrayList<String> skippedPassed = new ArrayList<String>();
                int absolutePathLength = testDir.getAbsolutePath().length() + 1;
                for (File testFile: diff) {
                    try {
                        (new MozillaSuiteTest(testFile, optLevel)).runMozillaTest();
                        // strip off testDir
                        String canonicalized =
                            testFile.getAbsolutePath().substring(absolutePathLength);
                        canonicalized = canonicalized.replace('\\', '/');
                        skippedPassed.add(canonicalized);
                    } catch (Throwable t) {
                        // failed, so skip
                    }
                }
                // "skippedPassed" now contains all the tests that are currently
                // skipped but now pass. Print out shell commands to update the
                // appropriate *.tests file.
                if (skippedPassed.size() > 0) {
                    out.println("cat >> " + getTestFilename(optLevel) + " <<EOF");
                    String[] sorted = skippedPassed.toArray(new String[0]);
                    Arrays.sort(sorted);
                    for (int j=0; j < sorted.length; j++) {
                        out.println(sorted[j]);
                    }
                    out.println("EOF");
                }
            }
            System.out.println("Done.");
        } finally {
            out.close();
        }
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flex.forks.batik.gvt;
import java.awt.Shape;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.flex.forks.batik.gvt.event.GraphicsNodeChangeAdapter;
import org.apache.flex.forks.batik.gvt.event.GraphicsNodeChangeEvent;
import org.apache.flex.forks.batik.ext.awt.image.renderable.Filter;
/**
* This class tracks the changes on a GVT tree
*
* @author <a href="mailto:Thomas.DeWeeese@Kodak.com">Thomas DeWeese</a>
* @version $Id: UpdateTracker.java 479559 2006-11-27 09:46:16Z dvholten $
*/
public class UpdateTracker extends GraphicsNodeChangeAdapter {

    // Pending changes: WeakReference(GraphicsNode) -> the AffineTransform the node
    // had when its change started.  null until the first change is reported; reset
    // to null by clear() and emptied by getDirtyAreas().
    Map dirtyNodes = null;

    // WeakReference(GraphicsNode) -> Rectangle2D bounds the node had before the
    // change (in the node's own coordinate space), or NULL_RECT when none were known.
    Map fromBounds = new HashMap();

    // Sentinel stored in fromBounds meaning "no previous bounds"; always compared
    // by identity (==), never by equals.
    protected static Rectangle2D NULL_RECT = new Rectangle();

    public UpdateTracker(){
    }

    /**
     * Tells whether the GVT tree has changed.
     */
    public boolean hasChanged() {
        return (dirtyNodes != null);
    }

    /**
     * Returns the list of dirty areas on GVT.
     *
     * <p>For each dirty node this walks up to the root, accumulating both the
     * pre-change transform stack (oat) and the current transform stack (nat), then
     * reports the node's old bounds under oat and new bounds under nat in root
     * space.  Both dirtyNodes and fromBounds are emptied as a side effect.
     */
    public List getDirtyAreas() {
        if (dirtyNodes == null)
            return null;
        List ret = new LinkedList();
        Set keys = dirtyNodes.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            WeakReference gnWRef = (WeakReference)i.next();
            GraphicsNode gn = (GraphicsNode)gnWRef.get();
            // GraphicsNode srcGN = gn;
            // if the weak ref has been cleared then this node is no
            // longer part of the GVT tree (and the change should be
            // reflected in some ancestor that should also be in the
            // dirty list).
            if (gn == null) continue;
            AffineTransform oat;
            oat = (AffineTransform)dirtyNodes.get(gnWRef);
            if (oat != null){
                // copy so preConcatenate below does not mutate the stored transform
                oat = new AffineTransform(oat);
            }
            Rectangle2D srcORgn = (Rectangle2D)fromBounds.remove(gnWRef);
            Rectangle2D srcNRgn = null;
            AffineTransform nat = null;
            if (!(srcORgn instanceof ChngSrcRect)) {
                // For change srcs don't use the new bounds of parent node.
                srcNRgn = gn.getBounds();
                nat = gn.getTransform();
                if (nat != null)
                    nat = new AffineTransform(nat);
            }
            // System.out.println("Rgns: " + srcORgn + " - " + srcNRgn);
            // System.out.println("ATs: " + oat + " - " + nat);

            // Walk to the root, pre-concatenating each ancestor's old transform
            // onto oat and current transform onto nat.
            do {
                // f.invalidateCache(oRng);
                // f.invalidateCache(nRng);
                // f = gn.getEnableBackgroundGraphicsNodeRable(false);
                // (need to push rgn through filter chain if any...)
                // f.invalidateCache(oRng);
                // f.invalidateCache(nRng);
                gn = gn.getParent();
                if (gn == null)
                    break; // We reached the top of the tree
                Filter f= gn.getFilter();
                if ( f != null) {
                    // A filtered ancestor repaints its whole filter extent.
                    srcNRgn = f.getBounds2D();
                    nat = null;
                }
                // Get the parent's current Affine
                AffineTransform at = gn.getTransform();
                // Get the parent's Affine last time we rendered.
                gnWRef = gn.getWeakReference();
                AffineTransform poat = (AffineTransform)dirtyNodes.get(gnWRef);
                if (poat == null) poat = at;
                if (poat != null) {
                    if (oat != null)
                        oat.preConcatenate(poat);
                    else
                        oat = new AffineTransform(poat);
                }
                if (at != null){
                    if (nat != null)
                        nat.preConcatenate(at);
                    else
                        nat = new AffineTransform(at);
                }
            } while (true);

            if (gn == null) {
                // We made it to the root graphics node so add them.
                // System.out.println
                // ("Adding: " + oat + " - " + nat + "\n" +
                // srcORgn + "\n" + srcNRgn + "\n");
                // <!>
                Shape oRgn = srcORgn;
                if ((oRgn != null) && (oRgn != NULL_RECT)) {
                    if (oat != null)
                        oRgn = oat.createTransformedShape(srcORgn);
                    // System.err.println("GN: " + srcGN);
                    // System.err.println("Src: " + oRgn.getBounds2D());
                    ret.add(oRgn);
                }
                if (srcNRgn != null) {
                    Shape nRgn = srcNRgn;
                    if (nat != null)
                        nRgn = nat.createTransformedShape(srcNRgn);
                    if (nRgn != null)
                        ret.add(nRgn);
                }
            }
        }
        fromBounds.clear();
        dirtyNodes.clear();
        return ret;
    }

    /**
     * This returns the dirty region for gn in the coordinate system
     * given by <code>at</code>.
     * @param gn Node tree to return dirty region for.
     * @param at Affine transform to coordinate space to accumulate
     * dirty regions in.
     */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn,
                                          AffineTransform at) {
        WeakReference gnWRef = gn.getWeakReference();
        // Use the transform recorded at change time if we have one, else current.
        AffineTransform nat = (AffineTransform)dirtyNodes.get(gnWRef);
        if (nat == null) nat = gn.getTransform();
        if (nat != null) {
            at = new AffineTransform(at);
            at.concatenate(nat);
        }
        Filter f= gn.getFilter();
        Rectangle2D ret = null;
        if (gn instanceof CompositeGraphicsNode) {
            CompositeGraphicsNode cgn = (CompositeGraphicsNode)gn;
            Iterator iter = cgn.iterator();
            while (iter.hasNext()) {
                GraphicsNode childGN = (GraphicsNode)iter.next();
                Rectangle2D r2d = getNodeDirtyRegion(childGN, at);
                if (r2d != null) {
                    if (f != null) {
                        // If we have a filter and a change region
                        // Update our full filter extents.
                        Shape s = at.createTransformedShape(f.getBounds2D());
                        ret = s.getBounds2D();
                        break;
                    }
                    if ((ret == null) || (ret == NULL_RECT)) ret = r2d;
                    //else ret = ret.createUnion(r2d);
                    else ret.add(r2d);
                }
            }
        } else {
            ret = (Rectangle2D)fromBounds.remove(gnWRef);
            if (ret == null) {
                if (f != null) ret = f.getBounds2D();
                else ret = gn.getBounds();
            } else if (ret == NULL_RECT)
                ret = null;
            if (ret != null)
                ret = at.createTransformedShape(ret).getBounds2D();
        }
        return ret;
    }

    /** Dirty region for {@code gn} in its own coordinate system (identity transform). */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn) {
        return getNodeDirtyRegion(gn, new AffineTransform());
    }

    /**
     * Receives notification of a change to a GraphicsNode.
     * @param gnce The event object describing the GraphicsNode change.
     */
    public void changeStarted(GraphicsNodeChangeEvent gnce) {
        // System.out.println("A node has changed for: " + this);
        GraphicsNode gn = gnce.getGraphicsNode();
        WeakReference gnWRef = gn.getWeakReference();

        // Record the node's transform the first time we see it dirty.
        boolean doPut = false;
        if (dirtyNodes == null) {
            dirtyNodes = new HashMap();
            doPut = true;
        } else if (!dirtyNodes.containsKey(gnWRef))
            doPut = true;
        if (doPut) {
            AffineTransform at = gn.getTransform();
            if (at != null) at = (AffineTransform)at.clone();
            else at = new AffineTransform();
            dirtyNodes.put(gnWRef, at);
        }

        GraphicsNode chngSrc = gnce.getChangeSrc();
        Rectangle2D rgn = null;
        if (chngSrc != null) {
            // A child node is moving in the tree so assign it's dirty
            // regions to this node before it moves.
            Rectangle2D drgn = getNodeDirtyRegion(chngSrc);
            if (drgn != null)
                rgn = new ChngSrcRect(drgn);
        } else {
            // Otherwise just use gn's current region.
            rgn = gn.getBounds();
        }
        // Add this dirty region to any existing dirty region.
        Rectangle2D r2d = (Rectangle2D)fromBounds.remove(gnWRef);
        if (rgn != null) {
            if ((r2d != null) && (r2d != NULL_RECT)) {
                // System.err.println("GN: " + gn);
                // System.err.println("R2d: " + r2d);
                // System.err.println("Rgn: " + rgn);
                //r2d = r2d.createUnion(rgn);
                r2d.add(rgn);
                // System.err.println("Union: " + r2d);
            }
            else r2d = rgn;
        }
        // if ((gn instanceof CompositeGraphicsNode) &&
        // (r2d.getWidth() > 200)) {
        // new Exception("Adding Large: " + gn).printStackTrace();
        // }
        // Store the bounds for the future.
        if (r2d == null)
            r2d = NULL_RECT;
        fromBounds.put(gnWRef, r2d);
    }

    /**
     * Marker subclass: bounds that came from a moving child (change source).
     * getDirtyAreas() skips recomputing new bounds for entries of this type.
     */
    class ChngSrcRect extends Rectangle2D.Float {
        ChngSrcRect(Rectangle2D r2d) {
            super((float)r2d.getX(), (float)r2d.getY(),
                  (float)r2d.getWidth(), (float)r2d.getHeight());
        }
    }

    /**
     * Clears the tracker.
     */
    public void clear() {
        dirtyNodes = null;
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.security.wycheproof;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Set;
import java.util.TreeSet;
import javax.crypto.Cipher;
import javax.crypto.NoSuchPaddingException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* RSA encryption tests
*
* @author bleichen@google.com (Daniel Bleichenbacher)
*/
@RunWith(JUnit4.class)
public class RsaEncryptionTest {
/**
* Providers that implement RSA with PKCS1Padding but not OAEP are outdated and should be avoided
* even if RSA is currently not used in a project. Such providers promote using an insecure
* cipher. There is a great danger that PKCS1Padding is used as a temporary workaround, but later
* stays in the project for much longer than necessary.
*/
@Test
public void testOutdatedProvider() throws Exception {
try {
Cipher c = Cipher.getInstance("RSA/ECB/PKCS1Padding");
try {
Cipher.getInstance("RSA/ECB/OAEPWITHSHA-1ANDMGF1PADDING");
} catch (NoSuchPaddingException | NoSuchAlgorithmException ex) {
fail("Provider " + c.getProvider().getName() + " is outdated and should not be used.");
}
} catch (NoSuchPaddingException | NoSuchAlgorithmException ex) {
System.out.println("RSA/ECB/PKCS1Padding is not implemented");
}
}
/**
* Get a PublicKey from a JsonObject.
*
* <p>object contains the key in multiple formats: "key" : elements of the public key "keyDer":
* the key in ASN encoding encoded hexadecimal "keyPem": the key in Pem format encoded hexadecimal
* The test can use the format that is most convenient.
*/
// This is a false positive, since errorprone cannot track values passed into a method.
@SuppressWarnings("InsecureCryptoUsage")
protected static PrivateKey getPrivateKey(JsonObject object) throws Exception {
KeyFactory kf;
kf = KeyFactory.getInstance("RSA");
byte[] encoded = TestUtil.hexToBytes(object.get("privateKeyPkcs8").getAsString());
PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(encoded);
return kf.generatePrivate(keySpec);
}
/** Convenience method to get a byte array from a JsonObject */
protected static byte[] getBytes(JsonObject object, String name) throws Exception {
return JsonUtil.asByteArray(object.get(name));
}
/**
* Tries decrypting RSA-PKCS #1 v 1.5 encrypted ciphertext.
* RSA-PKCS #1 v 1.5 is susceptible to chosen ciphertext attacks. The seriousness of the
* attack depends on how much information is leaked when decrypting an invalid ciphertext.
* The test vectors with invalid padding contain a flag "InvalidPkcs1Padding".
* The test below expects that all test vectors with this flag throw an indistinguishable
* exception.
*
* <p><b>References:</b>
*
* <ul>
* <li>Bleichenbacher, "Chosen ciphertext attacks against protocols based on the RSA encryption
* standard PKCS# 1" Crypto 98
* <li>Manger, "A chosen ciphertext attack on RSA optimal asymmetric encryption padding (OAEP)
* as standardized in PKCS# 1 v2.0", Crypto 2001 This paper shows that OAEP is susceptible
* to a chosen ciphertext attack if error messages distinguish between different failure
* condidtions.
* <li>Bardou, Focardi, Kawamoto, Simionato, Steel, Tsay "Efficient Padding Oracle Attacks on
* Cryptographic Hardware", Crypto 2012 The paper shows that small differences on what
* information an attacker receives can make a big difference on the number of chosen
* message necessary for an attack.
* <li>Smart, "Errors matter: Breaking RSA-based PIN encryption with thirty ciphertext validity
* queries" RSA conference, 2010 This paper shows that padding oracle attacks can be
* successful with even a small number of queries.
* </ul>
*
* <p><b>Some recent bugs:</b> CVE-2012-5081: Java JSSE provider leaked information through
* exceptions and timing. Both the PKCS #1 padding and the OAEP padding were broken:
* http://www-brs.ub.ruhr-uni-bochum.de/netahtml/HSS/Diss/MeyerChristopher/diss.pdf
*
* <p><b>What this test does not (yet) cover:</b>
*
* <ul>
* <li>A previous version of one of the provider leaked the block type. (when was this fixed?)
* <li>Some attacks require a large number of ciphertexts to be detected if random ciphertexts
* are used. Such problems require specifically crafted ciphertexts to run in a unit test.
* E.g. "Attacking RSA-based Sessions in SSL/TLS" by V. Klima, O. Pokorny, and T. Rosa:
* https://eprint.iacr.org/2003/052/
* <li>Timing leakages because of differences in parsing the padding (e.g. CVE-2015-7827) Such
* differences are too small to be reliably detectable in unit tests.
* </ul>
*/
@SuppressWarnings("InsecureCryptoUsage")
public void testDecryption(String filename) throws Exception {
final String expectedSchema = "rsaes_pkcs1_decrypt_schema.json";
JsonObject test = JsonUtil.getTestVectors(filename);
String schema = test.get("schema").getAsString();
if (!schema.equals(expectedSchema)) {
System.out.println(
"Expecting test vectors with schema "
+ expectedSchema
+ " found vectors with schema "
+ schema);
}
// Padding oracle attacks become simpler when the decryption leaks detailed information about
// invalid paddings. Hence implementations are expected to not include such information in the
// exception thrown in the case of an invalid padding.
// Test vectors with an invalid padding have a flag "InvalidPkcs1Padding".
// Invalid test vectors without this flag are cases where the error are detected before
// the ciphertext is decrypted, e.g. if the size of the ciphertext is incorrect.
final String invalidPkcs1Padding = "InvalidPkcs1Padding";
Set<String> exceptions = new TreeSet<String>();
int errors = 0;
Cipher decrypter = Cipher.getInstance("RSA/ECB/PKCS1Padding");
for (JsonElement g : test.getAsJsonArray("testGroups")) {
JsonObject group = g.getAsJsonObject();
PrivateKey key = getPrivateKey(group);
for (JsonElement t : group.getAsJsonArray("tests")) {
JsonObject testcase = t.getAsJsonObject();
int tcid = testcase.get("tcId").getAsInt();
String messageHex = TestUtil.bytesToHex(getBytes(testcase, "msg"));
byte[] ciphertext = getBytes(testcase, "ct");
String ciphertextHex = TestUtil.bytesToHex(ciphertext);
String result = testcase.get("result").getAsString();
decrypter.init(Cipher.DECRYPT_MODE, key);
byte[] decrypted = null;
String exception = "";
try {
decrypted = decrypter.doFinal(ciphertext);
} catch (Exception ex) {
// TODO(bleichen): The exception thrown should always be
// a GeneralSecurityException.
// However, BouncyCastle throws some non-conforming exceptions.
// For the moment we do not count this as a problem to avoid that
// more serious bugs remain hidden. In particular, the test expects
// that all ciphertexts with an invalid padding throw the same
// indistinguishable exception.
decrypted = null;
exception = ex.toString();
for (JsonElement flag : testcase.getAsJsonArray("flags")) {
if (flag.getAsString().equals(invalidPkcs1Padding)) {
exceptions.add(exception);
break;
}
}
}
if (decrypted == null && result.equals("valid")) {
System.out.printf(
"Valid ciphertext not decrypted. filename:%s tcId:%d ct:%s cause:%s\n",
filename, tcid, ciphertextHex, exception);
errors++;
} else if (decrypted != null) {
String decryptedHex = TestUtil.bytesToHex(decrypted);
if (result.equals("invalid")) {
System.out.printf(
"Invalid ciphertext decrypted. filename:%s tcId:%d expected:%s decrypted:%s\n",
filename, tcid, messageHex, decryptedHex);
errors++;
} else if (!decryptedHex.equals(messageHex)) {
System.out.printf(
"Incorrect decryption. filename:%s tcId:%d expected:%s decrypted:%s\n",
filename, tcid, messageHex, decryptedHex);
errors++;
}
}
}
}
if (exceptions.size() != 1) {
System.out.println("Exceptions for RSA/ECB/PKCS1Padding");
for (String s : exceptions) {
System.out.println(s);
}
fail("Exceptions leak information about the padding");
}
assertEquals(0, errors);
}
  /** Runs the Wycheproof PKCS #1 v1.5 decryption test vectors against a 2048-bit RSA key. */
  @Test
  public void testDecryption2048() throws Exception {
    testDecryption("rsa_pkcs1_2048_test.json");
  }
  /** Runs the Wycheproof PKCS #1 v1.5 decryption test vectors against a 3072-bit RSA key. */
  @Test
  public void testDecryption3072() throws Exception {
    testDecryption("rsa_pkcs1_3072_test.json");
  }
  /** Runs the Wycheproof PKCS #1 v1.5 decryption test vectors against a 4096-bit RSA key. */
  @Test
  public void testDecryption4096() throws Exception {
    testDecryption("rsa_pkcs1_4096_test.json");
  }
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.am;
import android.app.IActivityManager.ContentProviderHolder;
import android.content.ComponentName;
import android.content.IContentProvider;
import android.content.pm.ApplicationInfo;
import android.content.pm.ProviderInfo;
import android.os.IBinder;
import android.os.IBinder.DeathRecipient;
import android.os.Process;
import android.os.RemoteException;
import android.os.UserHandle;
import android.util.Slog;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
/**
 * Bookkeeping for one published content provider: the process hosting it, the
 * client connections attached to it, and handles held by external
 * (non-framework) processes.
 *
 * <p>Methods with a {@code Locked} suffix must be called while holding the
 * global {@link ActivityManagerService} lock ({@code service}).
 */
final class ContentProviderRecord {
    final ActivityManagerService service;
    public final ProviderInfo info;
    final int uid;
    final ApplicationInfo appInfo;
    final ComponentName name;
    final boolean singleton;
    public IContentProvider provider;
    public boolean noReleaseNeeded;
    // All attached clients
    final ArrayList<ContentProviderConnection> connections
            = new ArrayList<ContentProviderConnection>();
    //final HashSet<ProcessRecord> clients = new HashSet<ProcessRecord>();
    // Handles for non-framework processes supported by this provider; lazily
    // created on first token-bearing acquire and nulled out when emptied.
    HashMap<IBinder, ExternalProcessHandle> externalProcessTokenToHandle;
    // Count for external processes for which we have no handles (callers that
    // passed a null token to addExternalProcessHandleLocked).
    int externalProcessNoHandleCount;
    ProcessRecord proc; // if non-null, hosting process.
    ProcessRecord launchingApp; // if non-null, waiting for this app to be launched.
    String stringName;
    String shortStringName;

    public ContentProviderRecord(ActivityManagerService _service, ProviderInfo _info,
            ApplicationInfo ai, ComponentName _name, boolean _singleton) {
        service = _service;
        info = _info;
        uid = ai.uid;
        appInfo = ai;
        name = _name;
        singleton = _singleton;
        // Providers hosted by root or the system server never need to be released.
        noReleaseNeeded = uid == 0 || uid == Process.SYSTEM_UID;
    }

    /** Copy constructor; per-process/connection state is intentionally not copied. */
    public ContentProviderRecord(ContentProviderRecord cpr) {
        service = cpr.service;
        info = cpr.info;
        uid = cpr.uid;
        appInfo = cpr.appInfo;
        name = cpr.name;
        singleton = cpr.singleton;
        noReleaseNeeded = cpr.noReleaseNeeded;
    }

    /** Builds the holder handed to a client, capturing the current provider binder. */
    public ContentProviderHolder newHolder(ContentProviderConnection conn) {
        ContentProviderHolder holder = new ContentProviderHolder(info);
        holder.provider = provider;
        holder.noReleaseNeeded = noReleaseNeeded;
        holder.connection = conn;
        return holder;
    }

    /**
     * Returns true if the provider may be loaded directly into {@code app}'s
     * process: same uid, and either multiprocess or a matching process name.
     */
    public boolean canRunHere(ProcessRecord app) {
        return (info.multiprocess || info.processName.equals(app.processName))
                && uid == app.info.uid;
    }

    /**
     * Records an acquisition by an external process. A null token is counted
     * anonymously; otherwise a per-token handle (with death link) is
     * created/incremented.
     */
    public void addExternalProcessHandleLocked(IBinder token) {
        if (token == null) {
            externalProcessNoHandleCount++;
        } else {
            if (externalProcessTokenToHandle == null) {
                externalProcessTokenToHandle = new HashMap<IBinder, ExternalProcessHandle>();
            }
            ExternalProcessHandle handle = externalProcessTokenToHandle.get(token);
            if (handle == null) {
                handle = new ExternalProcessHandle(token);
                externalProcessTokenToHandle.put(token, handle);
            }
            handle.mAcquisitionCount++;
        }
    }

    /**
     * Reverses one {@link #addExternalProcessHandleLocked} call.
     *
     * @return true if a reference was released, false if nothing was held.
     */
    public boolean removeExternalProcessHandleLocked(IBinder token) {
        if (hasExternalProcessHandles()) {
            boolean hasHandle = false;
            if (externalProcessTokenToHandle != null) {
                ExternalProcessHandle handle = externalProcessTokenToHandle.get(token);
                if (handle != null) {
                    hasHandle = true;
                    handle.mAcquisitionCount--;
                    if (handle.mAcquisitionCount == 0) {
                        removeExternalProcessHandleInternalLocked(token);
                        return true;
                    }
                }
            }
            if (!hasHandle) {
                // Unknown token: treat as releasing one anonymous acquisition.
                externalProcessNoHandleCount--;
                return true;
            }
        }
        return false;
    }

    /** Drops the handle for {@code token}, unlinking its death recipient first. */
    private void removeExternalProcessHandleInternalLocked(IBinder token) {
        ExternalProcessHandle handle = externalProcessTokenToHandle.get(token);
        handle.unlinkFromOwnDeathLocked();
        externalProcessTokenToHandle.remove(token);
        if (externalProcessTokenToHandle.size() == 0) {
            externalProcessTokenToHandle = null;
        }
    }

    public boolean hasExternalProcessHandles() {
        return (externalProcessTokenToHandle != null || externalProcessNoHandleCount > 0);
    }

    void dump(PrintWriter pw, String prefix, boolean full) {
        if (full) {
            pw.print(prefix); pw.print("package=");
                    pw.print(info.applicationInfo.packageName);
                    pw.print(" process="); pw.println(info.processName);
        }
        pw.print(prefix); pw.print("proc="); pw.println(proc);
        if (launchingApp != null) {
            pw.print(prefix); pw.print("launchingApp="); pw.println(launchingApp);
        }
        if (full) {
            pw.print(prefix); pw.print("uid="); pw.print(uid);
                    pw.print(" provider="); pw.println(provider);
        }
        if (singleton) {
            pw.print(prefix); pw.print("singleton="); pw.println(singleton);
        }
        pw.print(prefix); pw.print("authority="); pw.println(info.authority);
        if (full) {
            if (info.isSyncable || info.multiprocess || info.initOrder != 0) {
                pw.print(prefix); pw.print("isSyncable="); pw.print(info.isSyncable);
                        pw.print(" multiprocess="); pw.print(info.multiprocess);
                        pw.print(" initOrder="); pw.println(info.initOrder);
            }
        }
        if (full) {
            if (hasExternalProcessHandles()) {
                pw.print(prefix); pw.print("externals=");
                // BUG FIX: the map may be null while only anonymous (null-token)
                // external references exist; dereferencing it unconditionally
                // could NPE during dumpsys.
                pw.println(externalProcessTokenToHandle != null
                        ? externalProcessTokenToHandle.size() : 0);
            }
        } else {
            if (connections.size() > 0 || externalProcessNoHandleCount > 0) {
                pw.print(prefix); pw.print(connections.size());
                        pw.print(" connections, "); pw.print(externalProcessNoHandleCount);
                        pw.println(" external handles");
            }
        }
        if (connections.size() > 0) {
            if (full) {
                pw.print(prefix); pw.println("Connections:");
            }
            for (int i=0; i<connections.size(); i++) {
                ContentProviderConnection conn = connections.get(i);
                pw.print(prefix); pw.print("  -> "); pw.println(conn.toClientString());
                if (conn.provider != this) {
                    pw.print(prefix); pw.print("    *** WRONG PROVIDER: ");
                            pw.println(conn.provider);
                }
            }
        }
    }

    @Override
    public String toString() {
        if (stringName != null) {
            return stringName;
        }
        StringBuilder sb = new StringBuilder(128);
        sb.append("ContentProviderRecord{");
        sb.append(Integer.toHexString(System.identityHashCode(this)));
        sb.append(" u");
        sb.append(UserHandle.getUserId(uid));
        sb.append(' ');
        sb.append(name.flattenToShortString());
        sb.append('}');
        return stringName = sb.toString();
    }

    public String toShortString() {
        if (shortStringName != null) {
            return shortStringName;
        }
        StringBuilder sb = new StringBuilder(128);
        sb.append(Integer.toHexString(System.identityHashCode(this)));
        sb.append('/');
        sb.append(name.flattenToShortString());
        return shortStringName = sb.toString();
    }

    // This class represents a handle from an external process to a provider.
    private class ExternalProcessHandle implements DeathRecipient {
        // BUG FIX: tag was misspelled "ExternalProcessHanldle", which breaks log filtering.
        private static final String LOG_TAG = "ExternalProcessHandle";
        private final IBinder mToken;
        private int mAcquisitionCount;

        public ExternalProcessHandle(IBinder token) {
            mToken = token;
            try {
                token.linkToDeath(this, 0);
            } catch (RemoteException re) {
                Slog.e(LOG_TAG, "Couldn't register for death for token: " + mToken, re);
            }
        }

        public void unlinkFromOwnDeathLocked() {
            mToken.unlinkToDeath(this, 0);
        }

        @Override
        public void binderDied() {
            synchronized (service) {
                // BUG FIX: check the map directly instead of
                // hasExternalProcessHandles(), which can be true while the map
                // is null (anonymous handles only) and would have NPE'd on get().
                if (externalProcessTokenToHandle != null
                        && externalProcessTokenToHandle.get(mToken) != null) {
                    removeExternalProcessHandleInternalLocked(mToken);
                }
            }
        }
    }
}
| |
package com.vaadin.tests.components.abstractfield;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import com.vaadin.tests.components.AbstractComponentTest;
import com.vaadin.ui.MenuBar;
import com.vaadin.ui.MenuBar.MenuItem;
import com.vaadin.v7.data.Property;
import com.vaadin.v7.data.Property.ReadOnlyStatusChangeEvent;
import com.vaadin.v7.data.Property.ReadOnlyStatusChangeListener;
import com.vaadin.v7.data.Property.ValueChangeListener;
import com.vaadin.v7.ui.AbstractField;
/**
 * Base test UI for legacy (Vaadin 7 compatibility) {@link AbstractField}
 * implementations. Adds menu actions for toggling the required state, attaching
 * value-change and read-only-status-change listeners, and setting text values,
 * and logs listener events as they fire.
 *
 * @param <T> the concrete legacy field class under test
 */
public abstract class LegacyAbstractFieldTest<T extends AbstractField>
        extends AbstractComponentTest<T>
        implements ValueChangeListener, ReadOnlyStatusChangeListener {
    // When true, collection values are sorted before logging so the logged
    // output does not depend on the collection's iteration order.
    private boolean sortValueChanges = true;
    @Override
    protected void createActions() {
        super.createActions();
        createBooleanAction("Required", CATEGORY_STATE, false, requiredCommand);
        createRequiredErrorSelect(CATEGORY_DECORATIONS);
        createValueChangeListener(CATEGORY_LISTENERS);
        createReadOnlyStatusChangeListener(CATEGORY_LISTENERS);
        // Remaining field features that could get actions here:
        // * invalidcommitted
        // * commit()
        // * discard()
        // * writethrough
        // * readthrough
        // * addvalidator
        // * isvalid
        // * invalidallowed
        // * error indicator
        //
        // * validation visible
        // * ShortcutListener
    }
    @Override
    protected void populateSettingsMenu(MenuItem settingsMenu) {
        super.populateSettingsMenu(settingsMenu);
        // Only add the field-specific submenu when the tested class really is
        // a legacy AbstractField.
        if (AbstractField.class.isAssignableFrom(getTestClass())) {
            MenuItem abstractField = settingsMenu.addItem("LegacyAbstractField",
                    null);
            // Logs the current value of every test component.
            abstractField.addItem("Show value", new MenuBar.Command() {
                @Override
                public void menuSelected(MenuItem selectedItem) {
                    for (T a : getTestComponents()) {
                        log(a.getClass().getSimpleName() + " value: "
                                + getValue(a));
                    }
                }
            });
            // Checkable toggle backing the sortValueChanges flag.
            MenuItem sortValueChangesItem = abstractField.addItem(
                    "Show sorted value changes", new MenuBar.Command() {
                        @Override
                        public void menuSelected(MenuItem selectedItem) {
                            sortValueChanges = selectedItem.isChecked();
                            log("Show sorted value changes: "
                                    + sortValueChanges);
                        }
                    });
            sortValueChangesItem.setCheckable(true);
            sortValueChangesItem.setChecked(true);
        }
    }
    // Adds a select action offering required-error messages of various lengths.
    private void createRequiredErrorSelect(String category) {
        LinkedHashMap<String, String> options = new LinkedHashMap<>();
        options.put("-", null);
        options.put(TEXT_SHORT, TEXT_SHORT);
        options.put("Medium", TEXT_MEDIUM);
        options.put("Long", TEXT_LONG);
        options.put("Very long", TEXT_VERY_LONG);
        createSelectAction("Required error message", category, options, "-",
                requiredErrorMessageCommand);
    }
    // Adds a boolean action that attaches/detaches this UI as a value change listener.
    private void createValueChangeListener(String category) {
        createBooleanAction("Value change listener", category, false,
                valueChangeListenerCommand);
    }
    // Adds a boolean action that attaches/detaches this UI as a read-only status listener.
    private void createReadOnlyStatusChangeListener(String category) {
        createBooleanAction("Read only status change listener", category, false,
                readonlyStatusChangeListenerCommand);
    }
    // Registers/unregisters this class as a ValueChangeListener on the field.
    // The cast disambiguates the deprecated addListener(Object) overloads.
    protected Command<T, Boolean> valueChangeListenerCommand = new Command<T, Boolean>() {
        @Override
        public void execute(T c, Boolean value, Object data) {
            if (value) {
                c.addListener(
                        (ValueChangeListener) LegacyAbstractFieldTest.this);
            } else {
                c.removeListener(
                        (ValueChangeListener) LegacyAbstractFieldTest.this);
            }
        }
    };
    // Registers/unregisters this class as a ReadOnlyStatusChangeListener on the field.
    protected Command<T, Boolean> readonlyStatusChangeListenerCommand = new Command<T, Boolean>() {
        @Override
        public void execute(T c, Boolean value, Object data) {
            if (value) {
                c.addListener(
                        (ReadOnlyStatusChangeListener) LegacyAbstractFieldTest.this);
            } else {
                c.removeListener(
                        (ReadOnlyStatusChangeListener) LegacyAbstractFieldTest.this);
            }
        }
    };
    // Sets the given value on the field under test.
    protected Command<T, Object> setValueCommand = new Command<T, Object>() {
        @Override
        public void execute(T c, Object value, Object data) {
            c.setValue(value);
        }
    };
    @Override
    public void valueChange(
            com.vaadin.v7.data.Property.ValueChangeEvent event) {
        log(event.getClass().getSimpleName() + ", new value: "
                + getValue(event.getProperty()));
    }
    /**
     * Formats a property value for logging: collections are optionally sorted
     * (see sortValueChanges), dates use a fixed en_US pattern, null stays
     * "null" and everything else is quoted.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private String getValue(Property property) {
        Object o = property.getValue();
        if (o instanceof Collection && sortValueChanges) {
            // Sort collections to avoid problems with values printed in
            // different order
            try {
                List<Comparable> c = new ArrayList<Comparable>((Collection) o);
                Collections.sort(c);
                o = c;
            } catch (Exception e) {
                // continue with unsorted if sorting fails for some reason
                log("Exception while sorting value: " + e.getMessage());
            }
        }
        // Distinguish between null and 'null'
        String value = "null";
        if (o != null) {
            if (o instanceof Date) {
                Date d = (Date) o;
                // Dec 31, 2068 23:09:26.531
                String pattern = "MMM d, yyyy HH:mm:ss.SSS";
                SimpleDateFormat format = new SimpleDateFormat(pattern,
                        new Locale("en", "US"));
                value = format.format(d);
            } else {
                value = "'" + o + "'";
            }
        }
        return value;
    }
    @Override
    public void readOnlyStatusChange(ReadOnlyStatusChangeEvent event) {
        log(event.getClass().getSimpleName());
    }
    // Adds a "Set text value" sub-menu with empty, null and sample string values.
    protected void createSetTextValueAction(String category) {
        String subCategory = "Set text value";
        createCategory(subCategory, category);
        List<String> values = new ArrayList<>();
        values.add("Test");
        values.add("A little longer value");
        values.add(
                "A very long value with very much text. All in all it is 74 characters long");
        createClickAction("(empty string)", subCategory, setValueCommand, "");
        createClickAction("(null)", subCategory, setValueCommand, null);
        for (String value : values) {
            createClickAction(value, subCategory, setValueCommand, value);
        }
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import com.dd.plist.NSDictionary;
import com.dd.plist.NSNumber;
import com.dd.plist.NSString;
import com.dd.plist.PropertyListParser;
import com.facebook.buck.apple.toolchain.ApplePlatform;
import com.facebook.buck.cxx.toolchain.LinkerMapMode;
import com.facebook.buck.cxx.toolchain.StripStyle;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.testutil.integration.BuckBuildLog;
import com.facebook.buck.testutil.integration.FakeAppleDeveloperEnvironment;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class AppleBundleIntegrationTest {
@Rule public TemporaryPaths tmp = new TemporaryPaths();
@Rule public ExpectedException thrown = ExpectedException.none();
private ProjectFilesystem filesystem;
  /**
   * Creates the test filesystem and skips the whole suite unless running on macOS with the
   * macosx Apple platform toolchain available.
   */
  @Before
  public void setUp() throws InterruptedException {
    filesystem = TestProjectFilesystems.createProjectFilesystem(tmp.getRoot());
    assumeTrue(Platform.detect() == Platform.MACOS);
    assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
  }
private boolean checkCodeSigning(Path absoluteBundlePath)
throws IOException, InterruptedException {
if (!Files.exists(absoluteBundlePath)) {
throw new NoSuchFileException(absoluteBundlePath.toString());
}
return CodeSigning.hasValidSignature(
new DefaultProcessExecutor(new TestConsole()), absoluteBundlePath);
}
  /**
   * Builds the given fully-qualified bundle target, verifies the expected output layout, and
   * asserts the app binary exists, is signed, and (being non-Swift) ships no Frameworks/ dir.
   */
  private void runSimpleApplicationBundleTestWithBuildTarget(String fqtn)
      throws IOException, InterruptedException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_no_debug", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget(fqtn);
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargets.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    assertTrue(Files.exists(appPath.resolve(target.getShortName())));
    assertTrue(checkCodeSigning(appPath));
    // Non-Swift target shouldn't include Frameworks/
    assertFalse(Files.exists(appPath.resolve("Frameworks")));
  }
  /**
   * With apple.cache_bundles_and_packages=false, bundle outputs must not be served from the dir
   * cache: after a clean that keeps the cache, the target still has to build locally.
   */
  @Test
  public void testDisablingBundleCaching() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_no_debug", tmp);
    workspace.setUp();
    String target = "//:DemoApp#iphonesimulator-x86_64,no-debug,no-include-frameworks";
    workspace.enableDirCache();
    workspace.runBuckBuild("-c", "apple.cache_bundles_and_packages=false", target).assertSuccess();
    workspace.runBuckCommand("clean", "--keep-cache");
    workspace.runBuckBuild("-c", "apple.cache_bundles_and_packages=false", target).assertSuccess();
    workspace.getBuildLog().assertTargetBuiltLocally(target);
  }
  /** Smoke test: builds a simple simulator app bundle with the default flavor set. */
  @Test
  public void simpleApplicationBundle() throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
  }
  // Linker maps appear to be produced by default, so the default flavor set covers the
  // "with linker map" case (the without-case below adds no-linkermap explicitly) —
  // NOTE(review): confirm the default against LinkerMapMode.
  @Test
  public void simpleApplicationBundleWithLinkerMapDoesNotAffectOutput()
      throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
  }
  /** The no-linkermap flavor must not change the produced bundle contents. */
  @Test
  public void simpleApplicationBundleWithoutLinkerMapDoesNotAffectOutput()
      throws IOException, InterruptedException {
    runSimpleApplicationBundleTestWithBuildTarget(
        "//:DemoApp#iphonesimulator-x86_64,no-debug,no-linkermap");
  }
  /**
   * Builds and signs a device app, then checks provisioning-profile matching: an iOS profile
   * must not match a tvOS target, and pointing the search path at tvOS profiles must succeed.
   */
  @Test
  public void simpleApplicationBundleWithCodeSigning() throws Exception {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_codesigning", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphoneos-arm64,no-debug");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargets.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    assertTrue(Files.exists(appPath.resolve(target.getShortName())));
    assertTrue(checkCodeSigning(appPath));
    // Do not match iOS profiles on tvOS targets.
    target = workspace.newBuildTarget("//:DemoApp#appletvos-arm64,no-debug");
    ProcessResult result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertFailure();
    assertTrue(result.getStderr().contains("No valid non-expired provisioning profiles match"));
    // Match tvOS profile.
    workspace.addBuckConfigLocalOption(
        "apple", "provisioning_profile_search_path", "provisioning_profiles_tvos");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
  }
  /** A single-arch device bundle must invoke the target-specified codesign tool. */
  @Test
  public void simpleApplicationBundleWithTargetCodeSigning() throws Exception {
    assertTargetCodesignToolIsUsedFor("//:DemoApp#iphoneos-arm64,no-debug");
  }
  /** A fat (multi-arch) device bundle must also invoke the target-specified codesign tool. */
  @Test
  public void simpleFatApplicationBundleWithTargetCodeSigning() throws Exception {
    assertTargetCodesignToolIsUsedFor("//:DemoApp#iphoneos-arm64,iphoneos-armv7,no-debug");
  }
  /**
   * Builds the target in a scenario whose codesign tool is a fake that prints a marker to
   * stderr, and asserts the marker shows up — proving the custom tool was invoked.
   */
  private void assertTargetCodesignToolIsUsedFor(String fullyQualifiedName) throws Exception {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_target_codesigning", tmp);
    workspace.setUp();
    BuildTarget target = workspace.newBuildTarget(fullyQualifiedName);
    ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    // custom codesign tool exits with non-zero error code and prints a message to the stderr, so
    // that its use can be detected
    assertThat(buildResult.getStderr(), containsString("codesign was here"));
  }
private NSDictionary verifyAndParsePlist(Path path) throws Exception {
assertTrue(Files.exists(path));
String resultContents = filesystem.readFileIfItExists(path).get();
NSDictionary resultPlist =
(NSDictionary) PropertyListParser.parse(resultContents.getBytes(Charsets.UTF_8));
return resultPlist;
}
  /**
   * With apple.dry_run_code_signing=true the build must emit the dry-run artifacts
   * (entitlements, provisioning-profile info, per-bundle codesign args) instead of actually
   * signing; verifies their recorded values for both the app and its embedded framework.
   */
  @Test
  public void simpleApplicationBundleWithDryRunCodeSigning() throws Exception {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_codesigning", tmp);
    workspace.setUp();
    workspace.addBuckConfigLocalOption("apple", "dry_run_code_signing", "true");
    BuildTarget target =
        workspace.newBuildTarget("//:DemoAppWithFramework#iphoneos-arm64,no-debug");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    Path codeSignResultsPath = appPath.resolve("BUCK_code_sign_entitlements.plist");
    assertTrue(Files.exists(codeSignResultsPath));
    NSDictionary resultPlist = verifyAndParsePlist(appPath.resolve("BUCK_pp_dry_run.plist"));
    assertEquals(new NSString("com.example.DemoApp"), resultPlist.get("bundle-id"));
    assertEquals(new NSString("12345ABCDE"), resultPlist.get("team-identifier"));
    assertEquals(
        new NSString("00000000-0000-0000-0000-000000000000"),
        resultPlist.get("provisioning-profile-uuid"));
    // Codesigning main bundle
    resultPlist = verifyAndParsePlist(appPath.resolve("BUCK_code_sign_args.plist"));
    assertEquals(new NSNumber(true), resultPlist.get("use-entitlements"));
    // Codesigning embedded framework bundle
    resultPlist =
        verifyAndParsePlist(
            appPath.resolve("Frameworks/DemoFramework.framework/BUCK_code_sign_args.plist"));
    assertEquals(new NSNumber(false), resultPlist.get("use-entitlements"));
  }
  /**
   * An app with include-frameworks must embed and sign its framework inside the .app, while a
   * standalone build of the framework itself stays unsigned.
   */
  @Test
  public void simpleApplicationBundleWithEmbeddedFrameworks() throws Exception {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_codesigning", tmp);
    workspace.setUp();
    BuildTarget appTarget =
        workspace.newBuildTarget(
            "//:DemoAppWithFramework#iphoneos-arm64,no-debug,include-frameworks");
    workspace.runBuckCommand("build", appTarget.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoAppWithFramework_output.expected"),
        BuildTargets.getGenPath(filesystem, appTarget, "%s"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(filesystem, appTarget, "%s")
                .resolve(appTarget.getShortName() + ".app"));
    assertTrue(Files.exists(appPath.resolve(appTarget.getShortName())));
    assertTrue(checkCodeSigning(appPath));
    BuildTarget frameworkTarget =
        workspace.newBuildTarget("//:DemoFramework#iphoneos-arm64,no-debug,no-include-frameworks");
    Path frameworkPath =
        workspace.getPath(
            BuildTargets.getGenPath(filesystem, frameworkTarget, "%s")
                .resolve(frameworkTarget.getShortName() + ".framework"));
    assertFalse(checkCodeSigning(frameworkPath));
    Path embeddedFrameworkPath = appPath.resolve(Paths.get("Frameworks/DemoFramework.framework"));
    assertTrue(Files.exists(embeddedFrameworkPath.resolve(frameworkTarget.getShortName())));
    assertTrue(checkCodeSigning(embeddedFrameworkPath));
  }
  // Specifying entitlements file via apple_binary entitlements_file
  /**
   * Signing with an entitlements file declared via apple_binary's entitlements_file must produce
   * the expected .xcent contents and a validly signed bundle.
   */
  @Test
  public void simpleApplicationBundleWithCodeSigningAndEntitlements()
      throws IOException, InterruptedException {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_codesigning_and_entitlements", tmp);
    workspace.setUp();
    BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#iphoneos-arm64,no-debug");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargets.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));
    workspace.assertFilesEqual(
        Paths.get("DemoApp.xcent.expected"),
        BuildTargets.getScratchPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s.xcent"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    assertTrue(Files.exists(appPath.resolve(target.getShortName())));
    assertTrue(checkCodeSigning(appPath));
  }
  // Legacy method -- specifying entitlements file via info_plist_substitutions
  /**
   * Same entitlements check as above, but using the legacy info_plist_substitutions mechanism;
   * the resulting .xcent must match the same expected file.
   */
  @Test
  public void simpleApplicationBundleWithCodeSigningAndEntitlementsUsingInfoPlistSubstitutions()
      throws IOException, InterruptedException {
    assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_with_codesigning_and_entitlements", tmp);
    workspace.setUp();
    BuildTarget target =
        BuildTargetFactory.newInstance(
            "//:DemoAppUsingInfoPlistSubstitutions#iphoneos-arm64,no-debug");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.assertFilesEqual(
        Paths.get("DemoApp.xcent.expected"),
        BuildTargets.getScratchPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s.xcent"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    assertTrue(Files.exists(appPath.resolve(target.getShortName())));
    assertTrue(checkCodeSigning(appPath));
  }
  /**
   * Building with two simulator arch flavors must produce a fat binary; verified by asking
   * lipo to confirm both i386 and x86_64 slices are present.
   */
  @Test
  public void simpleApplicationBundleWithFatBinary() throws IOException, InterruptedException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_fat_application_bundle_no_debug", tmp);
    workspace.setUp();
    BuildTarget target =
        workspace.newBuildTarget("//:DemoApp#iphonesimulator-i386,iphonesimulator-x86_64,no-debug");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargets.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));
    Path appPath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    Path outputFile = appPath.resolve(target.getShortName());
    assertTrue(Files.exists(outputFile));
    ProcessExecutor.Result result =
        workspace.runCommand("lipo", outputFile.toString(), "-verify_arch", "i386", "x86_64");
    assertEquals(0, result.getExitCode());
  }
  /** `buck targets --show-output` must report the bundle's .app path for the target. */
  @Test
  public void bundleHasOutputPath() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_no_debug", tmp);
    workspace.setUp();
    BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
    ProcessResult result =
        workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName());
    result.assertSuccess();
    Path appPath =
        BuildTargets.getGenPath(
                filesystem,
                target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                "%s")
            .resolve(target.getShortName() + ".app");
    assertEquals(
        String.format("%s %s", target.getFullyQualifiedName(), appPath), result.getStdout().trim());
  }
  /**
   * An app extension target must report an .appex output path and, after building, contain its
   * binary inside the .appex directory.
   */
  @Test
  public void extensionBundle() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "simple_extension", tmp);
    workspace.setUp();
    BuildTarget target = BuildTargetFactory.newInstance("//:DemoExtension#no-debug");
    ProcessResult result =
        workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName());
    result.assertSuccess();
    Path extensionPath =
        BuildTargets.getGenPath(
                filesystem,
                target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                "%s")
            .resolve(target.getShortName() + ".appex");
    assertEquals(
        String.format("%s %s", target.getFullyQualifiedName(), extensionPath),
        result.getStdout().trim());
    result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    Path outputBinary = workspace.getPath(extensionPath.resolve(target.getShortName()));
    assertTrue(
        String.format(
            "Extension binary could not be found inside the appex dir [%s].", outputBinary),
        Files.exists(outputBinary));
  }
  /**
   * An app depending on an extension must bundle the extension under PlugIns/ alongside the
   * app's own binary.
   */
  @Test
  public void appBundleWithExtensionBundleDependency() throws IOException {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "simple_app_with_extension", tmp);
    workspace.setUp();
    BuildTarget target = BuildTargetFactory.newInstance("//:DemoAppWithExtension#no-debug");
    ProcessResult result =
        workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName());
    result.assertSuccess();
    Path appPath =
        BuildTargets.getGenPath(
                filesystem,
                target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                "%s")
            .resolve(target.getShortName() + ".app");
    assertEquals(
        String.format("%s %s", target.getFullyQualifiedName(), appPath), result.getStdout().trim());
    result = workspace.runBuckCommand("build", target.getFullyQualifiedName());
    result.assertSuccess();
    assertTrue(Files.exists(workspace.getPath(appPath.resolve("DemoAppWithExtension"))));
    assertTrue(
        Files.exists(
            workspace.getPath(appPath.resolve("PlugIns/DemoExtension.appex/DemoExtension"))));
  }
  /**
   * With dwarf-and-dsym, the build must emit a .dSYM containing debug info for main, while the
   * bundled binary itself is stripped — confirmed by dsymutil warning about missing symbols.
   */
  @Test
  public void bundleBinaryHasDsymBundle() throws Exception {
    ProjectWorkspace workspace =
        TestDataHelper.createProjectWorkspaceForScenario(
            this, "simple_application_bundle_dwarf_and_dsym", tmp);
    workspace.setUp();
    BuildTarget target =
        workspace.newBuildTarget("//:DemoApp#dwarf-and-dsym,iphonesimulator-x86_64");
    workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
    workspace.verify(
        Paths.get("DemoApp_output.expected"),
        BuildTargets.getGenPath(
            filesystem,
            target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
            "%s"));
    Path bundlePath =
        workspace.getPath(
            BuildTargets.getGenPath(
                    filesystem,
                    target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
                    "%s")
                .resolve(target.getShortName() + ".app"));
    Path dwarfPath =
        bundlePath.getParent().resolve("DemoApp.app.dSYM/Contents/Resources/DWARF/DemoApp");
    Path binaryPath = bundlePath.resolve("DemoApp");
    assertTrue(Files.exists(dwarfPath));
    AppleDsymTestUtil.checkDsymFileHasDebugSymbolForMain(workspace, dwarfPath);
    ProcessExecutor.Result result =
        workspace.runCommand("dsymutil", "-o", binaryPath + ".test.dSYM", binaryPath.toString());
    // dsymutil's diagnostics may land on stderr or stdout depending on version; check both.
    String dsymutilOutput = "";
    if (result.getStderr().isPresent()) {
      dsymutilOutput = result.getStderr().get();
    }
    if (dsymutilOutput.isEmpty()) {
      assertThat(result.getStdout().isPresent(), is(true));
      dsymutilOutput = result.getStdout().get();
    }
    assertThat(dsymutilOutput, containsString("warning: no debug symbols in executable"));
  }
@Test
public void bundleBinaryHasLinkerMapFile() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "simple_application_bundle_dwarf_and_dsym", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target
.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR)
.withAppendedFlavors(AppleDebugFormat.DWARF_AND_DSYM.getFlavor()),
"%s"));
BuildTarget binaryWithLinkerMap =
workspace.newBuildTarget("//:DemoAppBinary#iphonesimulator-x86_64");
Path binaryWithLinkerMapPath = BuildTargets.getGenPath(filesystem, binaryWithLinkerMap, "%s");
Path linkMapPath = BuildTargets.getGenPath(filesystem, binaryWithLinkerMap, "%s-LinkMap.txt");
assertThat(Files.exists(workspace.resolve(binaryWithLinkerMapPath)), Matchers.equalTo(true));
assertThat(Files.exists(workspace.resolve(linkMapPath)), Matchers.equalTo(true));
BuildTarget binaryWithoutLinkerMap =
workspace
.newBuildTarget("//:DemoAppBinary#iphonesimulator-x86_64")
.withAppendedFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor());
Path binaryWithoutLinkerMapPath =
BuildTargets.getGenPath(filesystem, binaryWithoutLinkerMap, "%s");
assertThat(
Files.exists(workspace.resolve(binaryWithoutLinkerMapPath)), Matchers.equalTo(false));
}
public String runSimpleBuildWithDefinedStripStyle(StripStyle stripStyle) throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "simple_application_bundle_no_debug", tmp);
workspace.setUp();
BuildTarget target =
workspace.newBuildTarget(
"//:DemoApp#iphonesimulator-x86_64," + stripStyle.getFlavor().getName());
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR,
stripStyle.getFlavor(),
AppleDebugFormat.NONE.getFlavor()),
"%s"));
Path bundlePath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR,
stripStyle.getFlavor(),
AppleDebugFormat.NONE.getFlavor()),
"%s")
.resolve(target.getShortName() + ".app"));
Path binaryPath = bundlePath.resolve("DemoApp");
ProcessExecutor.Result result = workspace.runCommand("nm", binaryPath.toString());
return result.getStdout().orElse("");
}
@Test
public void bundleBinaryWithStripStyleAllDoesNotContainAnyDebugInfo() throws Exception {
String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.ALL_SYMBOLS);
assertThat(nmOutput, not(containsString("t -[AppDelegate window]")));
assertThat(nmOutput, not(containsString("S _OBJC_METACLASS_$_AppDelegate")));
}
@Test
public void bundleBinaryWithStripStyleNonGlobalContainsOnlyGlobals() throws Exception {
String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.NON_GLOBAL_SYMBOLS);
assertThat(nmOutput, not(containsString("t -[AppDelegate window]")));
assertThat(nmOutput, containsString("S _OBJC_METACLASS_$_AppDelegate"));
}
@Test
public void bundleBinaryWithStripStyleDebuggingContainsGlobalsAndLocals() throws Exception {
String nmOutput = runSimpleBuildWithDefinedStripStyle(StripStyle.DEBUGGING_SYMBOLS);
assertThat(nmOutput, containsString("t -[AppDelegate window]"));
assertThat(nmOutput, containsString("S _OBJC_METACLASS_$_AppDelegate"));
}
@Test
public void appBundleWithResources() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "app_bundle_with_resources", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
}
@Test
public void appBundleWithConflictingFileAndFolderResources() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_conflicting_resources", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertFailure();
}
@Test
public void appBundleWithConflictingNestedFolderResources() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_conflicting_nested_resources", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertFailure();
}
@Test
public void appBundleWithConflictingFilenamesInNestedFolders() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_conflicting_filenames_in_nested_folders", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.DWARF_AND_DSYM.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
}
@Test
public void appBundleVariantDirectoryMustEndInLproj() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_invalid_variant", tmp);
workspace.setUp();
ProcessResult processResult =
workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug");
processResult.assertFailure();
assertThat(
processResult.getStderr(),
allOf(
containsString("Variant files have to be in a directory with name ending in '.lproj',"),
containsString("/cc/Localizable.strings' is not.")));
}
@Test
public void defaultPlatformInBuckConfig() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "default_platform_in_buckconfig_app_bundle", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.DWARF_AND_DSYM.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.DWARF_AND_DSYM.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName()))));
}
@Test
public void defaultPlatformInBuckConfigWithFlavorSpecified() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "default_platform_in_buckconfig_flavored_app_bundle", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName()))));
}
@Test
public void appleAssetCatalogsAreIncludedInBundle() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_asset_catalogs_are_included_in_bundle", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
Path outputPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s");
workspace.verify(Paths.get("DemoApp_output.expected"), outputPath);
Path appPath = outputPath.resolve(target.getShortName() + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve("Assets.car"))));
}
@Test
public void generatedAppleAssetCatalogsAreIncludedInBundle() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_asset_catalogs_are_included_in_bundle", tmp);
workspace.setUp();
BuildTarget appTarget = BuildTargetFactory.newInstance("//:CombinedAssetsApp#no-debug");
BuildTarget genruleTarget = BuildTargetFactory.newInstance("//:MakeCombinedAssets");
BuildTarget assetTarget = appTarget.withAppendedFlavors(AppleAssetCatalog.FLAVOR);
workspace.runBuckCommand("build", appTarget.getFullyQualifiedName()).assertSuccess();
// Check that the genrule was invoked
workspace.getBuildLog().assertTargetBuiltLocally(genruleTarget.getFullyQualifiedName());
// Check the actool output: Merged.bundle/Assets.car
assertFileInOutputContainsString(
"Image2", workspace, assetTarget, "%s/Merged.bundle/Assets.car");
// Check the app package: Assets.car
assertFileInOutputContainsString(
"Image2",
workspace,
appTarget.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s/" + appTarget.getShortName() + ".app/Assets.car");
}
@Test
public void appleAssetCatalogsWithCompilationOptions() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_asset_catalogs_are_included_in_bundle", tmp);
workspace.setUp();
BuildTarget target =
BuildTargetFactory.newInstance("//:DemoAppWithAssetCatalogCompilationOptions#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
}
private void assertFileInOutputContainsString(
String needle, ProjectWorkspace workspace, BuildTarget target, String genPathFormat)
throws IOException {
Path outputPath = BuildTargets.getGenPath(filesystem, target, genPathFormat);
Path path = workspace.getPath(outputPath);
assertTrue(Files.exists(path));
String contents = workspace.getFileContents(outputPath);
assertTrue(contents.contains(needle));
}
@Test
public void appleAssetCatalogsWithMoreThanOneAppIconOrLaunchImageShouldFail() throws IOException {
thrown.expect(HumanReadableException.class);
thrown.expectMessage("At most one asset catalog in the dependencies of");
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_asset_catalogs_are_included_in_bundle", tmp);
workspace.setUp();
BuildTarget target =
BuildTargetFactory.newInstance("//:DemoAppWithMoreThanOneIconAndLaunchImage#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName());
}
@Test
public void appleBundleDoesNotPropagateIncludeFrameworkFlavors() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "simple_app_with_extension", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoAppWithExtension#no-debug");
ProcessResult result =
workspace.runBuckCommand("build", "--show-output", target.getFullyQualifiedName());
result.assertSuccess();
BuckBuildLog buckBuildLog = workspace.getBuildLog();
ImmutableSet<String> targetsThatShouldContainIncludeFrameworkFlavors =
ImmutableSet.of("//:DemoAppWithExtension", "//:DemoExtension");
ImmutableSet<Flavor> includeFrameworkFlavors =
ImmutableSet.of(
InternalFlavor.of("no-include-frameworks"), InternalFlavor.of("include-frameworks"));
for (BuildTarget builtTarget : buckBuildLog.getAllTargets()) {
if (Sets.intersection(builtTarget.getFlavors(), includeFrameworkFlavors).isEmpty()) {
assertThat(
builtTarget.getUnflavoredBuildTarget().getFullyQualifiedName(),
not(in(targetsThatShouldContainIncludeFrameworkFlavors)));
} else {
assertThat(
builtTarget.getUnflavoredBuildTarget().getFullyQualifiedName(),
in(targetsThatShouldContainIncludeFrameworkFlavors));
}
}
}
@Test
public void infoPlistSubstitutionsAreApplied() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "application_bundle_with_substitutions", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve(target.getShortName()))));
NSDictionary plist =
(NSDictionary)
PropertyListParser.parse(
Files.readAllBytes(workspace.getPath(appPath.resolve("Info.plist"))));
assertThat(
"Should contain xcode build version",
(String) plist.get("DTXcodeBuild").toJavaObject(),
not(emptyString()));
}
@Test
public void infoPlistSubstitutionsAreAppliedToEntitlements()
throws IOException, InterruptedException {
assumeTrue(FakeAppleDeveloperEnvironment.supportsCodeSigning());
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "application_bundle_with_entitlements_substitutions", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#iphoneos-arm64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
workspace.assertFilesEqual(
Paths.get("DemoApp.xcent.expected"),
BuildTargets.getScratchPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s.xcent"));
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
assertTrue(Files.exists(appPath.resolve(target.getShortName())));
assertTrue(checkCodeSigning(appPath));
}
@Test
public void productNameChangesBundleAndBinaryNames() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "application_bundle_with_product_name", tmp);
workspace.setUp();
workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug").assertSuccess();
BuildTarget target =
BuildTargetFactory.newInstance("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify();
String productName = "BrandNewProduct";
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(productName + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve(productName))));
}
@Test
public void infoPlistWithUnrecognizedVariableFails() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "application_bundle_with_invalid_substitutions", tmp);
workspace.setUp();
workspace.runBuckCommand("build", "//:DemoApp#iphonesimulator-x86_64,no-debug").assertFailure();
}
@Test
public void resourcesAreCompiled() throws Exception {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_compiled_resources", tmp);
workspace.setUp();
BuildTarget target = workspace.newBuildTarget("//:DemoApp#iphonesimulator-x86_64,no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertTrue(Files.exists(workspace.getPath(appPath.resolve("AppViewController.nib"))));
assertTrue(Files.exists(workspace.getPath(appPath.resolve("Model.momd"))));
assertTrue(Files.exists(workspace.getPath(appPath.resolve("Model2.momd"))));
assertTrue(Files.exists(workspace.getPath(appPath.resolve("DemoApp.scnassets"))));
}
@Test
public void watchApplicationBundle() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.WATCHOS));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "watch_application_bundle", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path watchAppPath = appPath.resolve("Watch/DemoWatchApp.app");
assertTrue(Files.exists(watchAppPath.resolve("DemoWatchApp")));
assertTrue(
Files.exists(
watchAppPath.resolve("PlugIns/DemoWatchAppExtension.appex/DemoWatchAppExtension")));
assertTrue(Files.exists(watchAppPath.resolve("Interface.plist")));
}
@Test
public void legacyWatchApplicationBundle() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.WATCHOS));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "legacy_watch_application_bundle", tmp);
workspace.setUp();
BuildTarget target =
BuildTargetFactory.newInstance(
"//:DemoApp#no-debug,iphonesimulator-x86_64,iphonesimulator-i386");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
workspace.verify(
Paths.get("DemoApp_output.expected"),
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s"));
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path watchExtensionPath = appPath.resolve("Plugins/DemoWatchAppExtension.appex");
assertTrue(Files.exists(watchExtensionPath.resolve("DemoWatchAppExtension")));
assertTrue(Files.exists(watchExtensionPath.resolve("DemoWatchApp.app/DemoWatchApp")));
assertTrue(Files.exists(watchExtensionPath.resolve("DemoWatchApp.app/_WatchKitStub/WK")));
assertTrue(Files.exists(watchExtensionPath.resolve("DemoWatchApp.app/Interface.plist")));
}
@Test
public void copiesFrameworkBundleIntoFrameworkDirectory() throws Exception {
assumeTrue(
AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.IPHONESIMULATOR));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_embedded_framework", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path frameworkPath = appPath.resolve("Frameworks/TestFramework.framework");
assertTrue(Files.exists(frameworkPath.resolve("TestFramework")));
}
@Test
public void onlyIncludesResourcesInBundlesWhichStaticallyLinkThem() throws Exception {
assumeTrue(
AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.IPHONESIMULATOR));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_embedded_framework_and_resources", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
String resourceName = "Resource.plist";
assertFalse(Files.exists(appPath.resolve(resourceName)));
Path frameworkPath = appPath.resolve("Frameworks/TestFramework.framework");
assertTrue(Files.exists(frameworkPath.resolve(resourceName)));
}
@Test
public void testTargetOutputForAppleBundle() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "simple_application_bundle_no_debug", tmp);
workspace.setUp();
ProcessResult result;
// test no-debug output
BuildTarget target = BuildTargetFactory.newInstance("//:DemoApp#no-debug");
result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName());
result.assertSuccess();
Path appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertThat(
result.getStdout(), Matchers.startsWith(target.getFullyQualifiedName() + " " + appPath));
// test debug output
target = BuildTargetFactory.newInstance("//:DemoApp#dwarf-and-dsym");
result = workspace.runBuckCommand("targets", "--show-output", target.getFullyQualifiedName());
result.assertSuccess();
appPath =
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app");
assertThat(
result.getStdout(), Matchers.startsWith(target.getFullyQualifiedName() + " " + appPath));
}
@Test
public void macAppWithExtraBinary() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_osx_app_with_extra_binary", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug");
ProcessResult buildResult =
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
buildResult.assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path AppBinaryPath = appPath.resolve("Contents/MacOS/App");
Path WorkerBinaryPath = appPath.resolve("Contents/MacOS/Worker");
assertTrue(Files.exists(AppBinaryPath));
assertTrue(Files.exists(WorkerBinaryPath));
}
@Test
public void macAppWithXPCService() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "apple_osx_app_with_xpc_service", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug");
ProcessResult buildResult =
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
buildResult.assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path XPCServicePath = appPath.resolve("Contents/XPCServices/Service.xpc");
Path XPCServiceBinaryPath = XPCServicePath.resolve("Contents/MacOS/Service");
Path XPCServiceInfoPlistPath = XPCServicePath.resolve("Contents/Info.plist");
assertTrue(Files.exists(XPCServiceBinaryPath));
assertTrue(Files.exists(XPCServiceInfoPlistPath));
}
@Test
public void macAppWithPlugin() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "apple_osx_app_with_plugin", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug");
ProcessResult buildResult =
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
buildResult.assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path pluginPath = appPath.resolve("Contents/PlugIns/Plugin.plugin");
Path pluginBinaryPath = pluginPath.resolve("Contents/MacOS/Plugin");
Path pluginInfoPlistPath = pluginPath.resolve("Contents/Info.plist");
assertTrue(Files.exists(pluginBinaryPath));
assertTrue(Files.exists(pluginInfoPlistPath));
}
@Test
public void macAppWithPrefPane() throws IOException {
assumeTrue(AppleNativeIntegrationTestUtils.isApplePlatformAvailable(ApplePlatform.MACOSX));
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(this, "apple_osx_app_with_prefpane", tmp);
workspace.setUp();
BuildTarget target = BuildTargetFactory.newInstance("//:App#no-debug");
ProcessResult buildResult =
workspace.runBuckCommand("build", target.getFullyQualifiedName()).assertSuccess();
buildResult.assertSuccess();
Path appPath =
workspace.getPath(
BuildTargets.getGenPath(
filesystem,
target.withAppendedFlavors(
AppleDebugFormat.NONE.getFlavor(),
AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR),
"%s")
.resolve(target.getShortName() + ".app"));
Path prefPanePath = appPath.resolve("Contents/Resources/PrefPane.prefPane");
Path prefPaneBinaryPath = prefPanePath.resolve("Contents/MacOS/PrefPane");
Path prefPaneInfoPlistPath = prefPanePath.resolve("Contents/Info.plist");
assertTrue(Files.exists(prefPaneBinaryPath));
assertTrue(Files.exists(prefPaneInfoPlistPath));
}
@Test
public void resourcesFromOtherCellsCanBeProperlyIncluded() throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "bundle_with_resources_from_other_cells", tmp);
workspace.setUp();
Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64");
assertTrue("Resource file should exist.", Files.isRegularFile(outputPath.resolve("file.txt")));
}
@Test
public void bundleTraversesAppleResourceResourcesFromDepsForAdditionalResources()
throws IOException {
ProjectWorkspace workspace =
TestDataHelper.createProjectWorkspaceForScenario(
this, "app_bundle_with_resources_from_deps", tmp);
workspace.setUp();
Path outputPath = workspace.buildAndReturnOutput("//:bundle#iphonesimulator-x86_64");
assertTrue(
"Resource file should exist.",
Files.isRegularFile(outputPath.resolve("other_resource.txt")));
}
}
| |
package com.king.applib.ui.customview;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RoundRectShape;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewParent;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.DecelerateInterpolator;
import android.widget.FrameLayout;
import android.widget.TabWidget;
/**
* A simple text label view that can be applied as a "badge" to any given {@link android.view.View}.
* This class is intended to be instantiated at runtime rather than included in XML layouts.
*
* @author Jeff Gilfelt
*/
public class BadgeView extends androidx.appcompat.widget.AppCompatTextView {
    // Badge placement relative to the target view.
    public static final int POSITION_TOP_LEFT = 1;
    public static final int POSITION_TOP_RIGHT = 2;
    public static final int POSITION_BOTTOM_LEFT = 3;
    public static final int POSITION_BOTTOM_RIGHT = 4;
    public static final int POSITION_CENTER = 5;
    // Default styling values; *_DIP values are converted to pixels at construction time.
    private static final int DEFAULT_MARGIN_DIP = 5;
    private static final int DEFAULT_LR_PADDING_DIP = 5;
    private static final int DEFAULT_CORNER_RADIUS_DIP = 8;
    private static final int DEFAULT_POSITION = POSITION_TOP_RIGHT;
    private static final int DEFAULT_BADGE_COLOR = Color.parseColor("#CCFF0000"); //Color.RED;
    private static final int DEFAULT_TEXT_COLOR = Color.WHITE;
    // NOTE(review): these are static but re-created in every init() call, so all badges end
    // up sharing the animations of the most recently constructed instance — consider making
    // them instance fields. TODO confirm intended.
    private static Animation fadeIn;
    private static Animation fadeOut;
    private Context context;
    // View this badge decorates; for a TabWidget this is the selected tab's child view.
    private View target;
    private int badgePosition;
    private int badgeMarginH;
    private int badgeMarginV;
    private int badgeColor;
    // Tracks logical visibility; maintained by show()/hide().
    private boolean isShown;
    // Lazily created rounded-rect background drawable.
    private ShapeDrawable badgeBg;
    // Tab index used only when the target is a TabWidget.
    private int targetTabIndex;
    /** Creates a standalone badge with no target view; it is shown immediately. */
    public BadgeView(Context context) {
        this(context, (AttributeSet) null, android.R.attr.textViewStyle);
    }
    /** Creates a standalone badge from XML-style attributes; it is shown immediately. */
    public BadgeView(Context context, AttributeSet attrs) {
        this(context, attrs, android.R.attr.textViewStyle);
    }
    /**
     * Creates a new BadgeView instance attached to a target {@link android.view.View}.
     * The badge is re-parented into a wrapping FrameLayout over the target and starts hidden.
     *
     * @param context context for this view.
     * @param target the View to attach the badge to.
     */
    public BadgeView(Context context, View target) {
        this(context, null, android.R.attr.textViewStyle, target, 0);
    }
    /**
     * Creates a new BadgeView instance attached to a target {@link android.widget.TabWidget}
     * tab at a given index. The badge is overlaid on that tab and starts hidden.
     *
     * @param context context for this view.
     * @param target the TabWidget to attach the badge to.
     * @param index the position of the tab within the target.
     */
    public BadgeView(Context context, TabWidget target, int index) {
        this(context, null, android.R.attr.textViewStyle, target, index);
    }
    /** Creates a standalone badge with an explicit default style; it is shown immediately. */
    public BadgeView(Context context, AttributeSet attrs, int defStyle) {
        this(context, attrs, defStyle, null, 0);
    }
    /**
     * Full constructor; all other constructors delegate here.
     *
     * @param target view to badge, or null for a standalone badge.
     * @param tabIndex tab position when target is a TabWidget; ignored otherwise.
     */
    public BadgeView(Context context, AttributeSet attrs, int defStyle, View target, int tabIndex) {
        super(context, attrs, defStyle);
        init(context, target, tabIndex);
    }
    /**
     * Applies default styling, creates the fade animations, and either attaches the badge
     * to {@code target} or, when there is no target, shows it immediately.
     *
     * @param context context for this view.
     * @param target view to badge, or null for a standalone badge.
     * @param tabIndex tab position when target is a TabWidget.
     */
    private void init(Context context, View target, int tabIndex) {
        this.context = context;
        this.target = target;
        this.targetTabIndex = tabIndex;
        // apply defaults
        badgePosition = DEFAULT_POSITION;
        badgeMarginH = dipToPixels(DEFAULT_MARGIN_DIP);
        badgeMarginV = badgeMarginH;
        badgeColor = DEFAULT_BADGE_COLOR;
        setTypeface(Typeface.DEFAULT_BOLD);
        int paddingPixels = dipToPixels(DEFAULT_LR_PADDING_DIP);
        setPadding(paddingPixels, 0, paddingPixels, 0);
        setTextColor(DEFAULT_TEXT_COLOR);
        // NOTE(review): fadeIn/fadeOut are static fields, so each new BadgeView replaces the
        // animations used by all existing instances — likely should be per-instance.
        fadeIn = new AlphaAnimation(0, 1);
        fadeIn.setInterpolator(new DecelerateInterpolator());
        fadeIn.setDuration(200);
        fadeOut = new AlphaAnimation(1, 0);
        fadeOut.setInterpolator(new AccelerateInterpolator());
        fadeOut.setDuration(200);
        isShown = false;
        if (this.target != null) {
            applyTo(this.target);
        } else {
            show();
        }
    }
/**
 * Wraps the target View in a FrameLayout container and inserts this badge
 * into that container so the badge overlays the target.
 */
private void applyTo(View target) {
ViewGroup.LayoutParams lp = target.getLayoutParams();
ViewParent parent = target.getParent();
FrameLayout container = new FrameLayout(context);
if (target instanceof TabWidget) {
// set target to the relevant tab child container
target = ((TabWidget) target).getChildTabViewAt(targetTabIndex);
this.target = target;
// the badge container is added on top of the existing tab child view
((ViewGroup) target).addView(container, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT));
this.setVisibility(View.GONE);
container.addView(this);
} else {
// TODO verify that parent is indeed a ViewGroup
// NOTE(review): this cast throws ClassCastException when the target's
// parent is not a ViewGroup — confirm callers never attach to such views
ViewGroup group = (ViewGroup) parent;
int index = group.indexOfChild(target);
// swap the target for the container at the same position, then re-add
// both the target and this badge inside the container
group.removeView(target);
group.addView(container, index, lp);
container.addView(target);
this.setVisibility(View.GONE);
container.addView(this);
group.invalidate();
}
}
/**
 * Make the badge visible in the UI, without any animation.
 */
public void show() {
show(false, null);
}
/**
 * Make the badge visible in the UI, optionally with the default fade-in.
 *
 * @param animate flag to apply the default fade-in animation.
 */
public void show(boolean animate) {
show(animate, fadeIn);
}
/**
 * Make the badge visible in the UI, animated with the given animation.
 *
 * @param anim Animation to apply to the view when made visible.
 */
public void show(Animation anim) {
show(true, anim);
}
/**
 * Make the badge non-visible in the UI, without any animation.
 */
public void hide() {
hide(false, null);
}
/**
 * Make the badge non-visible in the UI, optionally with the default fade-out.
 *
 * @param animate flag to apply the default fade-out animation.
 */
public void hide(boolean animate) {
hide(animate, fadeOut);
}
/**
 * Make the badge non-visible in the UI, animated with the given animation.
 *
 * @param anim Animation to apply to the view when made non-visible.
 */
public void hide(Animation anim) {
hide(true, anim);
}
/**
 * Toggle the badge visibility in the UI, without any animation.
 */
public void toggle() {
toggle(false, null, null);
}
/**
 * Toggle the badge visibility in the UI, using the default fade animations.
 *
 * @param animate flag to apply the default fade-in/out animation.
 */
public void toggle(boolean animate) {
toggle(animate, fadeIn, fadeOut);
}
/**
 * Toggle the badge visibility in the UI with custom animations.
 *
 * @param animIn Animation to apply to the view when made visible.
 * @param animOut Animation to apply to the view when made non-visible.
 */
public void toggle(Animation animIn, Animation animOut) {
toggle(true, animIn, animOut);
}
/**
 * Core show implementation: installs the badge background on first use,
 * positions the badge, optionally animates it, and makes it visible.
 *
 * @param animate whether to run {@code anim} while showing.
 * @param anim the animation to play; ignored when null.
 */
private void show(boolean animate, Animation anim) {
    if (getBackground() == null) {
        // lazily build and cache the default rounded background
        if (badgeBg == null) {
            badgeBg = getDefaultBackground();
        }
        setBackgroundDrawable(badgeBg);
    }
    applyLayoutParams();
    // guard against a null animation: show(Animation) passes animate=true
    // unconditionally, so without this check a null anim would reach
    // View#startAnimation and crash (same guard toggle() already applies)
    if (animate && anim != null) {
        this.startAnimation(anim);
    }
    this.setVisibility(View.VISIBLE);
    isShown = true;
}
/**
 * Core hide implementation: makes the badge GONE, optionally running the
 * given animation, and records the hidden state.
 *
 * @param animate whether to run {@code anim} while hiding.
 * @param anim the animation to play; ignored when null.
 */
private void hide(boolean animate, Animation anim) {
    this.setVisibility(View.GONE);
    // guard against a null animation: hide(Animation) passes animate=true
    // unconditionally, so without this check a null anim would reach
    // View#startAnimation and crash (same guard toggle() already applies)
    if (animate && anim != null) {
        this.startAnimation(anim);
    }
    isShown = false;
}
/**
 * Core toggle implementation: flips between shown and hidden, animating the
 * transition only when requested and when a matching animation is supplied.
 *
 * @param animate whether to animate the transition at all.
 * @param animIn animation used when becoming visible, may be null.
 * @param animOut animation used when becoming non-visible, may be null.
 */
private void toggle(boolean animate, Animation animIn, Animation animOut) {
    if (!isShown) {
        boolean runIn = animate && animIn != null;
        show(runIn, animIn);
    } else {
        boolean runOut = animate && animOut != null;
        hide(runOut, animOut);
    }
}
/**
 * Increment the numeric badge label. If the current badge label cannot be
 * converted to an integer value (or is absent), it is treated as 0 before
 * applying the offset.
 *
 * @param offset the increment offset.
 * @return the new label value, after incrementing.
 */
public int increment(int offset) {
    int current = 0;
    CharSequence label = getText();
    if (label != null) {
        try {
            current = Integer.parseInt(label.toString());
        } catch (NumberFormatException ignored) {
            // non-numeric label: fall back to 0
            current = 0;
        }
    }
    int updated = current + offset;
    setText(String.valueOf(updated));
    return updated;
}
/**
 * Decrement the numeric badge label. If the current badge label cannot be converted to
 * an integer value, its label will be set to "0".
 *
 * @param offset the decrement offset.
 * @return the new label value, after decrementing.
 */
public int decrement(int offset) {
return increment(-offset);
}
/**
 * Build the default badge background: a rounded-rectangle shape drawable
 * filled with the current badge color.
 *
 * @return a freshly created ShapeDrawable using the configured badgeColor.
 */
private ShapeDrawable getDefaultBackground() {
    float radius = dipToPixels(DEFAULT_CORNER_RADIUS_DIP);
    float[] corners = {radius, radius, radius, radius, radius, radius, radius, radius};
    ShapeDrawable background = new ShapeDrawable(new RoundRectShape(corners, null, null));
    background.getPaint().setColor(badgeColor);
    return background;
}
/**
 * Build and apply FrameLayout.LayoutParams that place this badge inside its
 * container according to badgePosition, using the configured H/V margins.
 */
private void applyLayoutParams() {
FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
switch (badgePosition) {
case POSITION_TOP_LEFT:
lp.gravity = Gravity.LEFT | Gravity.TOP;
lp.setMargins(badgeMarginH, badgeMarginV, 0, 0);
break;
case POSITION_TOP_RIGHT:
lp.gravity = Gravity.RIGHT | Gravity.TOP;
lp.setMargins(0, badgeMarginV, badgeMarginH, 0);
break;
case POSITION_BOTTOM_LEFT:
lp.gravity = Gravity.LEFT | Gravity.BOTTOM;
lp.setMargins(badgeMarginH, 0, 0, badgeMarginV);
break;
case POSITION_BOTTOM_RIGHT:
lp.gravity = Gravity.RIGHT | Gravity.BOTTOM;
lp.setMargins(0, 0, badgeMarginH, badgeMarginV);
break;
case POSITION_CENTER:
lp.gravity = Gravity.CENTER;
lp.setMargins(0, 0, 0, 0);
break;
default:
// unknown position values leave gravity and margins unset
break;
}
setLayoutParams(lp);
}
/**
 * Returns the target View this badge has been attached to.
 *
 * @return the View supplied at construction time, or — when attached to a
 *         TabWidget — the resolved tab child view; may be null.
 */
public View getTarget() {
return target;
}
/**
 * Is this badge currently visible in the UI?
 *
 * NOTE(review): this overrides {@code View#isShown()} but returns the
 * badge's own show/hide flag rather than the framework's visibility
 * computation — confirm callers expect this shadowing.
 *
 * @return true if show() was called more recently than hide().
 */
@Override
public boolean isShown() {
return isShown;
}
/**
 * Returns the positioning of this badge.
 *
 * @return one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT, POSITION_BOTTOM_LEFT,
 *         POSITION_BOTTOM_RIGHT, POSITION_CENTER.
 */
public int getBadgePosition() {
return badgePosition;
}
/**
 * Set the positioning of this badge.
 *
 * @param layoutPosition one of POSITION_TOP_LEFT, POSITION_TOP_RIGHT,
 *        POSITION_BOTTOM_LEFT, POSITION_BOTTOM_RIGHT, POSITION_CENTER.
 */
public void setBadgePosition(int layoutPosition) {
this.badgePosition = layoutPosition;
}
/**
 * Returns the horizontal margin from the target View that is applied to this badge.
 *
 * @return the horizontal margin in pixels.
 */
public int getHorizontalBadgeMargin() {
return badgeMarginH;
}
/**
 * Returns the vertical margin from the target View that is applied to this badge.
 *
 * @return the vertical margin in pixels.
 */
public int getVerticalBadgeMargin() {
return badgeMarginV;
}
/**
 * Set both the horizontal and vertical margin from the target View that is
 * applied to this badge.
 *
 * @param badgeMargin the margin in pixels, used for both axes.
 */
public void setBadgeMargin(int badgeMargin) {
this.badgeMarginH = badgeMargin;
this.badgeMarginV = badgeMargin;
}
/**
 * Set the horizontal and vertical margins from the target View that are
 * applied to this badge, independently.
 *
 * @param horizontal the horizontal margin in pixels.
 * @param vertical the vertical margin in pixels.
 */
public void setBadgeMargin(int horizontal, int vertical) {
this.badgeMarginH = horizontal;
this.badgeMarginV = vertical;
}
/**
 * Returns the color value of the badge background.
 *
 * @return the badge background color.
 */
public int getBadgeBackgroundColor() {
return badgeColor;
}
/**
 * Set the color value of the badge background. The cached default background
 * drawable is rebuilt immediately so the new color takes effect.
 *
 * @param badgeColor the badge background color.
 */
public void setBadgeBackgroundColor(int badgeColor) {
this.badgeColor = badgeColor;
badgeBg = getDefaultBackground();
}
/**
 * Convert a value in density-independent pixels to raw pixels for the
 * current display metrics, truncating toward zero.
 *
 * @param dip value in dip.
 * @return the equivalent pixel value.
 */
private int dipToPixels(int dip) {
    return (int) TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP, dip, getResources().getDisplayMetrics());
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
/**
 * <p>
 * A description of a single data modification that was performed on an item in
 * a DynamoDB table.
 * </p>
 */
public class StreamRecord implements Serializable, Cloneable {

    /**
     * The approximate date and time when the stream record was created, in
     * <a href="http://www.epochconverter.com/">UNIX epoch time</a> format.
     */
    private java.util.Date approximateCreationDateTime;

    /** The primary key attribute(s) for the DynamoDB item that was modified. */
    private java.util.Map<String, AttributeValue> keys;

    /** The item in the DynamoDB table as it appeared after it was modified. */
    private java.util.Map<String, AttributeValue> newImage;

    /** The item in the DynamoDB table as it appeared before it was modified. */
    private java.util.Map<String, AttributeValue> oldImage;

    /** The sequence number of the stream record. */
    private String sequenceNumber;

    /** The size of the stream record, in bytes. */
    private Long sizeBytes;

    /**
     * The type of data from the modified DynamoDB item that was captured in
     * this stream record:
     * <ul>
     * <li><code>KEYS_ONLY</code> - only the key attributes of the modified
     * item.</li>
     * <li><code>NEW_IMAGE</code> - the entire item, as it appeared after it
     * was modified.</li>
     * <li><code>OLD_IMAGE</code> - the entire item, as it appeared before it
     * was modified.</li>
     * <li><code>NEW_AND_OLD_IMAGES</code> - both the new and the old item
     * images of the item.</li>
     * </ul>
     */
    private String streamViewType;

    /**
     * Sets the approximate date and time when the stream record was created,
     * in <a href="http://www.epochconverter.com/">UNIX epoch time</a> format.
     *
     * @param approximateCreationDateTime
     *        The approximate creation date and time of the stream record.
     */
    public void setApproximateCreationDateTime(
            java.util.Date approximateCreationDateTime) {
        this.approximateCreationDateTime = approximateCreationDateTime;
    }

    /**
     * Returns the approximate date and time when the stream record was
     * created, in <a href="http://www.epochconverter.com/">UNIX epoch time</a>
     * format.
     *
     * @return The approximate creation date and time of the stream record.
     */
    public java.util.Date getApproximateCreationDateTime() {
        return this.approximateCreationDateTime;
    }

    /**
     * Fluent variant of
     * {@link #setApproximateCreationDateTime(java.util.Date)}.
     *
     * @param approximateCreationDateTime
     *        The approximate creation date and time of the stream record.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withApproximateCreationDateTime(
            java.util.Date approximateCreationDateTime) {
        setApproximateCreationDateTime(approximateCreationDateTime);
        return this;
    }

    /**
     * Returns the primary key attribute(s) for the DynamoDB item that was
     * modified.
     *
     * @return The primary key attribute(s) for the modified item.
     */
    public java.util.Map<String, AttributeValue> getKeys() {
        return keys;
    }

    /**
     * Sets the primary key attribute(s) for the DynamoDB item that was
     * modified.
     *
     * @param keys
     *        The primary key attribute(s) for the modified item.
     */
    public void setKeys(java.util.Map<String, AttributeValue> keys) {
        this.keys = keys;
    }

    /**
     * Fluent variant of {@link #setKeys(java.util.Map)}.
     *
     * @param keys
     *        The primary key attribute(s) for the modified item.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withKeys(java.util.Map<String, AttributeValue> keys) {
        setKeys(keys);
        return this;
    }

    /**
     * Adds a single entry to the Keys map, creating the map on first use.
     *
     * @param key the attribute name; must not already be present.
     * @param value the attribute value.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException if the key is already present.
     */
    public StreamRecord addKeysEntry(String key, AttributeValue value) {
        this.keys = addEntry(this.keys, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Keys. <p> Returns a reference to
     * this object so that method calls can be chained together.
     */
    public StreamRecord clearKeysEntries() {
        this.keys = null;
        return this;
    }

    /**
     * Returns the item in the DynamoDB table as it appeared after it was
     * modified.
     *
     * @return The post-modification item image.
     */
    public java.util.Map<String, AttributeValue> getNewImage() {
        return newImage;
    }

    /**
     * Sets the item in the DynamoDB table as it appeared after it was
     * modified.
     *
     * @param newImage
     *        The post-modification item image.
     */
    public void setNewImage(java.util.Map<String, AttributeValue> newImage) {
        this.newImage = newImage;
    }

    /**
     * Fluent variant of {@link #setNewImage(java.util.Map)}.
     *
     * @param newImage
     *        The post-modification item image.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withNewImage(
            java.util.Map<String, AttributeValue> newImage) {
        setNewImage(newImage);
        return this;
    }

    /**
     * Adds a single entry to the NewImage map, creating the map on first use.
     *
     * @param key the attribute name; must not already be present.
     * @param value the attribute value.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException if the key is already present.
     */
    public StreamRecord addNewImageEntry(String key, AttributeValue value) {
        this.newImage = addEntry(this.newImage, key, value);
        return this;
    }

    /**
     * Removes all the entries added into NewImage. <p> Returns a reference
     * to this object so that method calls can be chained together.
     */
    public StreamRecord clearNewImageEntries() {
        this.newImage = null;
        return this;
    }

    /**
     * Returns the item in the DynamoDB table as it appeared before it was
     * modified.
     *
     * @return The pre-modification item image.
     */
    public java.util.Map<String, AttributeValue> getOldImage() {
        return oldImage;
    }

    /**
     * Sets the item in the DynamoDB table as it appeared before it was
     * modified.
     *
     * @param oldImage
     *        The pre-modification item image.
     */
    public void setOldImage(java.util.Map<String, AttributeValue> oldImage) {
        this.oldImage = oldImage;
    }

    /**
     * Fluent variant of {@link #setOldImage(java.util.Map)}.
     *
     * @param oldImage
     *        The pre-modification item image.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withOldImage(
            java.util.Map<String, AttributeValue> oldImage) {
        setOldImage(oldImage);
        return this;
    }

    /**
     * Adds a single entry to the OldImage map, creating the map on first use.
     *
     * @param key the attribute name; must not already be present.
     * @param value the attribute value.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException if the key is already present.
     */
    public StreamRecord addOldImageEntry(String key, AttributeValue value) {
        this.oldImage = addEntry(this.oldImage, key, value);
        return this;
    }

    /**
     * Removes all the entries added into OldImage. <p> Returns a reference
     * to this object so that method calls can be chained together.
     */
    public StreamRecord clearOldImageEntries() {
        this.oldImage = null;
        return this;
    }

    /**
     * Sets the sequence number of the stream record.
     *
     * @param sequenceNumber
     *        The sequence number of the stream record.
     */
    public void setSequenceNumber(String sequenceNumber) {
        this.sequenceNumber = sequenceNumber;
    }

    /**
     * Returns the sequence number of the stream record.
     *
     * @return The sequence number of the stream record.
     */
    public String getSequenceNumber() {
        return this.sequenceNumber;
    }

    /**
     * Fluent variant of {@link #setSequenceNumber(String)}.
     *
     * @param sequenceNumber
     *        The sequence number of the stream record.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withSequenceNumber(String sequenceNumber) {
        setSequenceNumber(sequenceNumber);
        return this;
    }

    /**
     * Sets the size of the stream record, in bytes.
     *
     * @param sizeBytes
     *        The size of the stream record, in bytes.
     */
    public void setSizeBytes(Long sizeBytes) {
        this.sizeBytes = sizeBytes;
    }

    /**
     * Returns the size of the stream record, in bytes.
     *
     * @return The size of the stream record, in bytes.
     */
    public Long getSizeBytes() {
        return this.sizeBytes;
    }

    /**
     * Fluent variant of {@link #setSizeBytes(Long)}.
     *
     * @param sizeBytes
     *        The size of the stream record, in bytes.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public StreamRecord withSizeBytes(Long sizeBytes) {
        setSizeBytes(sizeBytes);
        return this;
    }

    /**
     * Sets the type of data from the modified DynamoDB item that was captured
     * in this stream record; see the {@link #streamViewType} field for the
     * meaning of each value.
     *
     * @param streamViewType
     *        One of KEYS_ONLY, NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES.
     * @see StreamViewType
     */
    public void setStreamViewType(String streamViewType) {
        this.streamViewType = streamViewType;
    }

    /**
     * Returns the type of data from the modified DynamoDB item that was
     * captured in this stream record; see the {@link #streamViewType} field
     * for the meaning of each value.
     *
     * @return One of KEYS_ONLY, NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES.
     * @see StreamViewType
     */
    public String getStreamViewType() {
        return this.streamViewType;
    }

    /**
     * Fluent variant of {@link #setStreamViewType(String)}.
     *
     * @param streamViewType
     *        One of KEYS_ONLY, NEW_IMAGE, OLD_IMAGE, NEW_AND_OLD_IMAGES.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see StreamViewType
     */
    public StreamRecord withStreamViewType(String streamViewType) {
        setStreamViewType(streamViewType);
        return this;
    }

    /**
     * Enum overload of {@link #setStreamViewType(String)}; stores the enum's
     * string form. Must not be null.
     *
     * @param streamViewType the view type enum constant.
     * @see StreamViewType
     */
    public void setStreamViewType(StreamViewType streamViewType) {
        this.streamViewType = streamViewType.toString();
    }

    /**
     * Enum overload of {@link #withStreamViewType(String)}.
     *
     * @param streamViewType the view type enum constant; must not be null.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see StreamViewType
     */
    public StreamRecord withStreamViewType(StreamViewType streamViewType) {
        setStreamViewType(streamViewType);
        return this;
    }

    /**
     * Adds an entry to the given attribute map, creating the map on first use
     * and rejecting duplicate keys.
     *
     * @return the (possibly newly created) map containing the new entry.
     * @throws IllegalArgumentException if the key is already present.
     */
    private static java.util.Map<String, AttributeValue> addEntry(
            java.util.Map<String, AttributeValue> map, String key,
            AttributeValue value) {
        if (map == null) {
            map = new java.util.HashMap<String, AttributeValue>();
        }
        if (map.containsKey(key)) {
            throw new IllegalArgumentException("Duplicated keys (" + key
                    + ") are provided.");
        }
        map.put(key, value);
        return map;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getApproximateCreationDateTime() != null)
            sb.append("ApproximateCreationDateTime: "
                    + getApproximateCreationDateTime() + ",");
        if (getKeys() != null)
            sb.append("Keys: " + getKeys() + ",");
        if (getNewImage() != null)
            sb.append("NewImage: " + getNewImage() + ",");
        if (getOldImage() != null)
            sb.append("OldImage: " + getOldImage() + ",");
        if (getSequenceNumber() != null)
            sb.append("SequenceNumber: " + getSequenceNumber() + ",");
        if (getSizeBytes() != null)
            sb.append("SizeBytes: " + getSizeBytes() + ",");
        if (getStreamViewType() != null)
            sb.append("StreamViewType: " + getStreamViewType());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof StreamRecord))
            return false;
        StreamRecord other = (StreamRecord) obj;
        // field-by-field comparison; Objects.equals handles the null cases
        // the original xor/null checks covered
        return java.util.Objects.equals(getApproximateCreationDateTime(),
                        other.getApproximateCreationDateTime())
                && java.util.Objects.equals(getKeys(), other.getKeys())
                && java.util.Objects.equals(getNewImage(), other.getNewImage())
                && java.util.Objects.equals(getOldImage(), other.getOldImage())
                && java.util.Objects.equals(getSequenceNumber(),
                        other.getSequenceNumber())
                && java.util.Objects.equals(getSizeBytes(), other.getSizeBytes())
                && java.util.Objects.equals(getStreamViewType(),
                        other.getStreamViewType());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation over the same
        // field order as the previous hand-written implementation
        return java.util.Objects.hash(getApproximateCreationDateTime(),
                getKeys(), getNewImage(), getOldImage(), getSequenceNumber(),
                getSizeBytes(), getStreamViewType());
    }

    @Override
    public StreamRecord clone() {
        try {
            return (StreamRecord) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.yarn.cli;
import org.apache.flink.client.cli.CliFrontendParser;
import org.apache.flink.client.cli.CustomCommandLine;
import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.clusterframework.ApplicationStatus;
import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse;
import org.apache.flink.runtime.security.SecurityUtils;
import org.apache.flink.util.Preconditions;
import org.apache.flink.yarn.AbstractYarnClusterDescriptor;
import org.apache.flink.yarn.YarnClusterClient;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import static org.apache.flink.client.cli.CliFrontendParser.ADDRESS_OPTION;
import static org.apache.flink.configuration.HighAvailabilityOptions.HA_ZOOKEEPER_NAMESPACE;
/**
* Class handling the command line interface to the YARN session.
*/
public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient> {
private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnSessionCli.class);
//------------------------------------ Constants -------------------------
public static final String CONFIG_FILE_LOGBACK_NAME = "logback.xml";
public static final String CONFIG_FILE_LOG4J_NAME = "log4j.properties";
private static final int CLIENT_POLLING_INTERVALL = 3;
/** The id for the CommandLine interface. */
private static final String ID = "yarn-cluster";
// YARN-session related constants
private static final String YARN_PROPERTIES_FILE = ".yarn-properties-";
static final String YARN_APPLICATION_ID_KEY = "applicationID";
private static final String YARN_PROPERTIES_PARALLELISM = "parallelism";
private static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString";
private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()
//------------------------------------ Command Line argument options -------------------------
// the prefix transformation is used by the CliFrontend static constructor.
private final Option query;
// --- or ---
private final Option applicationId;
// --- or ---
private final Option queue;
private final Option shipPath;
private final Option flinkJar;
private final Option jmMemory;
private final Option tmMemory;
private final Option container;
private final Option slots;
private final Option detached;
private final Option zookeeperNamespace;
/**
* @deprecated Streaming mode has been deprecated without replacement. Set the
* {@link ConfigConstants#TASK_MANAGER_MEMORY_PRE_ALLOCATE_KEY} configuration
* key to true to get the previous batch mode behaviour.
*/
@Deprecated
private final Option streaming;
private final Option name;
private final Options allOptions;
/**
* Dynamic properties allow the user to specify additional configuration values with -D, such as
* <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt>.
*/
private final Option dynamicproperties;
private final boolean acceptInteractiveInput;
//------------------------------------ Internal fields -------------------------
private YarnClusterClient yarnCluster;
private boolean detachedMode = false;
public FlinkYarnSessionCli(String shortPrefix, String longPrefix) {
this(shortPrefix, longPrefix, true);
}
/**
* Creates a CLI with an explicit choice about reading interactive commands.
*
* @param shortPrefix prefix prepended to all short option names (allows embedding in other CLIs)
* @param longPrefix prefix prepended to all long option names
* @param acceptInteractiveInput whether run() may read commands from stdin
*/
public FlinkYarnSessionCli(String shortPrefix, String longPrefix, boolean acceptInteractiveInput) {
this.acceptInteractiveInput = acceptInteractiveInput;
// Define all YARN-session options; prefixes distinguish them from generic CLI options.
query = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)");
applicationId = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session");
queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
flinkJar = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
tmMemory = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]");
container = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN container to allocate (=Number of Task Managers)");
slots = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager");
dynamicproperties = new Option(shortPrefix + "D", true, "Dynamic properties");
detached = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached");
streaming = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode");
name = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN");
zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
// Register all options; insertion order is preserved in the usage printout.
allOptions = new Options();
allOptions.addOption(flinkJar);
allOptions.addOption(jmMemory);
allOptions.addOption(tmMemory);
allOptions.addOption(container);
allOptions.addOption(queue);
allOptions.addOption(query);
allOptions.addOption(shipPath);
allOptions.addOption(slots);
allOptions.addOption(dynamicproperties);
allOptions.addOption(detached);
allOptions.addOption(streaming);
allOptions.addOption(name);
allOptions.addOption(applicationId);
allOptions.addOption(zookeeperNamespace);
}
/**
 * Tries to load a Flink Yarn properties file and returns the Yarn application id if successful.
 * Resuming is skipped when a JobManager address or any YARN-specific option (other than
 * detached) was given explicitly on the command line.
 *
 * @param cmdLine The command-line parameters
 * @param flinkConfiguration The flink configuration; receives the parallelism and dynamic
 *        properties stored in the properties file as a side effect
 * @return Yarn application id or null if none could be retrieved
 * @throws IllegalConfigurationException if the properties file lacks an application id
 * @throws RuntimeException if the file cannot be read or contains invalid entries
 */
private String loadYarnPropertiesFile(CommandLine cmdLine, Configuration flinkConfiguration) {
    String jobManagerOption = cmdLine.getOptionValue(ADDRESS_OPTION.getOpt(), null);
    if (jobManagerOption != null) {
        // don't resume from properties file if a JobManager has been specified
        return null;
    }
    for (Option option : cmdLine.getOptions()) {
        if (allOptions.hasOption(option.getOpt()) && !option.getOpt().equals(detached.getOpt())) {
            // don't resume from properties file if yarn options have been specified
            return null;
        }
    }
    // load the YARN properties
    File propertiesFile = getYarnPropertiesLocation(flinkConfiguration);
    if (!propertiesFile.exists()) {
        return null;
    }
    logAndSysout("Found YARN properties file " + propertiesFile.getAbsolutePath());
    Properties yarnProperties = new Properties();
    // try-with-resources may carry its own catch clause; the stream is closed either way
    try (InputStream is = new FileInputStream(propertiesFile)) {
        yarnProperties.load(is);
    }
    catch (IOException e) {
        throw new RuntimeException("Cannot read the YARN properties file", e);
    }
    // get the Yarn application id from the properties file
    String applicationID = yarnProperties.getProperty(YARN_APPLICATION_ID_KEY);
    if (applicationID == null) {
        throw new IllegalConfigurationException("Yarn properties file found but doesn't contain a " +
            "Yarn application id. Please delete the file at " + propertiesFile.getAbsolutePath());
    }
    try {
        // try converting id to ApplicationId
        ConverterUtils.toApplicationId(applicationID);
    }
    catch (Exception e) {
        throw new RuntimeException("YARN properties contains an invalid entry for " +
            "application id: " + applicationID, e);
    }
    logAndSysout("Using Yarn application id from YARN properties " + applicationID);
    // configure the default parallelism from YARN
    String propParallelism = yarnProperties.getProperty(YARN_PROPERTIES_PARALLELISM);
    if (propParallelism != null) { // maybe the property is not set
        try {
            int parallelism = Integer.parseInt(propParallelism);
            flinkConfiguration.setInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, parallelism);
            logAndSysout("YARN properties set default parallelism to " + parallelism);
        }
        catch (NumberFormatException e) {
            // preserve the cause so the offending value can be diagnosed (was dropped before)
            throw new RuntimeException("Error while parsing the YARN properties: " +
                "Property " + YARN_PROPERTIES_PARALLELISM + " is not an integer.", e);
        }
    }
    // handle the YARN client's dynamic properties
    String dynamicPropertiesEncoded = yarnProperties.getProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING);
    Map<String, String> dynamicProperties = getDynamicProperties(dynamicPropertiesEncoded);
    for (Map.Entry<String, String> dynamicProperty : dynamicProperties.entrySet()) {
        flinkConfiguration.setString(dynamicProperty.getKey(), dynamicProperty.getValue());
    }
    return applicationID;
}
/**
 * Builds a YARN cluster descriptor from the parsed command line: locates the Flink
 * dist jar, collects ship files, queue, dynamic properties, detached mode, name and
 * ZooKeeper namespace.
 *
 * @param defaultApplicationName application name to use when -nm was not given; may be null
 * @param cmd the parsed command line
 * @return the configured cluster descriptor
 * @throws RuntimeException if the auto-detected dist jar path cannot be URL-decoded
 */
public AbstractYarnClusterDescriptor createDescriptor(String defaultApplicationName, CommandLine cmd) {
    AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor();
    // Jar Path: either supplied via -j or derived from this class' code source.
    Path localJarPath;
    if (cmd.hasOption(flinkJar.getOpt())) {
        String userPath = cmd.getOptionValue(flinkJar.getOpt());
        if (!userPath.startsWith("file://")) {
            userPath = "file://" + userPath;
        }
        localJarPath = new Path(userPath);
    } else {
        LOG.info("No path for the flink jar passed. Using the location of "
            + yarnClusterDescriptor.getClass() + " to locate the jar");
        String encodedJarPath =
            yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource().getLocation().getPath();
        try {
            // we have to decode the url encoded parts of the path
            String decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name());
            localJarPath = new Path(new File(decodedPath).toURI());
        } catch (UnsupportedEncodingException e) {
            // fix: keep the original exception as the cause (was silently dropped)
            throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath +
                " Please supply a path manually via the -" + flinkJar.getOpt() + " option.", e);
        }
    }
    yarnClusterDescriptor.setLocalJarPath(localJarPath);
    List<File> shipFiles = new ArrayList<>();
    // path to directory to ship
    if (cmd.hasOption(shipPath.getOpt())) {
        // renamed local to avoid shadowing the Option field "shipPath"
        String shipDirPath = cmd.getOptionValue(this.shipPath.getOpt());
        File shipDir = new File(shipDirPath);
        if (shipDir.isDirectory()) {
            shipFiles.add(shipDir);
        } else {
            LOG.warn("Ship directory is not a directory. Ignoring it.");
        }
    }
    yarnClusterDescriptor.addShipFiles(shipFiles);
    // queue
    if (cmd.hasOption(queue.getOpt())) {
        yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt()));
    }
    // encode the -D dynamic properties into a single string for the descriptor
    String[] dynamicProperties = null;
    if (cmd.hasOption(dynamicproperties.getOpt())) {
        dynamicProperties = cmd.getOptionValues(dynamicproperties.getOpt());
    }
    String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
    yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);
    // detached mode may come from the session option or the generic CLI option
    if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
        this.detachedMode = true;
        yarnClusterDescriptor.setDetachedMode(true);
    }
    if (cmd.hasOption(name.getOpt())) {
        yarnClusterDescriptor.setName(cmd.getOptionValue(name.getOpt()));
    } else if (defaultApplicationName != null) {
        // set the default application name, if none is specified
        yarnClusterDescriptor.setName(defaultApplicationName);
    }
    if (cmd.hasOption(zookeeperNamespace.getOpt())) {
        yarnClusterDescriptor.setZookeeperNamespace(cmd.getOptionValue(this.zookeeperNamespace.getOpt()));
    }
    return yarnClusterDescriptor;
}
/**
 * Derives the cluster size (number of TaskManagers, JM/TM memory, slots) from the
 * command line, falling back to the configuration for absent options. If the user
 * requested an explicit parallelism, the slots per TaskManager are raised to cover it.
 *
 * @param configuration configuration supplying defaults for absent options
 * @param cmd the parsed command line
 * @return the resulting cluster specification
 * @throws IllegalArgumentException if the mandatory container-count option is missing
 * @throws NumberFormatException if a numeric option value is not an integer
 */
public ClusterSpecification createClusterSpecification(Configuration configuration, CommandLine cmd) {
    if (!cmd.hasOption(container.getOpt())) { // number of containers is required option!
        LOG.error("Missing required argument {}", container.getOpt());
        printUsage();
        throw new IllegalArgumentException("Missing required argument " + container.getOpt());
    }
    // parseInt avoids the needless boxing of Integer.valueOf
    int numberTaskManagers = Integer.parseInt(cmd.getOptionValue(container.getOpt()));
    // JobManager Memory
    final int jobManagerMemoryMB;
    if (cmd.hasOption(jmMemory.getOpt())) {
        jobManagerMemoryMB = Integer.parseInt(cmd.getOptionValue(jmMemory.getOpt()));
    } else {
        jobManagerMemoryMB = configuration.getInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY);
    }
    // Task Managers memory
    final int taskManagerMemoryMB;
    if (cmd.hasOption(tmMemory.getOpt())) {
        taskManagerMemoryMB = Integer.parseInt(cmd.getOptionValue(tmMemory.getOpt()));
    } else {
        taskManagerMemoryMB = configuration.getInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY);
    }
    int slotsPerTaskManager;
    if (cmd.hasOption(slots.getOpt())) {
        slotsPerTaskManager = Integer.parseInt(cmd.getOptionValue(slots.getOpt()));
    } else {
        slotsPerTaskManager = configuration.getInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 1);
    }
    // convenience: spread a requested parallelism evenly over the containers
    int userParallelism = Integer.parseInt(cmd.getOptionValue(CliFrontendParser.PARALLELISM_OPTION.getOpt(), "-1"));
    int maxSlots = slotsPerTaskManager * numberTaskManagers;
    if (userParallelism != -1) {
        int slotsPerTM = (int) Math.ceil((double) userParallelism / numberTaskManagers);
        String message = "The YARN cluster has " + maxSlots + " slots available, " +
            "but the user requested a parallelism of " + userParallelism + " on YARN. " +
            "Each of the " + numberTaskManagers + " TaskManagers " +
            "will get " + slotsPerTM + " slots.";
        logAndSysout(message);
        slotsPerTaskManager = slotsPerTM;
    }
    return new ClusterSpecification.ClusterSpecificationBuilder()
        .setMasterMemoryMB(jobManagerMemoryMB)
        .setTaskManagerMemoryMB(taskManagerMemoryMB)
        .setNumberTaskManagers(numberTaskManagers)
        .setSlotsPerTaskManager(slotsPerTaskManager)
        .createClusterSpecification();
}
/**
 * Prints the usage text: the required container option first, then all
 * optional session and general options.
 */
private void printUsage() {
    System.out.println("Usage:");
    final HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.setWidth(200);
    helpFormatter.setLeftPadding(5);
    // the mandatory part: only the container count is required
    final Options requiredOptions = new Options();
    requiredOptions.addOption(container);
    helpFormatter.setSyntaxPrefix("   Required");
    helpFormatter.printHelp(" ", requiredOptions);
    // everything else is optional
    final Options optionalOptions = new Options();
    addGeneralOptions(optionalOptions);
    addRunOptions(optionalOptions);
    helpFormatter.setSyntaxPrefix("   Optional");
    helpFormatter.printHelp(" ", optionalOptions);
}
/**
 * Persists the given session properties and makes the file world-readable so
 * other users on the machine can resume against the same session.
 *
 * @param properties the session properties to store
 * @param propertiesFile the target file
 * @throws RuntimeException if the file cannot be written
 */
private static void writeYarnProperties(Properties properties, File propertiesFile) {
    try (final OutputStream propertiesStream = new FileOutputStream(propertiesFile)) {
        properties.store(propertiesStream, "Generated YARN properties file");
    } catch (IOException e) {
        throw new RuntimeException("Error writing the properties file", e);
    }
    // readable for all.
    propertiesFile.setReadable(true, false);
}
/**
* Interactive console loop for a running session: polls the cluster for status
* changes and new messages roughly every CLIENT_POLLING_INTERVALL seconds and
* reacts to the commands "stop"/"quit"/"help" read from stdin. Returns when the
* cluster has been shut down or an unrecoverable exception occurs.
*
* @param yarnCluster client of the running YARN session
* @param readConsoleInput whether commands may be read from stdin
*/
public static void runInteractiveCli(YarnClusterClient yarnCluster, boolean readConsoleInput) {
final String help = "Available commands:\n" +
"help - show these commands\n" +
"stop - stop the YARN session";
// last seen TaskManager count; changes are reported to the user
int numTaskmanagers = 0;
try {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
label:
while (true) {
// ------------------ check if there are updates by the cluster -----------
try {
GetClusterStatusResponse status = yarnCluster.getClusterStatus();
LOG.debug("Received status message: {}", status);
if (status != null && numTaskmanagers != status.numRegisteredTaskManagers()) {
System.err.println("Number of connected TaskManagers changed to " +
status.numRegisteredTaskManagers() + ". " +
"Slots available: " + status.totalNumberOfSlots());
numTaskmanagers = status.numRegisteredTaskManagers();
}
} catch (Exception e) {
// best effort: a failed status poll only skips this round
LOG.warn("Could not retrieve the current cluster status. Skipping current retrieval attempt ...", e);
}
List<String> messages = yarnCluster.getNewMessages();
if (messages != null && messages.size() > 0) {
System.err.println("New messages from the YARN cluster: ");
for (String msg : messages) {
System.err.println(msg);
}
}
// NOTE(review): every non-SUCCEEDED status triggers a shutdown here — confirm
// that a healthy running session reports SUCCEEDED from getApplicationStatus().
if (yarnCluster.getApplicationStatus() != ApplicationStatus.SUCCEEDED) {
System.err.println("The YARN cluster has failed");
yarnCluster.shutdown();
}
// wait until CLIENT_POLLING_INTERVAL is over or the user entered something.
long startTime = System.currentTimeMillis();
while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000
&& (!readConsoleInput || !in.ready())) {
Thread.sleep(200);
}
//------------- handle interactive command by user. ----------------------
if (readConsoleInput && in.ready()) {
String command = in.readLine();
switch (command) {
case "quit":
case "stop":
yarnCluster.shutdownCluster();
break label; // leave the polling loop entirely
case "help":
System.err.println(help);
break;
default:
System.err.println("Unknown command '" + command + "'. Showing help: \n" + help);
break;
}
}
if (yarnCluster.hasBeenShutdown()) {
LOG.info("Stopping interactive command line interface, YARN cluster has been stopped.");
break;
}
}
} catch (Exception e) {
LOG.warn("Exception while running the interactive command line interface", e);
}
}
/**
 * Entry point of the standalone YARN session CLI. Loads the configuration,
 * installs the security context and runs the CLI inside it; the CLI's return
 * code becomes the process exit code.
 *
 * @param args command line arguments
 * @throws Exception if loading the configuration or installing security fails
 */
public static void main(final String[] args) throws Exception {
    final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();
    // no prefix for the YARN session
    final FlinkYarnSessionCli cli = new FlinkYarnSessionCli("", "");
    SecurityUtils.install(new SecurityUtils.SecurityConfiguration(flinkConfiguration));
    final Callable<Integer> securedRun = new Callable<Integer>() {
        @Override
        public Integer call() {
            return cli.run(args);
        }
    };
    final int exitCode = SecurityUtils.getInstalledContext().runSecured(securedRun);
    System.exit(exitCode);
}
/**
 * Decides whether this CLI is responsible for the given command line: either the
 * JobManager address equals the "yarn-cluster" id, an application id was passed,
 * or a resumable YARN properties file exists.
 */
@Override
public boolean isActive(CommandLine commandLine, Configuration configuration) {
    final String jobManagerAddress = commandLine.getOptionValue(ADDRESS_OPTION.getOpt(), null);
    if (ID.equals(jobManagerAddress)) {
        return true;
    }
    if (commandLine.hasOption(applicationId.getOpt())) {
        return true;
    }
    // last resort: a properties file from a previously started session
    return loadYarnPropertiesFile(commandLine, configuration) != null;
}
/**
* {@inheritDoc}
*
* @return the identifier "yarn-cluster" under which this CLI is selected
*/
@Override
public String getId() {
return ID;
}
/**
 * Copies every YARN session option into the given option set.
 * Options#getOptions is pre-generics, hence the explicit cast.
 */
@Override
public void addRunOptions(Options baseOptions) {
    for (Object rawOption : allOptions.getOptions()) {
        final Option yarnOption = (Option) rawOption;
        baseOptions.addOption(yarnOption);
    }
}
/**
* Adds the options valid for every action; only the application id qualifies here.
*/
@Override
public void addGeneralOptions(Options baseOptions) {
baseOptions.addOption(applicationId);
}
/**
 * Attaches to a running YARN session. An explicit -id option wins; otherwise the
 * application id is taken from the YARN properties file. The HA cluster id is set
 * to the requested ZooKeeper namespace (or the application id as fallback).
 *
 * @throws UnsupportedOperationException if no application id could be determined
 */
@Override
public YarnClusterClient retrieveCluster(
        CommandLine cmdLine,
        Configuration config) throws UnsupportedOperationException {
    // first check for an application id, then try to load from yarn properties
    final String appId;
    if (cmdLine.hasOption(applicationId.getOpt())) {
        appId = cmdLine.getOptionValue(applicationId.getOpt());
    } else {
        appId = loadYarnPropertiesFile(cmdLine, config);
    }
    if (appId == null) {
        throw new UnsupportedOperationException("Could not resume a Yarn cluster.");
    }
    final String zkNamespace;
    if (cmdLine.hasOption(zookeeperNamespace.getOpt())) {
        zkNamespace = cmdLine.getOptionValue(zookeeperNamespace.getOpt());
    } else {
        zkNamespace = config.getString(HighAvailabilityOptions.HA_CLUSTER_ID, appId);
    }
    config.setString(HighAvailabilityOptions.HA_CLUSTER_ID, zkNamespace);
    final AbstractYarnClusterDescriptor descriptor = getClusterDescriptor();
    descriptor.setFlinkConfiguration(config);
    return descriptor.retrieve(appId);
}
/**
 * Deploys a fresh session cluster sized according to the command line and
 * configuration, shipping the given user jars.
 *
 * @throws RuntimeException if the deployment fails
 */
@Override
public YarnClusterClient createCluster(
        String applicationName,
        CommandLine cmdLine,
        Configuration config,
        List<URL> userJarFiles) {
    Preconditions.checkNotNull(userJarFiles, "User jar files should not be null.");
    // build descriptor and sizing first so argument errors surface before deployment
    final AbstractYarnClusterDescriptor descriptor = createDescriptor(applicationName, cmdLine);
    final ClusterSpecification clusterSpecification = createClusterSpecification(config, cmdLine);
    descriptor.setFlinkConfiguration(config);
    descriptor.setProvidedUserJarFiles(userJarFiles);
    try {
        return descriptor.deploySessionCluster(clusterSpecification);
    } catch (Exception e) {
        throw new RuntimeException("Error deploying the YARN cluster", e);
    }
}
/**
 * Main driver of the session CLI. Depending on the options it either queries the
 * cluster for available resources (-q), attaches to a running session (-id), or
 * deploys a new session cluster and writes the YARN properties file for later resume.
 *
 * @param args command line arguments
 * @return process exit code: 0 on success, 1 on a reported error
 */
public int run(String[] args) {
    //
    // Command Line Options
    //
    Options options = new Options();
    addGeneralOptions(options);
    addRunOptions(options);
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.out.println(e.getMessage());
        printUsage();
        return 1;
    }
    // Query cluster for metrics
    if (cmd.hasOption(query.getOpt())) {
        AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
        String description;
        try {
            description = yarnDescriptor.getClusterDescription();
        } catch (Exception e) {
            System.err.println("Error while querying the YARN cluster for available resources: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        System.out.println(description);
        return 0;
    } else if (cmd.hasOption(applicationId.getOpt())) {
        // attach to a running session
        AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
        //configure ZK namespace depending on the value passed
        String zkNamespace = cmd.hasOption(zookeeperNamespace.getOpt()) ?
                cmd.getOptionValue(zookeeperNamespace.getOpt())
                : yarnDescriptor.getFlinkConfiguration()
                .getString(HA_ZOOKEEPER_NAMESPACE, cmd.getOptionValue(applicationId.getOpt()));
        LOG.info("Going to use the ZK namespace: {}", zkNamespace);
        yarnDescriptor.getFlinkConfiguration().setString(HA_ZOOKEEPER_NAMESPACE, zkNamespace);
        try {
            yarnCluster = yarnDescriptor.retrieve(cmd.getOptionValue(applicationId.getOpt()));
        } catch (Exception e) {
            throw new RuntimeException("Could not retrieve existing Yarn application", e);
        }
        if (detachedMode) {
            // fix: print the actual application id; previously this printed the
            // option name ("id"), producing the useless hint "yarn application -kill id"
            LOG.info("The Flink YARN client has been started in detached mode. In order to stop " +
                "Flink on YARN, use the following command or a YARN web interface to stop it:\n" +
                "yarn application -kill " + cmd.getOptionValue(applicationId.getOpt()));
            yarnCluster.disconnect();
        } else {
            runInteractiveCli(yarnCluster, true);
        }
    } else {
        // start a new session cluster
        AbstractYarnClusterDescriptor yarnDescriptor;
        try {
            yarnDescriptor = createDescriptor(null, cmd);
        } catch (Exception e) {
            System.err.println("Error while starting the YARN Client: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        final ClusterSpecification clusterSpecification = createClusterSpecification(yarnDescriptor.getFlinkConfiguration(), cmd);
        try {
            yarnCluster = yarnDescriptor.deploySessionCluster(clusterSpecification);
        } catch (Exception e) {
            System.err.println("Error while deploying YARN cluster: " + e.getMessage());
            e.printStackTrace(System.err);
            return 1;
        }
        //------------------ ClusterClient deployed, handle connection details
        String jobManagerAddress =
            yarnCluster.getJobManagerAddress().getAddress().getHostName() +
            ":" + yarnCluster.getJobManagerAddress().getPort();
        System.out.println("Flink JobManager is now running on " + jobManagerAddress);
        System.out.println("JobManager Web Interface: " + yarnCluster.getWebInterfaceURL());
        // file that we write into the conf/ dir containing the jobManager address and the dop.
        File yarnPropertiesFile = getYarnPropertiesLocation(yarnCluster.getFlinkConfiguration());
        Properties yarnProps = new Properties();
        yarnProps.setProperty(YARN_APPLICATION_ID_KEY, yarnCluster.getApplicationId().toString());
        if (clusterSpecification.getSlotsPerTaskManager() != -1) {
            String parallelism =
                Integer.toString(clusterSpecification.getSlotsPerTaskManager() * clusterSpecification.getNumberTaskManagers());
            yarnProps.setProperty(YARN_PROPERTIES_PARALLELISM, parallelism);
        }
        // add dynamic properties
        if (yarnDescriptor.getDynamicPropertiesEncoded() != null) {
            yarnProps.setProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING,
                yarnDescriptor.getDynamicPropertiesEncoded());
        }
        writeYarnProperties(yarnProps, yarnPropertiesFile);
        //------------------ ClusterClient running, let user control it ------------
        if (detachedMode) {
            // print info and quit:
            LOG.info("The Flink YARN client has been started in detached mode. In order to stop " +
                "Flink on YARN, use the following command or a YARN web interface to stop it:\n" +
                "yarn application -kill " + yarnCluster.getApplicationId());
            yarnCluster.waitForClusterToBeReady();
            yarnCluster.disconnect();
        } else {
            runInteractiveCli(yarnCluster, acceptInteractiveInput);
        }
    }
    return 0;
}
/**
 * Utility method for tests: shuts down the session cluster client if one was
 * started or attached; failures are logged, never propagated.
 */
public void stop() {
    if (yarnCluster == null) {
        return;
    }
    LOG.info("Command line interface is shutting down the yarnCluster");
    try {
        yarnCluster.shutdown();
    } catch (Throwable t) {
        LOG.warn("Could not properly shutdown the yarn cluster.", t);
    }
}
/**
* Logs the message at INFO level and mirrors it to stdout for the interactive user.
*/
private void logAndSysout(String message) {
LOG.info(message);
System.out.println(message);
}
/**
 * Decodes the dynamic-properties string produced by the YARN client: key=value
 * pairs joined by {@code YARN_DYNAMIC_PROPERTIES_SEPARATOR}. Entries without a
 * '=' or with an empty key are skipped; keys and values are trimmed.
 *
 * @param dynamicPropertiesEncoded the encoded string, may be null or empty
 * @return the decoded properties; an empty map if nothing was encoded
 */
public static Map<String, String> getDynamicProperties(String dynamicPropertiesEncoded) {
    if (dynamicPropertiesEncoded == null || dynamicPropertiesEncoded.isEmpty()) {
        return Collections.emptyMap();
    }
    Map<String, String> properties = new HashMap<>();
    // String.split never yields null elements, so the former null check was dead code
    for (String propLine : dynamicPropertiesEncoded.split(YARN_DYNAMIC_PROPERTIES_SEPARATOR)) {
        int firstEquals = propLine.indexOf('=');
        if (firstEquals >= 0) {
            String key = propLine.substring(0, firstEquals).trim();
            // one-arg substring replaces the redundant (idx, length()) form
            String value = propLine.substring(firstEquals + 1).trim();
            if (!key.isEmpty()) {
                properties.put(key, value);
            }
        }
    }
    return properties;
}
/**
 * Locates the YARN session properties file: a configurable directory (defaulting
 * to the JVM temp dir), with the file name suffixed by the current user's name so
 * sessions of different users do not clash.
 *
 * @param conf configuration possibly overriding the directory
 * @return the properties file location for the current user
 */
public static File getYarnPropertiesLocation(Configuration conf) {
    final String defaultLocation = System.getProperty("java.io.tmpdir");
    final String location = conf.getString(ConfigConstants.YARN_PROPERTIES_FILE_LOCATION, defaultLocation);
    final String fileName = YARN_PROPERTIES_FILE + System.getProperty("user.name");
    return new File(location, fileName);
}
/**
* Factory for the cluster descriptor; protected so subclasses/tests can substitute one.
*
* @return a fresh YarnClusterDescriptor
*/
protected AbstractYarnClusterDescriptor getClusterDescriptor() {
return new YarnClusterDescriptor();
}
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.animation;
import com.jme3.export.*;
import com.jme3.math.Matrix4f;
import com.jme3.util.TempVars;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* <code>Skeleton</code> is a convenience class for managing a bone hierarchy.
* Skeleton updates the world transforms to reflect the current local
* animated matrices.
*
* @author Kirill Vainer
*/
public final class Skeleton implements Savable {
// All bones without a parent; entry points for hierarchy updates.
private Bone[] rootBones;
// Flat list of every bone managed by this skeleton, addressed by bone index.
private Bone[] boneList;
/**
* Contains the skinning matrices; multiplying one by a vertex affected by a bone
* will cause it to go to the animated position. Rebuilt on load, hence transient.
*/
private transient Matrix4f[] skinningMatrixes;
/**
* Creates a skeleton from a bone list.
* The root bones are found automatically.
* <p>
* Note that using this constructor will cause the bones in the list
* to have their bind pose recomputed based on their local transforms.
*
* @param boneList The list of bones to manage by this Skeleton
*/
public Skeleton(Bone[] boneList) {
this.boneList = boneList;
// collect bones without a parent: those are the roots
List<Bone> rootBoneList = new ArrayList<Bone>();
for (int i = boneList.length - 1; i >= 0; i--) {
Bone b = boneList[i];
if (b.getParent() == null) {
rootBoneList.add(b);
}
}
rootBones = rootBoneList.toArray(new Bone[rootBoneList.size()]);
createSkinningMatrices();
// update world transforms, then freeze them as the bind pose
for (int i = rootBones.length - 1; i >= 0; i--) {
Bone rootBone = rootBones[i];
rootBone.update();
rootBone.setBindingPose();
}
}
/**
* Special-purpose copy constructor.
* <p>
* Shallow copies bind pose data from the source skeleton, does not
* copy any other data.
*
* @param source The source Skeleton to copy from
*/
public Skeleton(Skeleton source) {
// copy every bone, then rebuild the parent/child structure by bone name
Bone[] sourceList = source.boneList;
boneList = new Bone[sourceList.length];
for (int i = 0; i < sourceList.length; i++) {
boneList[i] = new Bone(sourceList[i]);
}
rootBones = new Bone[source.rootBones.length];
for (int i = 0; i < rootBones.length; i++) {
rootBones[i] = recreateBoneStructure(source.rootBones[i]);
}
createSkinningMatrices();
for (int i = rootBones.length - 1; i >= 0; i--) {
rootBones[i].update();
}
}
/**
* Serialization only. Do not use.
*/
public Skeleton() {
}
// Allocates one skinning matrix per bone (identity until computed).
private void createSkinningMatrices() {
skinningMatrixes = new Matrix4f[boneList.length];
for (int i = 0; i < skinningMatrixes.length; i++) {
skinningMatrixes[i] = new Matrix4f();
}
}
/**
* Recreates, inside this skeleton, the parent/child links of the given source
* hierarchy by looking up this skeleton's bones by name.
*
* @param sourceRoot root of the source hierarchy to mirror
* @return this skeleton's bone corresponding to sourceRoot
*/
private Bone recreateBoneStructure(Bone sourceRoot) {
Bone targetRoot = getBone(sourceRoot.getName());
List<Bone> children = sourceRoot.getChildren();
for (int i = 0; i < children.size(); i++) {
Bone sourceChild = children.get(i);
// find my version of the child
Bone targetChild = getBone(sourceChild.getName());
targetRoot.addChild(targetChild);
recreateBoneStructure(sourceChild);
}
return targetRoot;
}
/**
* Updates world transforms for all bones in this skeleton.
* Typically called after setting local animation transforms.
*/
public void updateWorldVectors() {
for (int i = rootBones.length - 1; i >= 0; i--) {
rootBones[i].update();
}
}
/**
* Saves the current skeleton state as its binding pose.
*/
public void setBindingPose() {
for (int i = rootBones.length - 1; i >= 0; i--) {
rootBones[i].setBindingPose();
}
}
/**
* Reset the skeleton to bind pose.
*/
public final void reset() {
for (int i = rootBones.length - 1; i >= 0; i--) {
rootBones[i].reset();
}
}
/**
* Reset the skeleton to bind pose and updates the bones.
*/
public final void resetAndUpdate() {
for (int i = rootBones.length - 1; i >= 0; i--) {
Bone rootBone = rootBones[i];
rootBone.reset();
rootBone.update();
}
}
/**
* Returns the array of all root bones of this skeleton.
*
* @return the internal array of root bones (not a defensive copy)
*/
public Bone[] getRoots() {
return rootBones;
}
/**
* Return a bone for the given index.
*
* @param index the bone index
* @return the bone at that index
*/
public Bone getBone(int index) {
return boneList[index];
}
/**
* Returns the bone with the given name (linear scan).
*
* @param name the bone name to look up
* @return the first bone with that name, or null if none matches
*/
public Bone getBone(String name) {
for (int i = 0; i < boneList.length; i++) {
if (boneList[i].getName().equals(name)) {
return boneList[i];
}
}
return null;
}
/**
* Returns the bone index of the given bone (compared by reference).
*
* @param bone the bone instance to locate
* @return the index of the bone, or -1 if it is not part of this skeleton
*/
public int getBoneIndex(Bone bone) {
for (int i = 0; i < boneList.length; i++) {
if (boneList[i] == bone) {
return i;
}
}
return -1;
}
/**
* Returns the bone index of the bone that has the given name.
*
* @param name the bone name to look up
* @return the index of the first bone with that name, or -1 if none matches
*/
public int getBoneIndex(String name) {
for (int i = 0; i < boneList.length; i++) {
if (boneList[i].getName().equals(name)) {
return i;
}
}
return -1;
}
/**
* Compute the skinning matrices for each bone of the skeleton that would be used to transform vertices of associated meshes.
*
* @return the internal array of skinning matrices, one per bone (reused across calls)
*/
public Matrix4f[] computeSkinningMatrices() {
TempVars vars = TempVars.get();
for (int i = 0; i < boneList.length; i++) {
boneList[i].getOffsetTransform(skinningMatrixes[i], vars.quat1, vars.vect1, vars.vect2, vars.tempMat3);
}
vars.release();
return skinningMatrixes;
}
/**
* Returns the number of bones of this skeleton.
*
* @return the bone count
*/
public int getBoneCount() {
return boneList.length;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("Skeleton - ").append(boneList.length).append(" bones, ").append(rootBones.length).append(" roots\n");
for (Bone rootBone : rootBones) {
sb.append(rootBone.toString());
}
return sb.toString();
}
/**
* Deserializes this skeleton (Savable contract).
* NOTE(review): assumes both "rootBones" and "boneList" arrays were written; a null
* return from readSavableArray would NPE here — confirm against all writers.
*/
public void read(JmeImporter im) throws IOException {
InputCapsule input = im.getCapsule(this);
Savable[] boneRootsAsSav = input.readSavableArray("rootBones", null);
rootBones = new Bone[boneRootsAsSav.length];
System.arraycopy(boneRootsAsSav, 0, rootBones, 0, boneRootsAsSav.length);
Savable[] boneListAsSavable = input.readSavableArray("boneList", null);
boneList = new Bone[boneListAsSavable.length];
System.arraycopy(boneListAsSavable, 0, boneList, 0, boneListAsSavable.length);
createSkinningMatrices();
// restore world transforms and re-freeze the bind pose after loading
for (Bone rootBone : rootBones) {
rootBone.update();
rootBone.setBindingPose();
}
}
/**
* Serializes this skeleton (Savable contract).
*/
public void write(JmeExporter ex) throws IOException {
OutputCapsule output = ex.getCapsule(this);
output.write(rootBones, "rootBones", null);
output.write(boneList, "boneList", null);
}
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index;
import com.orientechnologies.common.listener.OProgressListener;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Fast index for full-text searches.
*
* @author Luca Garulli
*
*/
public class OIndexFullText extends OIndexMultiValues {
public static final String TYPE_ID = OClass.INDEX_TYPE.FULLTEXT.toString();
// Configuration document keys for persisted index settings.
private static final String CONFIG_STOP_WORDS = "stopWords";
private static final String CONFIG_IGNORE_CHARS = "ignoreChars";
// NOTE(review): these defaults are never reassigned here but are not declared final — consider making them final constants.
private static String DEF_IGNORE_CHARS = " \r\n\t:;,.|+*/\\=!?[]()'\"";
private static String DEF_STOP_WORDS = "the in a at as and or for his her " + "him this that what which while "
+ "up with be was is";
// Characters stripped around indexed words (currently always the defaults).
private final String ignoreChars = DEF_IGNORE_CHARS;
// Words excluded from indexing.
private final Set<String> stopWords;
/**
* Creates a full-text index initialized with the default stop-word set.
*/
public OIndexFullText() {
super(TYPE_ID);
stopWords = new HashSet<String>(OStringSerializerHelper.split(DEF_STOP_WORDS, ' '));
}
/**
 * Indexes every field of the given document, then persists the index.
 *
 * @param iDocument
 *          The document to index
 */
public void indexDocument(final ODocument iDocument) {
    modificationLock.requestModificationLock();
    try {
        // index each field value under the words it contains
        for (final String fieldName : iDocument.fieldNames()) {
            final Object fieldValue = iDocument.field(fieldName);
            put(fieldValue, iDocument);
        }
        // persist the updated index under the exclusive lock
        acquireExclusiveLock();
        try {
            map.save();
        } finally {
            releaseExclusiveLock();
        }
    } finally {
        modificationLock.releaseModificationLock();
    }
}
/**
 * Indexes a value: the key is split into single words and the record is linked to
 * each of them. Saving the index is the caller's responsibility. Null keys are a no-op.
 */
@Override
public OIndexFullText put(final Object iKey, final OIdentifiable iSingleValue) {
    if (iKey == null) {
        return this;
    }
    modificationLock.requestModificationLock();
    try {
        // FOREACH WORD CREATE THE LINK TO THE CURRENT DOCUMENT
        for (final String word : splitIntoWords(iKey.toString())) {
            acquireExclusiveLock();
            try {
                // SEARCH FOR THE WORD
                Set<OIdentifiable> references = map.get(word);
                if (references == null) {
                    // WORD NOT EXISTS: CREATE THE KEYWORD CONTAINER THE FIRST TIME THE WORD IS FOUND
                    references = new OMVRBTreeRIDSet().setAutoConvert(false);
                }
                // ADD THE CURRENT DOCUMENT AS REF FOR THAT WORD
                references.add(iSingleValue);
                // SAVE THE INDEX ENTRY
                map.put(word, references);
            } finally {
                releaseExclusiveLock();
            }
        }
        return this;
    } finally {
        modificationLock.releaseModificationLock();
    }
}
/**
* Splits passed in key on several words and remove records with keys equals to any item of split result and values equals to
* passed in value.
*
* @param iKey
* Key to remove.
* @param value
* Value to remove.
* @return <code>true</code> if at least one record is removed.
*/
@Override
public boolean remove(final Object iKey, final OIdentifiable value) {
modificationLock.requestModificationLock();
try {
final List<String> words = splitIntoWords(iKey.toString());
boolean removed = false;
for (final String word : words) {
acquireExclusiveLock();
try {
final Set<OIdentifiable> recs = map.get(word);
if (recs != null && !recs.isEmpty()) {
if (recs.remove(value)) {
if (recs.isEmpty())
map.remove(word);
else
map.put(word, recs);
removed = true;
}
}
} finally {
releaseExclusiveLock();
}
}
return removed;
} finally {
modificationLock.releaseModificationLock();
}
}
@Override
public OIndexInternal<?> create(String iName, OIndexDefinition iIndexDefinition, ODatabaseRecord iDatabase,
String iClusterIndexName, int[] iClusterIdsToIndex, OProgressListener iProgressListener, OStreamSerializer iValueSerializer) {
if (iIndexDefinition.getFields().size() > 1) {
throw new OIndexException(TYPE_ID + " indexes cannot be used as composite ones.");
}
return super.create(iName, iIndexDefinition, iDatabase, iClusterIndexName, iClusterIdsToIndex, iProgressListener,
iValueSerializer);
}
@Override
public OIndexMultiValues create(String iName, OIndexDefinition indexDefinition, ODatabaseRecord iDatabase,
String iClusterIndexName, int[] iClusterIdsToIndex, OProgressListener iProgressListener) {
if (indexDefinition.getFields().size() > 1) {
throw new OIndexException(TYPE_ID + " indexes cannot be used as composite ones.");
}
return super.create(iName, indexDefinition, iDatabase, iClusterIndexName, iClusterIdsToIndex, iProgressListener);
}
@Override
public ODocument updateConfiguration() {
super.updateConfiguration();
configuration.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
try {
configuration.field(CONFIG_IGNORE_CHARS, ignoreChars);
configuration.field(CONFIG_STOP_WORDS, stopWords);
} finally {
configuration.setInternalStatus(ORecordElement.STATUS.LOADED);
}
return configuration;
}
private List<String> splitIntoWords(final String iKey) {
final List<String> result = new ArrayList<String>();
final List<String> words = OStringSerializerHelper.split(iKey, ' ');
final StringBuilder buffer = new StringBuilder();
// FOREACH WORD CREATE THE LINK TO THE CURRENT DOCUMENT
char c;
boolean ignore;
for (String word : words) {
buffer.setLength(0);
for (int i = 0; i < word.length(); ++i) {
c = word.charAt(i);
ignore = false;
for (int k = 0; k < ignoreChars.length(); ++k)
if (c == ignoreChars.charAt(k)) {
ignore = true;
break;
}
if (!ignore)
buffer.append(c);
}
word = buffer.toString();
// CHECK IF IT'S A STOP WORD
if (stopWords.contains(word))
continue;
result.add(word);
}
return result;
}
public boolean canBeUsedInEqualityOperators() {
return false;
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/bigtable/v2/data.proto
package com.google.bigtable.v2;
/**
 * <pre>
 * Specifies a contiguous range of rows.
 * </pre>
 *
 * Protobuf type {@code google.bigtable.v2.RowRange}
 */
public final class RowRange extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.bigtable.v2.RowRange)
    RowRangeOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RowRange.newBuilder() to construct.
  private RowRange(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RowRange() {
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
  // Note the `default` arm precedes the concrete field cases; this is harmless
  // because a Java switch dispatches on the tag value, not on case order.
  private RowRange(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownFieldProto3(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            startKeyCase_ = 1;
            startKey_ = input.readBytes();
            break;
          }
          case 18: {
            startKeyCase_ = 2;
            startKey_ = input.readBytes();
            break;
          }
          case 26: {
            endKeyCase_ = 3;
            endKey_ = input.readBytes();
            break;
          }
          case 34: {
            endKeyCase_ = 4;
            endKey_ = input.readBytes();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_RowRange_descriptor;
  }
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_RowRange_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.bigtable.v2.RowRange.class, com.google.bigtable.v2.RowRange.Builder.class);
  }
  // The start/end bounds are each a protobuf `oneof`: the *Case_ int records
  // which variant is set (0 = unset) and the single Object field holds the
  // value of whichever variant is active.
  private int startKeyCase_ = 0;
  private java.lang.Object startKey_;
  public enum StartKeyCase
      implements com.google.protobuf.Internal.EnumLite {
    START_KEY_CLOSED(1),
    START_KEY_OPEN(2),
    STARTKEY_NOT_SET(0);
    private final int value;
    private StartKeyCase(int value) {
      this.value = value;
    }
    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static StartKeyCase valueOf(int value) {
      return forNumber(value);
    }
    public static StartKeyCase forNumber(int value) {
      switch (value) {
        case 1: return START_KEY_CLOSED;
        case 2: return START_KEY_OPEN;
        case 0: return STARTKEY_NOT_SET;
        default: return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public StartKeyCase
  getStartKeyCase() {
    return StartKeyCase.forNumber(
        startKeyCase_);
  }
  private int endKeyCase_ = 0;
  private java.lang.Object endKey_;
  public enum EndKeyCase
      implements com.google.protobuf.Internal.EnumLite {
    END_KEY_OPEN(3),
    END_KEY_CLOSED(4),
    ENDKEY_NOT_SET(0);
    private final int value;
    private EndKeyCase(int value) {
      this.value = value;
    }
    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static EndKeyCase valueOf(int value) {
      return forNumber(value);
    }
    public static EndKeyCase forNumber(int value) {
      switch (value) {
        case 3: return END_KEY_OPEN;
        case 4: return END_KEY_CLOSED;
        case 0: return ENDKEY_NOT_SET;
        default: return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public EndKeyCase
  getEndKeyCase() {
    return EndKeyCase.forNumber(
        endKeyCase_);
  }
  public static final int START_KEY_CLOSED_FIELD_NUMBER = 1;
  /**
   * <pre>
   * Used when giving an inclusive lower bound for the range.
   * </pre>
   *
   * <code>bytes start_key_closed = 1;</code>
   */
  public com.google.protobuf.ByteString getStartKeyClosed() {
    if (startKeyCase_ == 1) {
      return (com.google.protobuf.ByteString) startKey_;
    }
    return com.google.protobuf.ByteString.EMPTY;
  }
  public static final int START_KEY_OPEN_FIELD_NUMBER = 2;
  /**
   * <pre>
   * Used when giving an exclusive lower bound for the range.
   * </pre>
   *
   * <code>bytes start_key_open = 2;</code>
   */
  public com.google.protobuf.ByteString getStartKeyOpen() {
    if (startKeyCase_ == 2) {
      return (com.google.protobuf.ByteString) startKey_;
    }
    return com.google.protobuf.ByteString.EMPTY;
  }
  public static final int END_KEY_OPEN_FIELD_NUMBER = 3;
  /**
   * <pre>
   * Used when giving an exclusive upper bound for the range.
   * </pre>
   *
   * <code>bytes end_key_open = 3;</code>
   */
  public com.google.protobuf.ByteString getEndKeyOpen() {
    if (endKeyCase_ == 3) {
      return (com.google.protobuf.ByteString) endKey_;
    }
    return com.google.protobuf.ByteString.EMPTY;
  }
  public static final int END_KEY_CLOSED_FIELD_NUMBER = 4;
  /**
   * <pre>
   * Used when giving an inclusive upper bound for the range.
   * </pre>
   *
   * <code>bytes end_key_closed = 4;</code>
   */
  public com.google.protobuf.ByteString getEndKeyClosed() {
    if (endKeyCase_ == 4) {
      return (com.google.protobuf.ByteString) endKey_;
    }
    return com.google.protobuf.ByteString.EMPTY;
  }
  // Memoized initialization flag: -1 unknown, 0 false, 1 true. This message
  // has no required fields, so it is always initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the active oneof variant of each bound, then any unknown
  // fields that were preserved during parsing.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (startKeyCase_ == 1) {
      output.writeBytes(
          1, (com.google.protobuf.ByteString) startKey_);
    }
    if (startKeyCase_ == 2) {
      output.writeBytes(
          2, (com.google.protobuf.ByteString) startKey_);
    }
    if (endKeyCase_ == 3) {
      output.writeBytes(
          3, (com.google.protobuf.ByteString) endKey_);
    }
    if (endKeyCase_ == 4) {
      output.writeBytes(
          4, (com.google.protobuf.ByteString) endKey_);
    }
    unknownFields.writeTo(output);
  }
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (startKeyCase_ == 1) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(
            1, (com.google.protobuf.ByteString) startKey_);
    }
    if (startKeyCase_ == 2) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(
            2, (com.google.protobuf.ByteString) startKey_);
    }
    if (endKeyCase_ == 3) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(
            3, (com.google.protobuf.ByteString) endKey_);
    }
    if (endKeyCase_ == 4) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(
            4, (com.google.protobuf.ByteString) endKey_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // equals/hashCode compare the active oneof case first, then only the value
  // of that case — two RowRanges with different cases are never equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.bigtable.v2.RowRange)) {
      return super.equals(obj);
    }
    com.google.bigtable.v2.RowRange other = (com.google.bigtable.v2.RowRange) obj;
    boolean result = true;
    result = result && getStartKeyCase().equals(
        other.getStartKeyCase());
    if (!result) return false;
    switch (startKeyCase_) {
      case 1:
        result = result && getStartKeyClosed()
            .equals(other.getStartKeyClosed());
        break;
      case 2:
        result = result && getStartKeyOpen()
            .equals(other.getStartKeyOpen());
        break;
      case 0:
      default:
    }
    result = result && getEndKeyCase().equals(
        other.getEndKeyCase());
    if (!result) return false;
    switch (endKeyCase_) {
      case 3:
        result = result && getEndKeyOpen()
            .equals(other.getEndKeyOpen());
        break;
      case 4:
        result = result && getEndKeyClosed()
            .equals(other.getEndKeyClosed());
        break;
      case 0:
      default:
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (startKeyCase_) {
      case 1:
        hash = (37 * hash) + START_KEY_CLOSED_FIELD_NUMBER;
        hash = (53 * hash) + getStartKeyClosed().hashCode();
        break;
      case 2:
        hash = (37 * hash) + START_KEY_OPEN_FIELD_NUMBER;
        hash = (53 * hash) + getStartKeyOpen().hashCode();
        break;
      case 0:
      default:
    }
    switch (endKeyCase_) {
      case 3:
        hash = (37 * hash) + END_KEY_OPEN_FIELD_NUMBER;
        hash = (53 * hash) + getEndKeyOpen().hashCode();
        break;
      case 4:
        hash = (37 * hash) + END_KEY_CLOSED_FIELD_NUMBER;
        hash = (53 * hash) + getEndKeyClosed().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER.
  public static com.google.bigtable.v2.RowRange parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.bigtable.v2.RowRange parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.RowRange parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.bigtable.v2.RowRange parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.bigtable.v2.RowRange prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Specifies a contiguous range of rows.
   * </pre>
   *
   * Protobuf type {@code google.bigtable.v2.RowRange}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.bigtable.v2.RowRange)
      com.google.bigtable.v2.RowRangeOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_RowRange_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_RowRange_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.bigtable.v2.RowRange.class, com.google.bigtable.v2.RowRange.Builder.class);
    }
    // Construct using com.google.bigtable.v2.RowRange.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      startKeyCase_ = 0;
      startKey_ = null;
      endKeyCase_ = 0;
      endKey_ = null;
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.bigtable.v2.DataProto.internal_static_google_bigtable_v2_RowRange_descriptor;
    }
    public com.google.bigtable.v2.RowRange getDefaultInstanceForType() {
      return com.google.bigtable.v2.RowRange.getDefaultInstance();
    }
    public com.google.bigtable.v2.RowRange build() {
      com.google.bigtable.v2.RowRange result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies the builder's oneof state into a new immutable message.
    public com.google.bigtable.v2.RowRange buildPartial() {
      com.google.bigtable.v2.RowRange result = new com.google.bigtable.v2.RowRange(this);
      if (startKeyCase_ == 1) {
        result.startKey_ = startKey_;
      }
      if (startKeyCase_ == 2) {
        result.startKey_ = startKey_;
      }
      if (endKeyCase_ == 3) {
        result.endKey_ = endKey_;
      }
      if (endKeyCase_ == 4) {
        result.endKey_ = endKey_;
      }
      result.startKeyCase_ = startKeyCase_;
      result.endKeyCase_ = endKeyCase_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.bigtable.v2.RowRange) {
        return mergeFrom((com.google.bigtable.v2.RowRange)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge keeps this builder's bounds when `other` has none set.
    public Builder mergeFrom(com.google.bigtable.v2.RowRange other) {
      if (other == com.google.bigtable.v2.RowRange.getDefaultInstance()) return this;
      switch (other.getStartKeyCase()) {
        case START_KEY_CLOSED: {
          setStartKeyClosed(other.getStartKeyClosed());
          break;
        }
        case START_KEY_OPEN: {
          setStartKeyOpen(other.getStartKeyOpen());
          break;
        }
        case STARTKEY_NOT_SET: {
          break;
        }
      }
      switch (other.getEndKeyCase()) {
        case END_KEY_OPEN: {
          setEndKeyOpen(other.getEndKeyOpen());
          break;
        }
        case END_KEY_CLOSED: {
          setEndKeyClosed(other.getEndKeyClosed());
          break;
        }
        case ENDKEY_NOT_SET: {
          break;
        }
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.bigtable.v2.RowRange parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.bigtable.v2.RowRange) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Even on failure, merge whatever was parsed before the error.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Builder-side mirror of the message's oneof state (see message fields).
    private int startKeyCase_ = 0;
    private java.lang.Object startKey_;
    public StartKeyCase
        getStartKeyCase() {
      return StartKeyCase.forNumber(
          startKeyCase_);
    }
    public Builder clearStartKey() {
      startKeyCase_ = 0;
      startKey_ = null;
      onChanged();
      return this;
    }
    private int endKeyCase_ = 0;
    private java.lang.Object endKey_;
    public EndKeyCase
        getEndKeyCase() {
      return EndKeyCase.forNumber(
          endKeyCase_);
    }
    public Builder clearEndKey() {
      endKeyCase_ = 0;
      endKey_ = null;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Used when giving an inclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_closed = 1;</code>
     */
    public com.google.protobuf.ByteString getStartKeyClosed() {
      if (startKeyCase_ == 1) {
        return (com.google.protobuf.ByteString) startKey_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }
    /**
     * <pre>
     * Used when giving an inclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_closed = 1;</code>
     */
    public Builder setStartKeyClosed(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  startKeyCase_ = 1;
      startKey_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Used when giving an inclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_closed = 1;</code>
     */
    public Builder clearStartKeyClosed() {
      if (startKeyCase_ == 1) {
        startKeyCase_ = 0;
        startKey_ = null;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Used when giving an exclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_open = 2;</code>
     */
    public com.google.protobuf.ByteString getStartKeyOpen() {
      if (startKeyCase_ == 2) {
        return (com.google.protobuf.ByteString) startKey_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }
    /**
     * <pre>
     * Used when giving an exclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_open = 2;</code>
     */
    public Builder setStartKeyOpen(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  startKeyCase_ = 2;
      startKey_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Used when giving an exclusive lower bound for the range.
     * </pre>
     *
     * <code>bytes start_key_open = 2;</code>
     */
    public Builder clearStartKeyOpen() {
      if (startKeyCase_ == 2) {
        startKeyCase_ = 0;
        startKey_ = null;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Used when giving an exclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_open = 3;</code>
     */
    public com.google.protobuf.ByteString getEndKeyOpen() {
      if (endKeyCase_ == 3) {
        return (com.google.protobuf.ByteString) endKey_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }
    /**
     * <pre>
     * Used when giving an exclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_open = 3;</code>
     */
    public Builder setEndKeyOpen(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  endKeyCase_ = 3;
      endKey_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Used when giving an exclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_open = 3;</code>
     */
    public Builder clearEndKeyOpen() {
      if (endKeyCase_ == 3) {
        endKeyCase_ = 0;
        endKey_ = null;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Used when giving an inclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_closed = 4;</code>
     */
    public com.google.protobuf.ByteString getEndKeyClosed() {
      if (endKeyCase_ == 4) {
        return (com.google.protobuf.ByteString) endKey_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }
    /**
     * <pre>
     * Used when giving an inclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_closed = 4;</code>
     */
    public Builder setEndKeyClosed(com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  endKeyCase_ = 4;
      endKey_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Used when giving an inclusive upper bound for the range.
     * </pre>
     *
     * <code>bytes end_key_closed = 4;</code>
     */
    public Builder clearEndKeyClosed() {
      if (endKeyCase_ == 4) {
        endKeyCase_ = 0;
        endKey_ = null;
        onChanged();
      }
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.bigtable.v2.RowRange)
  }
  // @@protoc_insertion_point(class_scope:google.bigtable.v2.RowRange)
  private static final com.google.bigtable.v2.RowRange DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.bigtable.v2.RowRange();
  }
  public static com.google.bigtable.v2.RowRange getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser shared by all parseFrom entry points.
  private static final com.google.protobuf.Parser<RowRange>
      PARSER = new com.google.protobuf.AbstractParser<RowRange>() {
    public RowRange parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new RowRange(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<RowRange> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RowRange> getParserForType() {
    return PARSER;
  }
  public com.google.bigtable.v2.RowRange getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/**
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.decision.configuration;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import com.datastax.driver.core.ProtocolOptions;
import com.stratio.decision.StreamingEngine;
import com.stratio.decision.commons.avro.Action;
import com.stratio.decision.commons.avro.ColumnType;
import com.stratio.decision.commons.avro.InsertMessage;
import com.stratio.decision.commons.constants.InternalTopic;
import com.stratio.decision.commons.constants.STREAM_OPERATIONS;
import com.stratio.decision.commons.constants.StreamAction;
import com.stratio.decision.commons.kafka.service.KafkaTopicService;
import com.stratio.decision.commons.messages.StratioStreamingMessage;
import com.stratio.decision.functions.messages.AvroDeserializeMessageFunction;
import com.stratio.decision.functions.FilterDataFunction;
import com.stratio.decision.functions.PairDataFunction;
import com.stratio.decision.functions.SaveToCassandraActionExecutionFunction;
import com.stratio.decision.functions.SaveToElasticSearchActionExecutionFunction;
import com.stratio.decision.functions.SaveToMongoActionExecutionFunction;
import com.stratio.decision.functions.SaveToSolrActionExecutionFunction;
import com.stratio.decision.functions.SendToKafkaActionExecutionFunction;
import com.stratio.decision.functions.SerializerFunction;
import com.stratio.decision.functions.dal.IndexStreamFunction;
import com.stratio.decision.functions.dal.ListenStreamFunction;
import com.stratio.decision.functions.dal.SaveToCassandraStreamFunction;
import com.stratio.decision.functions.dal.SaveToMongoStreamFunction;
import com.stratio.decision.functions.dal.SaveToSolrStreamFunction;
import com.stratio.decision.functions.dal.SendToDroolsStreamFunction;
import com.stratio.decision.functions.ddl.AddQueryToStreamFunction;
import com.stratio.decision.functions.ddl.AlterStreamFunction;
import com.stratio.decision.functions.ddl.CreateStreamFunction;
import com.stratio.decision.functions.dml.InsertIntoStreamFunction;
import com.stratio.decision.functions.dml.ListStreamsFunction;
import com.stratio.decision.functions.messages.FilterAvroMessagesByOperationFunction;
import com.stratio.decision.functions.messages.FilterMessagesByOperationFunction;
import com.stratio.decision.functions.messages.KeepPayloadFromMessageFunction;
import com.stratio.decision.serializer.impl.KafkaToJavaSerializer;
import com.stratio.decision.service.StreamOperationService;
/**
 * Spring configuration that builds the Spark {@code JavaStreamingContext} for the decision engine
 * and wires the Kafka-fed DStreams for the three internal channels: requests (stream definition /
 * action commands), actions (per-stream persistence targets) and data (inserts).
 */
@Configuration
@Import(ServiceConfiguration.class)
public class StreamingContextConfiguration {

    private static Logger log = LoggerFactory.getLogger(StreamingContextConfiguration.class);

    @Autowired
    private ConfigurationContext configurationContext;

    @Autowired
    private StreamOperationService streamOperationService;

    @Autowired
    private KafkaToJavaSerializer kafkaToJavaSerializer;

    private KafkaTopicService kafkaTopicService;

    /**
     * Creates a streaming context configured with Kryo serialization for the message classes and
     * any additional Spark tuning properties found in the configuration.
     *
     * @param streamingContextName Spark application name
     * @param port                 Spark UI port
     * @param streamingBatchTime   micro-batch interval in milliseconds
     * @param sparkHost            Spark master URL
     * @return a new, not-yet-started streaming context
     */
    private JavaStreamingContext create(String streamingContextName, int port, long streamingBatchTime, String sparkHost) {
        SparkConf conf = new SparkConf();
        conf.set("spark.ui.port", String.valueOf(port));
        conf.setAppName(streamingContextName);
        conf.setJars(JavaStreamingContext.jarOfClass(StreamingEngine.class));
        conf.setMaster(sparkHost);
        // Kryo with pre-registered message classes is faster and more compact than Java serialization.
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        conf.registerKryoClasses(new Class[] { StratioStreamingMessage.class, InsertMessage.class, ColumnType.class,
                Action.class});
        // Arbitrary, operator-supplied Spark properties override nothing set above unless keys collide.
        HashMap<String, String> tuningProperties = configurationContext.getSparkTunningProperties();
        if (tuningProperties != null && tuningProperties.size() > 0) {
            tuningProperties.forEach( (key, value) -> conf.set(key, value));
        }
        JavaStreamingContext streamingContext = new JavaStreamingContext(conf, new Duration(streamingBatchTime));
        return streamingContext;
    }

    /**
     * The single streaming context bean. Spring stops it via {@code destroyMethod} on shutdown.
     */
    @Bean(name = "streamingContext", destroyMethod = "stop")
    public JavaStreamingContext streamingContext() {
        JavaStreamingContext context = this.create("stratio-streaming-context", 4040,
                configurationContext.getInternalStreamingBatchTime(), configurationContext.getInternalSparkHost());
        configureRequestContext(context);
        configureActionContext(context);
        configureDataContext(context);
        return context;
    }

    /**
     * Wires the request topic: every stream definition/action command (create, alter, listen, drop,
     * save-to-*) is filtered out of the shared message DStream and routed to its handler function.
     */
    private void configureRequestContext(JavaStreamingContext context) {
        Map<String, Integer> baseTopicMap = new HashMap<>();
        baseTopicMap.put(InternalTopic.TOPIC_REQUEST.getTopicName(), 1);
        kafkaTopicService.createTopicIfNotExist(InternalTopic.TOPIC_REQUEST.getTopicName(), configurationContext.getKafkaReplicationFactor(), configurationContext.getKafkaPartitions());
        /*
         groupId must be the cluster groupId. Kafka assigns each partition of a topic to one, and one only, consumer of
         the group.
         Decision topics has only one partition (by default), so if we have two o more decision instances (consumers)
         reading the same topic with the same groupId, only one instance will be able to read from the topic
         */
        JavaPairDStream<String, String> messages = KafkaUtils.createStream(context,
                configurationContext.getZookeeperHostsQuorumWithPath(), configurationContext.getGroupId(), baseTopicMap);
        // The same DStream is filtered many times below; cache to avoid re-reading per filter.
        messages.cache();
        KeepPayloadFromMessageFunction keepPayloadFromMessageFunction = new KeepPayloadFromMessageFunction();
        CreateStreamFunction createStreamFunction = new CreateStreamFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorumWithPath());
        AlterStreamFunction alterStreamFunction = new AlterStreamFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorumWithPath());
        AddQueryToStreamFunction addQueryToStreamFunction = new AddQueryToStreamFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorumWithPath());
        ListenStreamFunction listenStreamFunction = new ListenStreamFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorumWithPath());
        ListStreamsFunction listStreamsFunction = new ListStreamsFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorumWithPath());
        // Each optional backend (Drools, Cassandra, Elasticsearch, Solr, MongoDB) is only wired when
        // its configuration is present; otherwise the start/stop commands for it are ignored.
        if (configurationContext.getDroolsConfiguration() != null) {
            SendToDroolsStreamFunction sendToDroolsStreamFunction = new SendToDroolsStreamFunction
                    (streamOperationService, configurationContext.getZookeeperHostsQuorumWithPath());
            JavaDStream<StratioStreamingMessage> sendToDroolsRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.START_SENDTODROOLS)).map(
                    keepPayloadFromMessageFunction);
            JavaDStream<StratioStreamingMessage> stopSendToDroolsRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_SENDTODROOLS)).map(
                    keepPayloadFromMessageFunction);
            sendToDroolsRequests.foreachRDD(sendToDroolsStreamFunction);
            stopSendToDroolsRequests.foreachRDD(sendToDroolsStreamFunction);
        } else {
            log.warn("Drools configuration not found.");
        }
        if (configurationContext.getCassandraHosts() != null) {
            SaveToCassandraStreamFunction saveToCassandraStreamFunction = new SaveToCassandraStreamFunction(
                    streamOperationService, configurationContext.getZookeeperHostsQuorumWithPath());
            JavaDStream<StratioStreamingMessage> saveToCassandraRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.SAVETO_CASSANDRA)).map(
                    keepPayloadFromMessageFunction);
            JavaDStream<StratioStreamingMessage> stopSaveToCassandraRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_SAVETO_CASSANDRA)).map(
                    keepPayloadFromMessageFunction);
            saveToCassandraRequests.foreachRDD(saveToCassandraStreamFunction);
            stopSaveToCassandraRequests.foreachRDD(saveToCassandraStreamFunction);
        } else {
            log.warn("Cassandra configuration not found.");
        }
        if (configurationContext.getElasticSearchHosts() != null) {
            IndexStreamFunction indexStreamFunction = new IndexStreamFunction(streamOperationService,
                    configurationContext.getZookeeperHostsQuorumWithPath());
            JavaDStream<StratioStreamingMessage> streamToIndexerRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.INDEX)).map(
                    keepPayloadFromMessageFunction);
            JavaDStream<StratioStreamingMessage> stopStreamToIndexerRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_INDEX)).map(
                    keepPayloadFromMessageFunction);
            streamToIndexerRequests.foreachRDD(indexStreamFunction);
            stopStreamToIndexerRequests.foreachRDD(indexStreamFunction);
        } else {
            log.warn("Elasticsearch configuration not found.");
        }
        if (configurationContext.getSolrHost() != null) {
            SaveToSolrStreamFunction solrStreamFunction = new SaveToSolrStreamFunction(streamOperationService,
                    configurationContext.getZookeeperHostsQuorumWithPath());
            JavaDStream<StratioStreamingMessage> saveToSolrRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.SAVETO_SOLR)).map(
                    keepPayloadFromMessageFunction);
            JavaDStream<StratioStreamingMessage> stopSaveToSolrRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_SAVETO_SOLR)).map(
                    keepPayloadFromMessageFunction);
            saveToSolrRequests.foreachRDD(solrStreamFunction);
            stopSaveToSolrRequests.foreachRDD(solrStreamFunction);
        } else {
            log.warn("Solr configuration not found.");
        }
        if (configurationContext.getMongoHosts() != null) {
            SaveToMongoStreamFunction saveToMongoStreamFunction = new SaveToMongoStreamFunction(streamOperationService,
                    configurationContext.getZookeeperHostsQuorumWithPath());
            JavaDStream<StratioStreamingMessage> saveToMongoRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.SAVETO_MONGO)).map(
                    keepPayloadFromMessageFunction);
            JavaDStream<StratioStreamingMessage> stopSaveToMongoRequests = messages.filter(
                    new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_SAVETO_MONGO)).map(
                    keepPayloadFromMessageFunction);
            saveToMongoRequests.foreachRDD(saveToMongoStreamFunction);
            stopSaveToMongoRequests.foreachRDD(saveToMongoStreamFunction);
        } else {
            log.warn("Mongodb configuration not found.");
        }
        // Create a DStream for each command, so we can treat all related
        // requests in the same way and also apply functions by command
        JavaDStream<StratioStreamingMessage> createRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.DEFINITION.CREATE)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> alterRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.DEFINITION.ALTER)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> addQueryRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.DEFINITION.ADD_QUERY)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> removeQueryRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.DEFINITION.REMOVE_QUERY)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> listenRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.LISTEN)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> stopListenRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.ACTION.STOP_LISTEN)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> listRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.MANIPULATION.LIST)).map(
                keepPayloadFromMessageFunction);
        JavaDStream<StratioStreamingMessage> dropRequests = messages.filter(
                new FilterMessagesByOperationFunction(STREAM_OPERATIONS.DEFINITION.DROP)).map(
                keepPayloadFromMessageFunction);
        createRequests.foreachRDD(createStreamFunction);
        alterRequests.foreachRDD(alterStreamFunction);
        // NOTE: add/remove query share one handler, as do listen/stop-listen and create/drop.
        addQueryRequests.foreachRDD(addQueryToStreamFunction);
        removeQueryRequests.foreachRDD(addQueryToStreamFunction);
        listenRequests.foreachRDD(listenStreamFunction);
        stopListenRequests.foreachRDD(listenStreamFunction);
        listRequests.foreachRDD(listStreamsFunction);
        dropRequests.foreachRDD(createStreamFunction);
        if (configurationContext.isAuditEnabled() || configurationContext.isStatsEnabled()) {
            JavaDStream<StratioStreamingMessage> allRequests = createRequests.union(alterRequests)
                    .union(addQueryRequests).union(removeQueryRequests).union(listenRequests).union(stopListenRequests)
                    .union(listRequests).union(dropRequests);
            // TODO enable audit functionality
            // if (configurationContext.isAuditEnabled()) {
            // SaveRequestsToAuditLogFunction saveRequestsToAuditLogFunction =
            // new SaveRequestsToAuditLogFunction(
            // configurationContext.getCassandraHostsQuorum());
            //
            // // persist the RDDs to cassandra using STRATIO DEEP
            // allRequests.window(new Duration(2000), new
            // Duration(6000)).foreachRDD(saveRequestsToAuditLogFunction);
            // }
        }
    }

    /**
     * Wires the action topic: Avro-encoded messages are deserialized, paired by their
     * {@code StreamAction}, grouped, and dispatched to whichever persistence backends pass their
     * {@code check()}.
     */
    private void configureActionContext(JavaStreamingContext context) {
        Map<String, Integer> baseTopicMap = new HashMap<>();
        String topicName = InternalTopic.TOPIC_ACTION.getTopicName();
        // In clustered deployments each group gets its own action topic to avoid cross-reading.
        if (configurationContext.isClusteringEnabled() && configurationContext.getGroupId()!=null){
            topicName = topicName.concat("_").concat(configurationContext.getGroupId());
        }
        baseTopicMap.put(topicName, 1);
        kafkaTopicService.createTopicIfNotExist(topicName, configurationContext.getKafkaReplicationFactor(),
                configurationContext.getKafkaPartitions());
        HashMap<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("zookeeper.connect", configurationContext.getZookeeperHostsQuorumWithPath());
        kafkaParams.put("group.id", configurationContext.getGroupId());
        /*
         groupId must be the cluster groupId. Kafka assigns each partition of a topic to one, and one only, consumer of
         the group.
         Decision topics has only one partition (by default), so if we have two o more decision instances (consumers) reading the
         same topic with the same groupId, only one instance will be able to read from the topic
         */
        JavaPairDStream<String, byte[]> messages = KafkaUtils.createStream(context, String.class, byte[].class,
                kafka.serializer.StringDecoder.class, kafka.serializer.DefaultDecoder.class, kafkaParams, baseTopicMap,
                StorageLevel.MEMORY_AND_DISK_SER());
        AvroDeserializeMessageFunction avroDeserializeMessageFunction = new AvroDeserializeMessageFunction();
        JavaDStream<StratioStreamingMessage> parsedDataDstream = messages.map(avroDeserializeMessageFunction);
        JavaPairDStream<StreamAction, StratioStreamingMessage> pairedDataDstream = parsedDataDstream
                .mapPartitionsToPair(new PairDataFunction());
        JavaPairDStream<StreamAction, Iterable<StratioStreamingMessage>> groupedDataDstream = pairedDataDstream
                .groupByKey();
        // groupedDataDstream.cache();
        // The grouped stream is filtered once per backend below; persist to avoid recomputation.
        groupedDataDstream.persist(StorageLevel.MEMORY_AND_DISK_SER());
        try {
            SaveToCassandraActionExecutionFunction saveToCassandraActionExecutionFunction = new SaveToCassandraActionExecutionFunction(configurationContext.getCassandraHostsQuorum(),
                    ProtocolOptions.DEFAULT_PORT, configurationContext.getCassandraMaxBatchSize(),
                    configurationContext.getCassandraBatchType());
            if (saveToCassandraActionExecutionFunction.check()) {
                log.info("Cassandra is configured properly");
                groupedDataDstream.filter(new FilterDataFunction(StreamAction.SAVE_TO_CASSANDRA)).foreachRDD(
                        saveToCassandraActionExecutionFunction);
            } else {
                log.warn("Cassandra is NOT configured properly");
            }
            SaveToMongoActionExecutionFunction saveToMongoActionExecutionFunction = new SaveToMongoActionExecutionFunction(configurationContext.getMongoHosts(),
                    configurationContext.getMongoUsername(), configurationContext
                    .getMongoPassword(), configurationContext.getMongoMaxBatchSize());
            if (saveToMongoActionExecutionFunction.check()) {
                log.info("MongoDB is configured properly");
                groupedDataDstream.filter(new FilterDataFunction(StreamAction.SAVE_TO_MONGO)).foreachRDD(
                        saveToMongoActionExecutionFunction);
            } else {
                log.warn("MongoDB is NOT configured properly");
            }
            SaveToElasticSearchActionExecutionFunction saveToElasticSearchActionExecutionFunction = new SaveToElasticSearchActionExecutionFunction(configurationContext.getElasticSearchHosts(),
                    configurationContext.getElasticSearchClusterName(), configurationContext.getElasticSearchMaxBatchSize());
            if (saveToElasticSearchActionExecutionFunction.check()) {
                log.info("ElasticSearch is configured properly");
                groupedDataDstream.filter(new FilterDataFunction(StreamAction.SAVE_TO_ELASTICSEARCH)).foreachRDD(saveToElasticSearchActionExecutionFunction);
            } else {
                log.warn("ElasticSearch is NOT configured properly");
            }
            SaveToSolrActionExecutionFunction saveToSolrActionExecutionFunction = new
                    SaveToSolrActionExecutionFunction(configurationContext.getSolrHost(), configurationContext
                    .getSolrCloudZkHost(),
                    configurationContext.getSolrCloud(),
                    configurationContext.getSolrDataDir(), configurationContext.getSolrMaxBatchSize());
            if (saveToSolrActionExecutionFunction.check()) {
                log.info("Solr is configured properly");
                groupedDataDstream.filter(new FilterDataFunction(StreamAction.SAVE_TO_SOLR)).foreachRDD(
                        saveToSolrActionExecutionFunction);
            } else {
                log.warn("Solr is NOT configured properly");
            }
            groupedDataDstream.filter(new FilterDataFunction(StreamAction.LISTEN)).foreachRDD(
                    new SendToKafkaActionExecutionFunction(configurationContext.getKafkaHostsQuorum()));
        } catch (Exception e) {
            // Log through the class logger (with the cause) instead of printStackTrace so the
            // failure reaches the configured appenders; the context is still returned as before.
            log.error("Error configuring the action context", e);
        }
    }

    /**
     * Wires the data topics: insert messages are Avro-deserialized and handed to the stream
     * operation service.
     */
    private void configureDataContext(JavaStreamingContext context) {
        Map<String, Integer> baseTopicMap = new HashMap<>();
        configurationContext.getDataTopics().forEach( dataTopic -> baseTopicMap.put(dataTopic, 1));
        kafkaTopicService.createTopicsIfNotExist(configurationContext.getDataTopics(), configurationContext
                .getKafkaReplicationFactor(), configurationContext.getKafkaPartitions());
        HashMap<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("zookeeper.connect", configurationContext.getZookeeperHostsQuorumWithPath());
        kafkaParams.put("group.id", configurationContext.getGroupId());
        /*
         groupId must be the cluster groupId. Kafka assigns each partition of a topic to one, and one only, consumer of
         the group.
         Decision topics has only one partition (by default), so if we have two o more decision instances (consumers) reading the
         same topic with the same groupId, only one instance will be able to read from the topic
         */
        JavaPairDStream<String, byte[]> messages = KafkaUtils.createStream(context, String.class, byte[].class,
                kafka.serializer.StringDecoder.class, kafka.serializer.DefaultDecoder.class, kafkaParams, baseTopicMap,
                StorageLevel.MEMORY_AND_DISK_SER());
        AvroDeserializeMessageFunction avroDeserializeMessageFunction = new AvroDeserializeMessageFunction();
        JavaDStream<StratioStreamingMessage> insertRequests = messages.filter(
                new FilterAvroMessagesByOperationFunction(STREAM_OPERATIONS.MANIPULATION.INSERT))
                .map(avroDeserializeMessageFunction);
        // NOTE(review): this uses getZookeeperHostsQuorum() while every other wiring in this class
        // uses getZookeeperHostsQuorumWithPath() — confirm the difference is intentional.
        InsertIntoStreamFunction insertIntoStreamFunction = new InsertIntoStreamFunction(streamOperationService,
                configurationContext.getZookeeperHostsQuorum());
        insertRequests.foreachRDD(insertIntoStreamFunction);
    }

    /**
     * Builds the Kafka topic service after injection; it is needed by all configure* methods above.
     */
    @PostConstruct
    private void initTopicService() {
        kafkaTopicService = new KafkaTopicService(configurationContext.getZookeeperHostsQuorumWithPath(),
                configurationContext.getKafkaConsumerBrokerHost(), configurationContext.getKafkaConsumerBrokerPort(),
                configurationContext.getKafkaConnectionTimeout(), configurationContext.getKafkaSessionTimeout());
    }
}
| |
/*
* Copyright (c) 2009-2013 Clark & Parsia, LLC. <http://www.clarkparsia.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.clarkparsia.empire.impl;
import com.clarkparsia.empire.ds.DataSource;
import com.clarkparsia.empire.QueryFactory;
import com.clarkparsia.empire.Empire;
import com.clarkparsia.empire.Dialect;
import com.google.common.collect.Maps;
import javax.persistence.Query;
import javax.persistence.NamedQuery;
import javax.persistence.QueryHint;
import javax.persistence.NamedQueries;
import javax.persistence.NamedNativeQuery;
import javax.persistence.NamedNativeQueries;
import java.util.HashMap;
import java.util.Map;
import java.util.Collection;
import java.util.Arrays;
/**
* <p>Implements the common operations of a {@link QueryFactory} and defers query language specific operations
* to concrete implementations of this class.</p>
*
* @author Michael Grove
* @since 0.1
* @version 0.6.5
*/
public class RdfQueryFactory implements QueryFactory {
/**
* the data source the queries will be executed against
*/
private DataSource mSource;
/**
* The query language dialect supported by this factory
*/
private Dialect mDialect;
/**
* User-defined NamedQueries. The actual queries are evaluated on-demand, we'll just keep the annotations which
* contain the information needed to create them here.
*/
private Map<String, NamedQueryInfo> mNamedQueries = Maps.newHashMap();
/**
* Create a new AbstractQueryFactory
* @param theSource the data source the queries will be executed against
* @param theDialect the Query dialect supported by this query factory
*/
public RdfQueryFactory(final DataSource theSource, Dialect theDialect) {
mSource = theSource;
mDialect = theDialect;
Collection<Class<?>> aClasses = Empire.get().getAnnotationProvider().getClassesWithAnnotation(NamedQuery.class);
for (Class<?> aClass : aClasses) {
addNamedQuery(new NamedQueryInfo(aClass.getAnnotation(NamedQuery.class)));
}
aClasses = Empire.get().getAnnotationProvider().getClassesWithAnnotation(NamedQueries.class);
for (Class<?> aClass : aClasses) {
NamedQueries aNamedQueries = aClass.getAnnotation(NamedQueries.class);
for (NamedQuery aQuery : aNamedQueries.value()) {
addNamedQuery(new NamedQueryInfo(aQuery));
}
}
aClasses = Empire.get().getAnnotationProvider().getClassesWithAnnotation(NamedNativeQuery.class);
for (Class<?> aClass : aClasses) {
addNamedQuery(new NamedQueryInfo(aClass.getAnnotation(NamedNativeQuery.class)));
}
aClasses = Empire.get().getAnnotationProvider().getClassesWithAnnotation(NamedNativeQueries.class);
for (Class<?> aClass : aClasses) {
NamedNativeQueries aQueries = aClass.getAnnotation(NamedNativeQueries.class);
for (NamedNativeQuery aQuery : aQueries.value()) {
addNamedQuery(new NamedQueryInfo(aQuery));
}
}
}
/**
* Create a new Query against the current data source with the given query string
* @param theQuery the query string
* @return a new query
*/
protected RdfQuery newQuery(String theQuery) {
return new RdfQuery(mSource, theQuery);
}
/**
* Return the data source the queries will be executed against
* @return the data source
*/
protected DataSource getSource() {
return mSource;
}
/**
* Add a named query to this factory
* @param theInfo the information about the query
*/
private void addNamedQuery(NamedQueryInfo theInfo) {
mNamedQueries.put(theInfo.getName(), theInfo);
}
/**
* @inheritDoc
*/
@Override
public Dialect getDialect() {
return mDialect;
}
/**
* @inheritDoc
*/
@Override
public Query createQuery(final String theQueryString) {
return newQuery(theQueryString);
}
/**
* @inheritDoc
*/
@Override
public Query createNamedQuery(final String theName) {
if (mNamedQueries.containsKey(theName)) {
NamedQueryInfo aNamedQuery = mNamedQueries.get(theName);
RdfQuery aQuery = newQuery(aNamedQuery.getQuery());
for (QueryHint aHint : aNamedQuery.getHints()) {
aQuery.setHint(aHint.name(), aHint.value());
}
aQuery.setSource(getSource());
return aQuery;
}
else {
throw new IllegalArgumentException("Query named '" + theName + "' does not exist.");
}
}
/**
* @inheritDoc
*/
@Override
public Query createNativeQuery(final String theQueryString) {
return newQuery(theQueryString);
}
/**
* @inheritDoc
*/
@Override
public Query createNativeQuery(final String theQueryString, final Class theResultClass) {
RdfQuery aQuery = newQuery(theQueryString);
aQuery.setBeanClass(theResultClass);
return aQuery;
}
/**
* @inheritDoc
*/
@Override
public Query createNativeQuery(final String theQueryString, final String theResultSetMapping) {
throw new UnsupportedOperationException();
}
private class NamedQueryInfo {
private String mName;
private String mQuery;
private Class mResultClass;
private Collection<QueryHint> mHints;
private String mResultMapping;
private NamedQueryInfo(final String theName, final String theQuery) {
mName = theName;
mQuery = theQuery;
}
private NamedQueryInfo(NamedQuery theQuery) {
mName = theQuery.name();
mQuery = theQuery.query();
mHints = Arrays.asList(theQuery.hints());
}
private NamedQueryInfo(NamedNativeQuery theQuery) {
mName = theQuery.name();
mQuery = theQuery.query();
mResultMapping = theQuery.resultSetMapping();
mResultClass = theQuery.resultClass();
mHints = Arrays.asList(theQuery.hints());
}
public String getName() {
return mName;
}
public void setName(final String theName) {
mName = theName;
}
public String getQuery() {
return mQuery;
}
public void setQuery(final String theQuery) {
mQuery = theQuery;
}
public Class getResultClass() {
return mResultClass;
}
public void setResultClass(final Class theResultClass) {
mResultClass = theResultClass;
}
public Collection<QueryHint> getHints() {
return mHints;
}
public void setHints(final Collection<QueryHint> theHints) {
mHints = theHints;
}
public String getResultMapping() {
return mResultMapping;
}
public void setResultMapping(final String theResultMapping) {
mResultMapping = theResultMapping;
}
}
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.component.data.paging;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import org.apache.commons.lang.BooleanUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxChannel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.list.Loop;
import org.apache.wicket.markup.html.list.LoopItem;
import org.apache.wicket.markup.html.navigation.paging.IPageable;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.AbstractRepeater;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
/**
* @author lazyman
*/
/**
 * Paging navigator panel: first/previous links, a window of at most {@link #PAGING_SIZE} numbered
 * page links centered on the current page, and next/last links.  All navigation is Ajax-driven.
 *
 * @author lazyman
 */
public class NavigatorPanel extends Panel {

    /** maximum number of numbered page links shown at once (was a mutable instance field; it is a constant) */
    private static final int PAGING_SIZE = 5;

    private static final String ID_PREVIOUS = "previous";
    private static final String ID_PREVIOUS_LINK = "previousLink";
    private static final String ID_FIRST = "first";
    private static final String ID_FIRST_LINK = "firstLink";
    private static final String ID_LAST = "last";
    private static final String ID_LAST_LINK = "lastLink";
    // private static final String ID_DOTS = "dots";
    private static final String ID_NAVIGATION = "navigation";
    private static final String ID_PAGE_LINK = "pageLink";
    private static final String ID_NEXT = "next";
    private static final String ID_NEXT_LINK = "nextLink";

    private final IPageable pageable;
    private final IModel<Boolean> showPageListingModel;

    public NavigatorPanel(String id, IPageable pageable, final boolean showPageListing) {
        this(id, pageable, new AbstractReadOnlyModel<Boolean>() {
            @Override
            public Boolean getObject() {
                return showPageListing;
            }
        });
    }

    public NavigatorPanel(String id, IPageable pageable, IModel<Boolean> showPageListingModel) {
        super(id);
        this.pageable = pageable;
        this.showPageListingModel = showPageListingModel;
        setOutputMarkupId(true);
        // hide the whole panel when there is nothing to page over
        add(new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                return NavigatorPanel.this.pageable.getPageCount() > 0;
            }
        });
        initLayout();
    }

    private void initLayout() {
        initFirst();
        initPrevious();
        initNavigation();
        initNext();
        initLast();
    }

    /** "previous page" arrow; disabled (CSS class + behaviour) on the first page. */
    private void initPrevious() {
        WebMarkupContainer previous = new WebMarkupContainer(ID_PREVIOUS);
        previous.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return isPreviousEnabled() ? "" : "disabled";
            }
        }));
        add(previous);
        AjaxLink previousLink = new AjaxLink(ID_PREVIOUS_LINK) {
            @Override
            protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
                attributes.setChannel(new AjaxChannel("blocking", AjaxChannel.Type.ACTIVE));
            }
            @Override
            public void onClick(AjaxRequestTarget target) {
                previousPerformed(target);
            }
        };
        previousLink.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isEnabled() {
                return isPreviousEnabled();
            }
        });
        previous.add(previousLink);
    }

    /** "first page" link; only active when page listing is shown and we are not on page 0. */
    private void initFirst() {
        WebMarkupContainer first = new WebMarkupContainer(ID_FIRST);
        first.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return isFirstEnabled() ? "" : "disabled";
            }
        }));
        add(first);
        AjaxLink firstLink = new AjaxLink(ID_FIRST_LINK) {
            @Override
            protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
                attributes.setChannel(new AjaxChannel("blocking", AjaxChannel.Type.ACTIVE));
            }
            @Override
            public void onClick(AjaxRequestTarget target) {
                firstPerformed(target);
            }
        };
        firstLink.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isEnabled() {
                return BooleanUtils.isTrue(showPageListingModel.getObject()) && isFirstEnabled();
            }
        });
        first.add(firstLink);
    }

    /** The window of numbered page links (at most PAGING_SIZE of them). */
    private void initNavigation() {
        IModel<Integer> model = new AbstractReadOnlyModel<Integer>() {
            @Override
            public Integer getObject() {
                int count = (int) pageable.getPageCount();
                if (count < PAGING_SIZE) {
                    return count;
                }
                return PAGING_SIZE;
            }
        };
        Loop navigation = new Loop(ID_NAVIGATION, model) {
            @Override
            protected void populateItem(final LoopItem item) {
                final NavigatorPageLink pageLink = new NavigatorPageLink(ID_PAGE_LINK,
                        computePageNumber(item.getIndex())) {
                    @Override
                    public void onClick(AjaxRequestTarget target) {
                        pageLinkPerformed(target, getPageNumber());
                    }
                };
                item.add(pageLink);
                item.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {
                    @Override
                    public String getObject() {
                        return pageable.getCurrentPage() == pageLink.getPageNumber() ? "active" : "";
                    }
                }));
            }
        };
        navigation.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                return BooleanUtils.isTrue(showPageListingModel.getObject());
            }
        });
        add(navigation);
    }

    /**
     * Maps a loop index (0..PAGING_SIZE-1) to an absolute page number, keeping the window
     * centered on the current page and clamped to the first/last pages.
     */
    private long computePageNumber(int loopIndex) {
        long current = pageable.getCurrentPage();
        long count = pageable.getPageCount();
        final long half = PAGING_SIZE / 2;
        long result;
        if (current - half <= 0) {
            // near the start: window begins at page 0
            result = loopIndex;
        } else if (current + half + 1 >= count) {
            // near the end: window ends at the last page
            result = count - PAGING_SIZE + loopIndex;
        } else {
            result = current - half + loopIndex;
        }
        //TODO - this is just quick dirty fix for MID-1808. Fix algorithm later
        if(count == 4 && current == 3){
            result++;
        }
        return result;
    }

    /** "next page" arrow; disabled on the last page. */
    private void initNext() {
        WebMarkupContainer next = new WebMarkupContainer(ID_NEXT);
        next.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return isNextEnabled() ? "" : "disabled";
            }
        }));
        add(next);
        AjaxLink nextLink = new AjaxLink(ID_NEXT_LINK) {
            @Override
            protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
                attributes.setChannel(new AjaxChannel("blocking", AjaxChannel.Type.ACTIVE));
            }
            @Override
            public void onClick(AjaxRequestTarget target) {
                nextPerformed(target);
            }
        };
        nextLink.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isEnabled() {
                return isNextEnabled();
            }
        });
        next.add(nextLink);
    }

    /** "last page" link; only active when page listing is shown and we are not on the last page. */
    private void initLast() {
        WebMarkupContainer last = new WebMarkupContainer(ID_LAST);
        last.add(new AttributeModifier("class", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                return isLastEnabled() ? "" : "disabled";
            }
        }));
        add(last);
        AjaxLink lastLink = new AjaxLink(ID_LAST_LINK) {
            @Override
            protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
                attributes.setChannel(new AjaxChannel("blocking", AjaxChannel.Type.ACTIVE));
            }
            @Override
            public void onClick(AjaxRequestTarget target) {
                lastPerformed(target);
            }
        };
        lastLink.add(new VisibleEnableBehaviour() {
            @Override
            public boolean isEnabled() {
                return BooleanUtils.isTrue(showPageListingModel.getObject()) && isLastEnabled();
            }
        });
        last.add(lastLink);
    }

    private boolean isPreviousEnabled() {
        return pageable.getCurrentPage() > 0;
    }

    private boolean isNextEnabled() {
        return pageable.getCurrentPage() + 1 < pageable.getPageCount();
    }

    private boolean isFirstEnabled() {
        return pageable.getCurrentPage() > 0;
    }

    private boolean isLastEnabled(){
        return pageable.getCurrentPage() +1 < pageable.getPageCount();
    }

    private void previousPerformed(AjaxRequestTarget target) {
        changeCurrentPage(target, pageable.getCurrentPage() - 1);
    }

    private void firstPerformed(AjaxRequestTarget target) {
        changeCurrentPage(target, 0);
    }

    private void lastPerformed(AjaxRequestTarget target){
        changeCurrentPage(target, pageable.getPageCount() - 1);
    }

    private void nextPerformed(AjaxRequestTarget target) {
        changeCurrentPage(target, pageable.getCurrentPage() + 1);
    }

    /**
     * Moves to the given page and repaints both the pageable component's nearest non-repeater
     * ancestor (repeaters cannot be Ajax-refreshed directly) and this navigator.
     */
    private void changeCurrentPage(AjaxRequestTarget target, long page) {
        pageable.setCurrentPage(page);
        Component container = ((Component) pageable);
        while (container instanceof AbstractRepeater) {
            container = container.getParent();
        }
        target.add(container);
        target.add(this);
        onPageChanged(target, page);
    }

    private void pageLinkPerformed(AjaxRequestTarget target, long page) {
        changeCurrentPage(target, page);
    }

    /** Hook for subclasses; called after the current page has changed. Default: no-op. */
    protected void onPageChanged(AjaxRequestTarget target, long page) {
    }
}
| |
package com.jivesoftware.os.upena.deployable;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jivesoftware.os.amza.api.ring.RingHost;
import com.jivesoftware.os.amza.api.ring.RingMemberAndHost;
import com.jivesoftware.os.amza.service.AmzaService;
import com.jivesoftware.os.amza.service.ring.AmzaRingReader;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import com.jivesoftware.os.routing.bird.http.client.HttpRequestHelper;
import com.jivesoftware.os.routing.bird.http.client.HttpRequestHelperUtils;
import com.jivesoftware.os.routing.bird.shared.BoundedExecutor;
import com.jivesoftware.os.routing.bird.shared.InstanceDescriptor;
import com.jivesoftware.os.upena.deployable.region.SparseCircularHitsBucketBuffer;
import com.jivesoftware.os.upena.service.UpenaConfigStore;
import com.jivesoftware.os.upena.shared.HostKey;
import com.jivesoftware.os.upena.uba.service.Nanny;
import com.jivesoftware.os.upena.uba.service.UbaService;
import java.awt.Color;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Collects and caches health information for an Upena cluster: the local node's own health
 * (via {@link #buildNodeHealth()}) and, asynchronously, the health of every node in the Amza
 * system ring (via {@link #buildClusterHealth()}). Also provides small color/uptime helpers
 * used by the UI to render health.
 *
 * @author jonathan.colt
 */
public class UpenaHealth {

    private static final MetricLogger LOG = MetricLoggerFactory.getLogger();

    private final ObjectMapper mapper = new ObjectMapper();
    private final AmzaService amzaService;
    private final UpenaSSLConfig upenaSSLConfig;
    private final UpenaConfigStore upenaConfigStore;
    private final UbaService ubaService;
    private final RingHost ringHost;
    private final HostKey ringHostKey;
    // Used as the lower bound for instance uptime when a nanny has no recorded start time.
    private final long startupTime = System.currentTimeMillis();
    // Bounded pool used to fan out per-node health fetches without blocking the caller.
    private final ExecutorService executorService = BoundedExecutor.newBoundedExecutor(Runtime.getRuntime().availableProcessors(), "node-health");

    /**
     * @param amzaService      ring membership source for cluster-wide health polling
     * @param upenaSSLConfig   TLS settings used when calling peer nodes
     * @param upenaConfigStore used to detect stale instance config
     * @param ubaService       source of local nanny (instance) health
     * @param ringHost         this node's host/port
     * @param ringHostKey      this node's host key
     */
    public UpenaHealth(
        AmzaService amzaService,
        UpenaSSLConfig upenaSSLConfig,
        UpenaConfigStore upenaConfigStore,
        UbaService ubaService,
        RingHost ringHost,
        HostKey ringHostKey) {

        this.amzaService = amzaService;
        this.upenaSSLConfig = upenaSSLConfig;
        this.upenaConfigStore = upenaConfigStore;
        this.ubaService = ubaService;
        this.ringHost = ringHost;
        this.ringHostKey = ringHostKey;

        // Be lenient when deserializing NodeHealth payloads fetched from peer nodes, so a
        // version skew in the payload shape does not turn into a deserialization failure.
        this.mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        this.mapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false);
        this.mapper.configure(DeserializationFeature.FAIL_ON_MISSING_CREATOR_PROPERTIES, false);
    }

    /**
     * Builds a CSS {@code linear-gradient(...)} expression whose color stops are the sorted
     * health values of every enabled instance across the cluster.
     *
     * @return a CSS linear-gradient string suitable for a style attribute
     * @throws Exception if cluster health cannot be gathered
     */
    public String healthGradient() throws Exception {
        ConcurrentMap<RingHost, NodeHealth> health = buildClusterHealth();
        List<Double> healths = Lists.newArrayList();
        for (NodeHealth nodeHealth : health.values()) {
            for (NannyHealth nannyHealth : nodeHealth.nannyHealths) {
                if (nannyHealth.instanceDescriptor.enabled) {
                    healths.add(nannyHealth.serviceHealth.health);
                }
            }
        }
        if (healths.size() == 1) {
            // a CSS gradient needs at least two color stops
            healths.add(healths.get(0));
        }
        Collections.sort(healths);
        List<String> gradient = Lists.newArrayList();
        for (Double h : healths) {
            String color = "rgba(" + trafficlightColorRGBA(h, 0.9f) + ")";
            if (h < 0 || h > 1) {
                // out-of-range health renders as opaque white
                color = "rgba(255,255,255,1)";
            }
            gradient.add(color);
        }
        return "linear-gradient(to right, " + Joiner.on(", ").join(gradient) + ")";
    }

    // Last known health per ring node (refreshed asynchronously by buildClusterHealth).
    public final ConcurrentMap<RingHost, UpenaHealth.NodeHealth> nodeHealths = Maps.newConcurrentMap();
    // "host:port" -> timestamp of the most recent successful or failed poll of that node.
    public final ConcurrentMap<String, Long> nodeRecency = Maps.newConcurrentMap();
    // Guard map ensuring at most one in-flight poll per node.
    public final ConcurrentMap<RingHost, Boolean> currentlyExecuting = Maps.newConcurrentMap();
    public final ConcurrentMap<RingHost, Long> lastExecuted = Maps.newConcurrentMap();
    // instanceKey -> rolling history of that instance's health samples.
    public final Map<String, InstanceSparseCircularHitsBucketBuffer> instanceHealthHistory = new ConcurrentHashMap<>();

    /**
     * Kicks off (at most one per node) asynchronous health fetches for every member of the
     * system ring and returns the current cache immediately. The returned map therefore
     * reflects the previous round of polling, not the one just started.
     *
     * @return cached per-node health, updated in the background
     * @throws Exception if the ring membership cannot be read
     */
    public ConcurrentMap<RingHost, NodeHealth> buildClusterHealth() throws Exception {

        for (RingMemberAndHost ringMemberAndHost : amzaService.getRingReader().getRing(AmzaRingReader.SYSTEM_RING, 30_000L).entries) {
            RingHost ringHost = ringMemberAndHost.ringHost;
            // putIfAbsent acts as a per-host mutex: only the winner schedules a fetch.
            if (currentlyExecuting.putIfAbsent(ringHost, true) == null) {
                Long timestamp = lastExecuted.get(ringHost);
                // NOTE(review): a 1ms staleness window effectively always re-polls — confirm intended.
                if (timestamp == null || timestamp + 1 < System.currentTimeMillis()) {
                    executorService.submit(() -> {
                        try {
                            HttpRequestHelper requestHelper = HttpRequestHelperUtils.buildRequestHelper(upenaSSLConfig.sslEnable,
                                upenaSSLConfig.allowSelfSignedCerts, upenaSSLConfig.signer, ringHost.getHost(), ringHost.getPort());

                            UpenaHealth.NodeHealth nodeHealth = requestHelper.executeGetRequest(
                                "/upena/health/instance", UpenaHealth.NodeHealth.class, null);
                            nodeHealths.put(ringHost, nodeHealth);

                            // Fold each instance's current health into its rolling history buffer.
                            for (UpenaHealth.NannyHealth nannyHealth : nodeHealth.nannyHealths) {
                                instanceHealthHistory.compute(nannyHealth.instanceDescriptor.instanceKey, (instanceKey, instance) -> {
                                    if (instance == null) {
                                        instance = new InstanceSparseCircularHitsBucketBuffer(nannyHealth.instanceDescriptor,
                                            new SparseCircularHitsBucketBuffer(60, 0, 1000));
                                    }
                                    instance.buffer.set(System.currentTimeMillis(), Math.max(0d, nannyHealth.serviceHealth.health));
                                    return instance;
                                });
                            }
                        } catch (Exception x) {
                            // Record the node as unhealthy so the UI shows it, rather than dropping it.
                            UpenaHealth.NodeHealth nodeHealth = new UpenaHealth.NodeHealth("", ringHost.getHost(), ringHost.getPort());
                            nodeHealth.health = 0.0d;
                            nodeHealth.nannyHealths = new ArrayList<>();
                            nodeHealths.put(ringHost, nodeHealth);
                            LOG.warn("Failed getting cluster health for " + ringHost, x);
                        } finally {
                            lastExecuted.put(ringHost, System.currentTimeMillis());
                            nodeRecency.put(ringHost.getHost() + ":" + ringHost.getPort(), System.currentTimeMillis());
                            currentlyExecuting.remove(ringHost);
                        }
                    });
                } else {
                    // Nothing scheduled; release the guard we just acquired.
                    currentlyExecuting.remove(ringHost);
                }
            }
        }
        return nodeHealths;
    }

    /** Pairs an instance descriptor with its rolling health-sample buffer. */
    private static class InstanceSparseCircularHitsBucketBuffer {

        public final InstanceDescriptor instanceDescriptor;
        public final SparseCircularHitsBucketBuffer buffer;

        public InstanceSparseCircularHitsBucketBuffer(InstanceDescriptor instanceDescriptor, SparseCircularHitsBucketBuffer circularHitsBucketBuffer) {
            this.instanceDescriptor = instanceDescriptor;
            this.buffer = circularHitsBucketBuffer;
        }
    }

    /**
     * Maps a value to a 6-hex-digit RGB color (no leading '#'), zero-padded to 6 chars.
     *
     * @param value value whose hue is derived (divided by 3, offset by 1/3)
     * @param sat   HSB saturation in [0,1]
     * @return 6-character lowercase hex RGB string
     */
    public static String getHEXIdColor(double value, float sat) {
        float hue = (float) value / 3f;
        hue = (1f / 3f) + (hue * 2);
        String s = Integer.toHexString(Color.HSBtoRGB(hue, sat, 1f) & 0xffffff);
        // left-pad with zeros to exactly six hex digits
        return "000000".substring(s.length()) + s;
    }

    /**
     * Same hue mapping as {@link #getHEXIdColor(double, float)} but returns "r,g,b" decimal form.
     */
    public static String idColorRGB(double value, float sat) {
        float hue = (float) value / 3f;
        hue = (1f / 3f) + (hue * 2);
        Color color = new Color(Color.HSBtoRGB(hue, sat, 1f));
        return color.getRed() + "," + color.getGreen() + "," + color.getBlue();
    }

    /**
     * Maps a health value in [0,1] to a red→green "traffic light" hex RGB string (no '#').
     */
    public static String getHEXTrafficlightColor(double value, float sat) {
        String s = Integer.toHexString(Color.HSBtoRGB((float) value / 3f, sat, 1f) & 0xffffff);
        return "000000".substring(s.length()) + s;
    }

    /**
     * Maps a health value in [0,1] to an "r,g,b,a" string; alpha grows (log scale) as health drops,
     * so unhealthy values are more opaque.
     */
    public static String trafficlightColorRGBA(double value, float sat) {
        double alpha = Math.max(0, Math.min(1d, 1d - value));
        Color color = new Color(Color.HSBtoRGB((float) value / 3f, sat, 1f));
        return color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," + Math.log10(1d + (10 * alpha));
    }

    /**
     * Builds this node's {@link NodeHealth} by iterating local nannies, parsing each one's
     * committed health log as a {@link ServiceHealth} JSON payload. The node's overall health
     * is the minimum across instances; a parse failure pins it to 0.
     *
     * @throws Exception if the nannies cannot be iterated
     */
    public NodeHealth buildNodeHealth() throws Exception {
        NodeHealth nodeHealth = new NodeHealth(ringHostKey.getKey(), ringHost.getHost(), ringHost.getPort());
        for (Map.Entry<String, Nanny> nanny : ubaService.iterateNannies()) {
            Nanny n = nanny.getValue();
            InstanceDescriptor id = n.getInstanceDescriptor();
            List<String> log = n.getDeployLog().commitedLog();
            List<String> copyLog = n.getHealthLog().commitedLog();
            ServiceHealth serviceHealth = null;
            try {
                LOG.debug("copyLog {}", copyLog);
                if (!copyLog.isEmpty()) {
                    // health log lines concatenated form one JSON document
                    serviceHealth = mapper.readValue(Joiner.on("").join(copyLog), ServiceHealth.class);
                    nodeHealth.health = Math.min(nodeHealth.health, serviceHealth.health);
                }
            } catch (Exception x) {
                LOG.warn("Failed parsing service health for " + id + " " + Joiner.on("").join(copyLog), x);
                nodeHealth.health = 0.0d;
                log.add("Failed to parse serviceHealth" + x.getMessage());
            }
            if (serviceHealth == null) {
                serviceHealth = new ServiceHealth();
                serviceHealth.health = 0;
            }
            String uptime;
            if (nanny.getValue().getStartTimeMillis() > 0) {
                uptime = shortHumanReadableUptime(System.currentTimeMillis() - nanny.getValue().getStartTimeMillis());
            } else {
                // unknown start time: at least as old as this Upena process
                uptime = ">" + shortHumanReadableUptime(System.currentTimeMillis() - startupTime);
            }

            NannyHealth nannyHealth = new NannyHealth(uptime, id, log, serviceHealth);
            if (nanny.getValue().getUnexpectedRestartTimestamp() > -1) {
                nannyHealth.unexpectedRestart = nanny.getValue().getUnexpectedRestartTimestamp();
            }
            Map<String, String> lastOverrideFetchedVersion = upenaConfigStore.changesSinceLastFetch(id.instanceKey, "override");
            Map<String, String> lastOverrideHealthFetchedVersion = upenaConfigStore.changesSinceLastFetch(id.instanceKey, "override-health");
            nannyHealth.configIsStale = lastOverrideFetchedVersion;
            nannyHealth.healthConfigIsStale = lastOverrideHealthFetchedVersion;
            nannyHealth.status = n.getStatus();
            nodeHealth.nannyHealths.add(nannyHealth);
        }
        return nodeHealth;
    }

    /** Health of one node: overall min health plus per-instance detail. JSON-serialized between nodes. */
    static public class NodeHealth {

        public double health = 1d;
        public String hostKey;
        public String host;
        public int port;
        public List<NannyHealth> nannyHealths = new ArrayList<>();

        public NodeHealth() {
        }

        public NodeHealth(String hostKey, String host, int port) {
            this.hostKey = hostKey;
            this.host = host;
            this.port = port;
        }
    }

    /** Health of one managed instance (nanny): uptime, logs, status, and stale-config flags. */
    static public class NannyHealth {

        public String uptime;
        public InstanceDescriptor instanceDescriptor;
        public List<String> log;
        public ServiceHealth serviceHealth;
        public String status;
        public long unexpectedRestart = -1;
        public Map<String, String> configIsStale = new HashMap<>();
        public Map<String, String> healthConfigIsStale = new HashMap<>();

        public NannyHealth() {
        }

        public NannyHealth(String uptime, InstanceDescriptor instanceDescriptor, List<String> log, ServiceHealth serviceHealth) {
            this.uptime = uptime;
            this.instanceDescriptor = instanceDescriptor;
            this.log = log;
            this.serviceHealth = serviceHealth;
        }
    }

    /** Health payload reported by a service instance, parsed from its health log JSON. */
    static public class ServiceHealth {

        public String version = "unknown";
        public boolean fullyOnline = false;
        public double health = 0d;
        public List<Health> healthChecks = new ArrayList<>();
    }

    /** A single health check result within a {@link ServiceHealth}. */
    static public class Health {

        public String name;
        public double health;
        public String status;
        public String description;
        public String resolution;
        public long timestamp;
        public long checkIntervalMillis;

        @Override
        public String toString() {
            return "Health{"
                + "name=" + name
                + ", health=" + health
                + ", status=" + status
                + ", description=" + description
                + ", resolution=" + resolution
                + ", timestamp=" + timestamp
                + ", checkIntervalMillis=" + checkIntervalMillis
                + '}';
        }
    }

    /**
     * Formats a duration as zero-padded "HH:MM:SS" (hours are not capped at 24).
     * Negative input is returned as its decimal string, unchanged.
     */
    public static String humanReadableUptime(long millis) {
        if (millis < 0) {
            return String.valueOf(millis);
        }
        long hours = TimeUnit.MILLISECONDS.toHours(millis);
        millis -= TimeUnit.HOURS.toMillis(hours);
        long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
        millis -= TimeUnit.MINUTES.toMillis(minutes);
        long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);

        StringBuilder sb = new StringBuilder(64);
        if (hours < 10) {
            sb.append('0');
        }
        sb.append(hours);
        sb.append(":");
        if (minutes < 10) {
            sb.append('0');
        }
        sb.append(minutes);
        sb.append(":");
        if (seconds < 10) {
            sb.append('0');
        }
        sb.append(seconds);
        return (sb.toString());
    }

    /**
     * Formats a duration compactly, omitting zero components: e.g. "1d 2h 3m 4s", "5m", "12s".
     * Negative input is returned as its decimal string, unchanged. Sub-second durations
     * return "0s" (previously this returned the empty string).
     */
    public static String shortHumanReadableUptime(long millis) {
        if (millis < 0) {
            return String.valueOf(millis);
        }
        long days = TimeUnit.MILLISECONDS.toDays(millis);
        millis -= TimeUnit.DAYS.toMillis(days);
        long hours = TimeUnit.MILLISECONDS.toHours(millis);
        millis -= TimeUnit.HOURS.toMillis(hours);
        long minutes = TimeUnit.MILLISECONDS.toMinutes(millis);
        millis -= TimeUnit.MINUTES.toMillis(minutes);
        long seconds = TimeUnit.MILLISECONDS.toSeconds(millis);

        StringBuilder sb = new StringBuilder(64);
        if (days > 0) {
            sb.append(days).append("d ");
        }
        if (hours > 0) {
            sb.append(hours).append("h ");
        }
        if (minutes > 0) {
            sb.append(minutes).append("m ");
        }
        if (seconds > 0) {
            sb.append(seconds).append("s");
        }
        if (sb.length() == 0) {
            return "0s";
        }
        // drop the trailing space left when the seconds component is zero
        return sb.toString().trim();
    }
}
| |
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
//NON-STANDARD LICENCE HEADER HERE - THAT'S OK
//Class comes from Apache Commons Lang, added some tiny changes
package org.mockito.internal.matchers.apachecommons;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* <p>Assists in implementing {@link Object#equals(Object)} methods.</p>
*
* <p> This class provides methods to build a good equals method for any
* class. It follows rules laid out in
* <a href="http://java.sun.com/docs/books/effective/index.html">Effective Java</a>
* , by Joshua Bloch. In particular the rule for comparing <code>doubles</code>,
* <code>floats</code>, and arrays can be tricky. Also, making sure that
* <code>equals()</code> and <code>hashCode()</code> are consistent can be
* difficult.</p>
*
* <p>Two Objects that compare as equals must generate the same hash code,
* but two Objects with the same hash code do not have to be equal.</p>
*
* <p>All relevant fields should be included in the calculation of equals.
* Derived fields may be ignored. In particular, any field used in
* generating a hash code must be used in the equals method, and vice
* versa.</p>
*
* <p>Typical use for the code is as follows:</p>
* <pre class="code"><code class="java">
* public boolean equals(Object obj) {
* if (obj == null) { return false; }
* if (obj == this) { return true; }
* if (obj.getClass() != getClass()) {
* return false;
* }
* MyClass rhs = (MyClass) obj;
* return new EqualsBuilder()
* .appendSuper(super.equals(obj))
* .append(field1, rhs.field1)
* .append(field2, rhs.field2)
* .append(field3, rhs.field3)
* .isEquals();
* }
* </code></pre>
*
* <p> Alternatively, there is a method that uses reflection to determine
* the fields to test. Because these fields are usually private, the method,
* <code>reflectionEquals</code>, uses <code>AccessibleObject.setAccessible</code> to
* change the visibility of the fields. This will fail under a security
* manager, unless the appropriate permissions are set up correctly. It is
* also slower than testing explicitly.</p>
*
* <p> A typical invocation for this method would look like:</p>
* <pre class="code"><code class="java">
* public boolean equals(Object obj) {
* return EqualsBuilder.reflectionEquals(this, obj);
* }
* </code></pre>
*
* @author <a href="mailto:steve.downey@netfolio.com">Steve Downey</a>
* @author Stephen Colebourne
* @author Gary Gregory
* @author Pete Gieser
* @author Arun Mammen Thomas
* @since 1.0
* @version $Id: EqualsBuilder.java 611543 2008-01-13 07:00:22Z bayard $
*/
@SuppressWarnings("unchecked")
class EqualsBuilder {
/**
* If the fields tested are equals.
* The default value is <code>true</code>.
*/
private boolean isEquals = true;
/**
* <p>Constructor for EqualsBuilder.</p>
*
* <p>Starts off assuming that equals is <code>true</code>.</p>
* @see Object#equals(Object)
*/
public EqualsBuilder() {
// do nothing for now.
}
//-------------------------------------------------------------------------
/**
* <p>This method uses reflection to determine if the two <code>Object</code>s
* are equal.</p>
*
* <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly.</p>
*
* <p>Transient members will be not be tested, as they are likely derived
* fields, and not part of the value of the Object.</p>
*
* <p>Static fields will not be tested. Superclass fields will be included.</p>
*
* @param lhs <code>this</code> object
* @param rhs the other object
* @return <code>true</code> if the two Objects have tested equals.
*/
public static boolean reflectionEquals(Object lhs, Object rhs) {
return reflectionEquals(lhs, rhs, false, null, null);
}
/**
* <p>This method uses reflection to determine if the two <code>Object</code>s
* are equal.</p>
*
* <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly.</p>
*
* <p>Transient members will be not be tested, as they are likely derived
* fields, and not part of the value of the Object.</p>
*
* <p>Static fields will not be tested. Superclass fields will be included.</p>
*
* @param lhs <code>this</code> object
* @param rhs the other object
* @param excludeFields array of field names to exclude from testing
* @return <code>true</code> if the two Objects have tested equals.
*/
public static boolean reflectionEquals(Object lhs, Object rhs, String[] excludeFields) {
return reflectionEquals(lhs, rhs, false, null, excludeFields);
}
/**
* <p>This method uses reflection to determine if the two <code>Object</code>s
* are equal.</p>
*
* <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly.</p>
*
* <p>If the TestTransients parameter is set to <code>true</code>, transient
* members will be tested, otherwise they are ignored, as they are likely
* derived fields, and not part of the value of the <code>Object</code>.</p>
*
* <p>Static fields will not be tested. Superclass fields will be included.</p>
*
* @param lhs <code>this</code> object
* @param rhs the other object
* @param testTransients whether to include transient fields
* @return <code>true</code> if the two Objects have tested equals.
*/
public static boolean reflectionEquals(Object lhs, Object rhs, boolean testTransients) {
return reflectionEquals(lhs, rhs, testTransients, null, null);
}
/**
* <p>This method uses reflection to determine if the two <code>Object</code>s
* are equal.</p>
*
* <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly.</p>
*
* <p>If the testTransients parameter is set to <code>true</code>, transient
* members will be tested, otherwise they are ignored, as they are likely
* derived fields, and not part of the value of the <code>Object</code>.</p>
*
* <p>Static fields will not be included. Superclass fields will be appended
* up to and including the specified superclass. A null superclass is treated
* as java.lang.Object.</p>
*
* @param lhs <code>this</code> object
* @param rhs the other object
* @param testTransients whether to include transient fields
* @param reflectUpToClass the superclass to reflect up to (inclusive),
* may be <code>null</code>
* @return <code>true</code> if the two Objects have tested equals.
* @since 2.0
*/
public static boolean reflectionEquals(Object lhs, Object rhs, boolean testTransients, Class reflectUpToClass) {
return reflectionEquals(lhs, rhs, testTransients, reflectUpToClass, null);
}
/**
* <p>This method uses reflection to determine if the two <code>Object</code>s
* are equal.</p>
*
* <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
* fields. This means that it will throw a security exception if run under
* a security manager, if the permissions are not set up correctly. It is also
* not as efficient as testing explicitly.</p>
*
* <p>If the testTransients parameter is set to <code>true</code>, transient
* members will be tested, otherwise they are ignored, as they are likely
* derived fields, and not part of the value of the <code>Object</code>.</p>
*
* <p>Static fields will not be included. Superclass fields will be appended
* up to and including the specified superclass. A null superclass is treated
* as java.lang.Object.</p>
*
* @param lhs <code>this</code> object
* @param rhs the other object
* @param testTransients whether to include transient fields
* @param reflectUpToClass the superclass to reflect up to (inclusive),
* may be <code>null</code>
* @param excludeFields array of field names to exclude from testing
* @return <code>true</code> if the two Objects have tested equals.
* @since 2.0
*/
public static boolean reflectionEquals(Object lhs, Object rhs, boolean testTransients, Class reflectUpToClass,
String[] excludeFields) {
if (lhs == rhs) {
return true;
}
if (lhs == null || rhs == null) {
return false;
}
// Find the leaf class since there may be transients in the leaf
// class or in classes between the leaf and root.
// If we are not testing transients or a subclass has no ivars,
// then a subclass can test equals to a superclass.
Class lhsClass = lhs.getClass();
Class rhsClass = rhs.getClass();
Class testClass;
if (lhsClass.isInstance(rhs)) {
testClass = lhsClass;
if (!rhsClass.isInstance(lhs)) {
// rhsClass is a subclass of lhsClass
testClass = rhsClass;
}
} else if (rhsClass.isInstance(lhs)) {
testClass = rhsClass;
if (!lhsClass.isInstance(rhs)) {
// lhsClass is a subclass of rhsClass
testClass = lhsClass;
}
} else {
// The two classes are not related.
return false;
}
EqualsBuilder equalsBuilder = new EqualsBuilder();
try {
reflectionAppend(lhs, rhs, testClass, equalsBuilder, testTransients, excludeFields);
while (testClass.getSuperclass() != null && testClass != reflectUpToClass) {
testClass = testClass.getSuperclass();
reflectionAppend(lhs, rhs, testClass, equalsBuilder, testTransients, excludeFields);
}
} catch (IllegalArgumentException e) {
// In this case, we tried to test a subclass vs. a superclass and
// the subclass has ivars or the ivars are transient and
// we are testing transients.
// If a subclass has ivars that we are trying to test them, we get an
// exception and we know that the objects are not equal.
return false;
}
return equalsBuilder.isEquals();
}
/**
* <p>Appends the fields and values defined by the given object of the
* given Class.</p>
*
* @param lhs the left hand object
* @param rhs the right hand object
* @param clazz the class to append details of
* @param builder the builder to append to
* @param useTransients whether to test transient fields
* @param excludeFields array of field names to exclude from testing
*/
private static void reflectionAppend(
Object lhs,
Object rhs,
Class clazz,
EqualsBuilder builder,
boolean useTransients,
String[] excludeFields) {
Field[] fields = clazz.getDeclaredFields();
List excludedFieldList = excludeFields != null ? Arrays.asList(excludeFields) : Collections.EMPTY_LIST;
AccessibleObject.setAccessible(fields, true);
for (int i = 0; i < fields.length && builder.isEquals; i++) {
Field f = fields[i];
if (!excludedFieldList.contains(f.getName())
&& (f.getName().indexOf('$') == -1)
&& (useTransients || !Modifier.isTransient(f.getModifiers()))
&& (!Modifier.isStatic(f.getModifiers()))) {
try {
builder.append(f.get(lhs), f.get(rhs));
} catch (IllegalAccessException e) {
//this can't happen. Would get a Security exception instead
//throw a runtime exception in case the impossible happens.
throw new InternalError("Unexpected IllegalAccessException");
}
}
}
}
//-------------------------------------------------------------------------
/**
* <p>Adds the result of <code>super.equals()</code> to this builder.</p>
*
* @param superEquals the result of calling <code>super.equals()</code>
* @return EqualsBuilder - used to chain calls.
* @since 2.0
*/
public EqualsBuilder appendSuper(boolean superEquals) {
if (isEquals == false) {
return this;
}
isEquals = superEquals;
return this;
}
//-------------------------------------------------------------------------
/**
* <p>Test if two <code>Object</code>s are equal using their
* <code>equals</code> method.</p>
*
* @param lhs the left hand object
* @param rhs the right hand object
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(Object lhs, Object rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
Class lhsClass = lhs.getClass();
if (!lhsClass.isArray()) {
if (lhs instanceof java.math.BigDecimal && rhs instanceof java.math.BigDecimal) {
isEquals = (((java.math.BigDecimal) lhs).compareTo((java.math.BigDecimal) rhs) == 0);
} else {
// The simple case, not an array, just test the element
isEquals = lhs.equals(rhs);
}
} else if (lhs.getClass() != rhs.getClass()) {
// Here when we compare different dimensions, for example: a boolean[][] to a boolean[]
this.setEquals(false);
// 'Switch' on type of array, to dispatch to the correct handler
// This handles multi dimensional arrays of the same depth
} else if (lhs instanceof long[]) {
append((long[]) lhs, (long[]) rhs);
} else if (lhs instanceof int[]) {
append((int[]) lhs, (int[]) rhs);
} else if (lhs instanceof short[]) {
append((short[]) lhs, (short[]) rhs);
} else if (lhs instanceof char[]) {
append((char[]) lhs, (char[]) rhs);
} else if (lhs instanceof byte[]) {
append((byte[]) lhs, (byte[]) rhs);
} else if (lhs instanceof double[]) {
append((double[]) lhs, (double[]) rhs);
} else if (lhs instanceof float[]) {
append((float[]) lhs, (float[]) rhs);
} else if (lhs instanceof boolean[]) {
append((boolean[]) lhs, (boolean[]) rhs);
} else {
// Not an array of primitives
append((Object[]) lhs, (Object[]) rhs);
}
return this;
}
/**
* <p>
* Test if two <code>long</code> s are equal.
* </p>
*
* @param lhs
* the left hand <code>long</code>
* @param rhs
* the right hand <code>long</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(long lhs, long rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Test if two <code>int</code>s are equal.</p>
*
* @param lhs the left hand <code>int</code>
* @param rhs the right hand <code>int</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(int lhs, int rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Test if two <code>short</code>s are equal.</p>
*
* @param lhs the left hand <code>short</code>
* @param rhs the right hand <code>short</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(short lhs, short rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Test if two <code>char</code>s are equal.</p>
*
* @param lhs the left hand <code>char</code>
* @param rhs the right hand <code>char</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(char lhs, char rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Test if two <code>byte</code>s are equal.</p>
*
* @param lhs the left hand <code>byte</code>
* @param rhs the right hand <code>byte</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(byte lhs, byte rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Test if two <code>double</code>s are equal by testing that the
* pattern of bits returned by <code>doubleToLong</code> are equal.</p>
*
* <p>This handles NaNs, Infinities, and <code>-0.0</code>.</p>
*
* <p>It is compatible with the hash code generated by
* <code>HashCodeBuilder</code>.</p>
*
* @param lhs the left hand <code>double</code>
* @param rhs the right hand <code>double</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(double lhs, double rhs) {
if (isEquals == false) {
return this;
}
return append(Double.doubleToLongBits(lhs), Double.doubleToLongBits(rhs));
}
/**
* <p>Test if two <code>float</code>s are equal byt testing that the
* pattern of bits returned by doubleToLong are equal.</p>
*
* <p>This handles NaNs, Infinities, and <code>-0.0</code>.</p>
*
* <p>It is compatible with the hash code generated by
* <code>HashCodeBuilder</code>.</p>
*
* @param lhs the left hand <code>float</code>
* @param rhs the right hand <code>float</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(float lhs, float rhs) {
if (isEquals == false) {
return this;
}
return append(Float.floatToIntBits(lhs), Float.floatToIntBits(rhs));
}
/**
* <p>Test if two <code>booleans</code>s are equal.</p>
*
* @param lhs the left hand <code>boolean</code>
* @param rhs the right hand <code>boolean</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(boolean lhs, boolean rhs) {
if (isEquals == false) {
return this;
}
isEquals = (lhs == rhs);
return this;
}
/**
* <p>Performs a deep comparison of two <code>Object</code> arrays.</p>
*
* <p>This also will be called for the top level of
* multi-dimensional, ragged, and multi-typed arrays.</p>
*
* @param lhs the left hand <code>Object[]</code>
* @param rhs the right hand <code>Object[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(Object[] lhs, Object[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>long</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(long, long)} is used.</p>
*
* @param lhs the left hand <code>long[]</code>
* @param rhs the right hand <code>long[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(long[] lhs, long[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>int</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(int, int)} is used.</p>
*
* @param lhs the left hand <code>int[]</code>
* @param rhs the right hand <code>int[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(int[] lhs, int[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>short</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(short, short)} is used.</p>
*
* @param lhs the left hand <code>short[]</code>
* @param rhs the right hand <code>short[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(short[] lhs, short[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>char</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(char, char)} is used.</p>
*
* @param lhs the left hand <code>char[]</code>
* @param rhs the right hand <code>char[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(char[] lhs, char[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>byte</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(byte, byte)} is used.</p>
*
* @param lhs the left hand <code>byte[]</code>
* @param rhs the right hand <code>byte[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(byte[] lhs, byte[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>double</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(double, double)} is used.</p>
*
* @param lhs the left hand <code>double[]</code>
* @param rhs the right hand <code>double[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(double[] lhs, double[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>float</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(float, float)} is used.</p>
*
* @param lhs the left hand <code>float[]</code>
* @param rhs the right hand <code>float[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(float[] lhs, float[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Deep comparison of array of <code>boolean</code>. Length and all
* values are compared.</p>
*
* <p>The method {@link #append(boolean, boolean)} is used.</p>
*
* @param lhs the left hand <code>boolean[]</code>
* @param rhs the right hand <code>boolean[]</code>
* @return EqualsBuilder - used to chain calls.
*/
public EqualsBuilder append(boolean[] lhs, boolean[] rhs) {
if (isEquals == false) {
return this;
}
if (lhs == rhs) {
return this;
}
if (lhs == null || rhs == null) {
this.setEquals(false);
return this;
}
if (lhs.length != rhs.length) {
this.setEquals(false);
return this;
}
for (int i = 0; i < lhs.length && isEquals; ++i) {
append(lhs[i], rhs[i]);
}
return this;
}
/**
* <p>Returns <code>true</code> if the fields that have been checked
* are all equal.</p>
*
* @return boolean
*/
public boolean isEquals() {
return this.isEquals;
}
    /**
     * Sets the <code>isEquals</code> value.
     *
     * @param isEquals The value to set.
     * @since 2.1
     */
    protected void setEquals(boolean isEquals) {
        // 'this.' is required: the parameter shadows the field of the same name.
        this.isEquals = isEquals;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.locator;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.gms.ApplicationState;
import org.apache.cassandra.gms.EndpointState;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.JVMStabilityInspector;
/**
 * A snitch that assumes a Cloudstack Zone follows the typical convention
 * <country>-<location>-<availability zone> and uses the country/location
 * tuple as a datacenter and the availability zone as a rack
 */
public class CloudstackSnitch extends AbstractNetworkTopologySnitch
{
    protected static final Logger logger = LoggerFactory.getLogger(CloudstackSnitch.class);
    protected static final String ZONE_NAME_QUERY_URI = "/latest/meta-data/availability-zone";

    // Lazily-loaded DC/rack info from the system keyspace, used as a fallback
    // for endpoints whose gossip state is not (yet) available.
    private Map<InetAddress, Map<String, String>> savedEndpoints;
    private static final String DEFAULT_DC = "UNKNOWN-DC";
    private static final String DEFAULT_RACK = "UNKNOWN-RACK";

    // Candidate DHCP lease files scanned to discover the metadata server address.
    private static final String[] LEASE_FILES =
    {
        "file:///var/lib/dhcp/dhclient.eth0.leases",
        "file:///var/lib/dhclient/dhclient.eth0.leases"
    };

    protected String csZoneDc;
    protected String csZoneRack;

    /**
     * Queries the Cloudstack metadata service for this node's availability
     * zone and splits it into a datacenter ("<country>-<location>") and a
     * rack ("<availability zone>").
     *
     * @throws ConfigurationException if no metadata endpoint can be located or
     *         the zone name is not of the expected three-part form
     * @throws IOException if querying the metadata service fails
     */
    public CloudstackSnitch() throws IOException, ConfigurationException
    {
        String endpoint = csMetadataEndpoint();
        String zone = csQueryMetadata(endpoint + ZONE_NAME_QUERY_URI);
        String[] zoneParts = zone.split("-");

        if (zoneParts.length != 3)
        {
            throw new ConfigurationException("CloudstackSnitch cannot handle invalid zone format: " + zone);
        }
        csZoneDc = zoneParts[0] + "-" + zoneParts[1];
        csZoneRack = zoneParts[2];
    }

    /**
     * Returns the rack of the given endpoint: this node's own rack for the
     * broadcast address, the gossiped RACK state when available, otherwise
     * the value persisted in the system keyspace, falling back to
     * {@link #DEFAULT_RACK}.
     */
    public String getRack(InetAddress endpoint)
    {
        if (endpoint.equals(FBUtilities.getBroadcastAddress()))
            return csZoneRack;
        EndpointState state = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
        if (state == null || state.getApplicationState(ApplicationState.RACK) == null)
        {
            if (savedEndpoints == null)
                savedEndpoints = SystemKeyspace.loadDcRackInfo();
            if (savedEndpoints.containsKey(endpoint))
                return savedEndpoints.get(endpoint).get("rack");
            return DEFAULT_RACK;
        }
        return state.getApplicationState(ApplicationState.RACK).value;
    }

    /**
     * Returns the datacenter of the given endpoint: this node's own DC for
     * the broadcast address, the gossiped DC state when available, otherwise
     * the value persisted in the system keyspace, falling back to
     * {@link #DEFAULT_DC}.
     */
    public String getDatacenter(InetAddress endpoint)
    {
        if (endpoint.equals(FBUtilities.getBroadcastAddress()))
            return csZoneDc;
        EndpointState state = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
        if (state == null || state.getApplicationState(ApplicationState.DC) == null)
        {
            if (savedEndpoints == null)
                savedEndpoints = SystemKeyspace.loadDcRackInfo();
            if (savedEndpoints.containsKey(endpoint))
                return savedEndpoints.get(endpoint).get("data_center");
            return DEFAULT_DC;
        }
        return state.getApplicationState(ApplicationState.DC).value;
    }

    /**
     * Performs an HTTP GET against the given metadata URL and returns the
     * response body decoded as UTF-8.
     *
     * @throws ConfigurationException if the URL is malformed or the server
     *         does not answer with status 200
     * @throws IOException if reading the response fails
     */
    String csQueryMetadata(String url) throws ConfigurationException, IOException
    {
        HttpURLConnection conn = null;
        DataInputStream is = null;

        try
        {
            conn = (HttpURLConnection) new URL(url).openConnection();
        }
        catch (Exception e)
        {
            throw new ConfigurationException("CloudstackSnitch cannot query wrong metadata URL: " + url);
        }
        try
        {
            conn.setRequestMethod("GET");
            if (conn.getResponseCode() != 200)
            {
                throw new ConfigurationException("CloudstackSnitch was unable to query metadata.");
            }

            // Read the body to EOF instead of trusting Content-Length: the
            // header may be absent (getContentLength() returns -1, e.g. with
            // chunked transfer encoding), which previously caused a
            // NegativeArraySizeException when sizing the buffer.
            is = new DataInputStream(new BufferedInputStream(conn.getInputStream()));
            ByteArrayOutputStream body = new ByteArrayOutputStream();
            byte[] chunk = new byte[1024];
            int n;
            while ((n = is.read(chunk)) != -1)
            {
                body.write(chunk, 0, n);
            }
            return new String(body.toByteArray(), StandardCharsets.UTF_8);
        }
        finally
        {
            FileUtils.close(is);
            conn.disconnect();
        }
    }

    /**
     * Locates the metadata server by scanning the known DHCP lease files and
     * returning the endpoint parsed from the first readable one.
     *
     * @throws ConfigurationException if no lease file yields an endpoint
     */
    String csMetadataEndpoint() throws ConfigurationException
    {
        for (String lease_uri: LEASE_FILES)
        {
            try
            {
                File lease_file = new File(new URI(lease_uri));
                if (lease_file.exists())
                {
                    return csEndpointFromLease(lease_file);
                }
            }
            catch (Exception e)
            {
                // Best-effort probing: log-worthy JVM errors are surfaced, any
                // other failure just moves on to the next candidate file.
                JVMStabilityInspector.inspectThrowable(e);
                continue;
            }
        }
        throw new ConfigurationException("No valid DHCP lease file could be found.");
    }

    /**
     * Extracts the DHCP server identifier (the metadata server) from a lease
     * file and returns it as an {@code http://} URL prefix.
     *
     * @throws ConfigurationException if the file cannot be read or contains
     *         no dhcp-server-identifier option
     */
    String csEndpointFromLease(File lease) throws ConfigurationException
    {
        BufferedReader reader = null;
        String line = null;
        String endpoint = null;
        Pattern identifierPattern = Pattern.compile("^[ \t]*option dhcp-server-identifier (.*);$");

        try
        {
            reader = new BufferedReader(new FileReader(lease));
            while ((line = reader.readLine()) != null)
            {
                Matcher matcher = identifierPattern.matcher(line);
                if (matcher.find())
                {
                    endpoint = matcher.group(1);
                    break;
                }
            }
        }
        catch (Exception e)
        {
            throw new ConfigurationException("CloudstackSnitch cannot access lease file.");
        }
        finally
        {
            FileUtils.closeQuietly(reader);
        }

        if (endpoint == null)
        {
            throw new ConfigurationException("No metadata server could be found in lease file.");
        }
        return "http://" + endpoint;
    }
}
| |
/*
* Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.java.swing.plaf.motif;
import javax.swing.*;
import javax.swing.filechooser.*;
import javax.swing.event.*;
import javax.swing.plaf.*;
import javax.swing.plaf.basic.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
import sun.awt.shell.ShellFolder;
import sun.swing.SwingUtilities2;
/**
* Motif FileChooserUI.
*
* @author Jeff Dinkins
*/
public class MotifFileChooserUI extends BasicFileChooserUI {
private FilterComboBoxModel filterComboBoxModel;
protected JList directoryList = null;
protected JList fileList = null;
protected JTextField pathField = null;
protected JComboBox filterComboBox = null;
protected JTextField filenameTextField = null;
private static final Dimension hstrut10 = new Dimension(10, 1);
private static final Dimension vstrut10 = new Dimension(1, 10);
private static final Insets insets = new Insets(10, 10, 10, 10);
private static Dimension prefListSize = new Dimension(75, 150);
private static Dimension WITH_ACCELERATOR_PREF_SIZE = new Dimension(650, 450);
private static Dimension PREF_SIZE = new Dimension(350, 450);
private static Dimension MIN_SIZE = new Dimension(200, 300);
private static Dimension PREF_ACC_SIZE = new Dimension(10, 10);
private static Dimension ZERO_ACC_SIZE = new Dimension(1, 1);
private static Dimension MAX_SIZE = new Dimension(Short.MAX_VALUE, Short.MAX_VALUE);
private static final Insets buttonMargin = new Insets(3, 3, 3, 3);
private JPanel bottomPanel;
protected JButton approveButton;
private String enterFolderNameLabelText = null;
private int enterFolderNameLabelMnemonic = 0;
private String enterFileNameLabelText = null;
private int enterFileNameLabelMnemonic = 0;
private String filesLabelText = null;
private int filesLabelMnemonic = 0;
private String foldersLabelText = null;
private int foldersLabelMnemonic = 0;
private String pathLabelText = null;
private int pathLabelMnemonic = 0;
private String filterLabelText = null;
private int filterLabelMnemonic = 0;
private JLabel fileNameLabel;
private void populateFileNameLabel() {
if (getFileChooser().getFileSelectionMode() == JFileChooser.DIRECTORIES_ONLY) {
fileNameLabel.setText(enterFolderNameLabelText);
fileNameLabel.setDisplayedMnemonic(enterFolderNameLabelMnemonic);
} else {
fileNameLabel.setText(enterFileNameLabelText);
fileNameLabel.setDisplayedMnemonic(enterFileNameLabelMnemonic);
}
}
private String fileNameString(File file) {
if (file == null) {
return null;
} else {
JFileChooser fc = getFileChooser();
if (fc.isDirectorySelectionEnabled() && !fc.isFileSelectionEnabled()) {
return file.getPath();
} else {
return file.getName();
}
}
}
private String fileNameString(File[] files) {
StringBuffer buf = new StringBuffer();
for (int i = 0; files != null && i < files.length; i++) {
if (i > 0) {
buf.append(" ");
}
if (files.length > 1) {
buf.append("\"");
}
buf.append(fileNameString(files[i]));
if (files.length > 1) {
buf.append("\"");
}
}
return buf.toString();
}
public MotifFileChooserUI(JFileChooser filechooser) {
super(filechooser);
}
public String getFileName() {
if(filenameTextField != null) {
return filenameTextField.getText();
} else {
return null;
}
}
public void setFileName(String filename) {
if(filenameTextField != null) {
filenameTextField.setText(filename);
}
}
public String getDirectoryName() {
return pathField.getText();
}
public void setDirectoryName(String dirname) {
pathField.setText(dirname);
}
public void ensureFileIsVisible(JFileChooser fc, File f) {
// PENDING(jeff)
}
public void rescanCurrentDirectory(JFileChooser fc) {
getModel().validateFileCache();
}
public PropertyChangeListener createPropertyChangeListener(JFileChooser fc) {
return new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent e) {
String prop = e.getPropertyName();
if(prop.equals(JFileChooser.SELECTED_FILE_CHANGED_PROPERTY)) {
File f = (File) e.getNewValue();
if(f != null) {
setFileName(getFileChooser().getName(f));
}
} else if (prop.equals(JFileChooser.SELECTED_FILES_CHANGED_PROPERTY)) {
File[] files = (File[]) e.getNewValue();
JFileChooser fc = getFileChooser();
if (files != null && files.length > 0 && (files.length > 1 || fc.isDirectorySelectionEnabled()
|| !files[0].isDirectory())) {
setFileName(fileNameString(files));
}
} else if (prop.equals(JFileChooser.FILE_FILTER_CHANGED_PROPERTY)) {
fileList.clearSelection();
} else if(prop.equals(JFileChooser.DIRECTORY_CHANGED_PROPERTY)) {
directoryList.clearSelection();
ListSelectionModel sm = directoryList.getSelectionModel();
if (sm instanceof DefaultListSelectionModel) {
((DefaultListSelectionModel)sm).moveLeadSelectionIndex(0);
sm.setAnchorSelectionIndex(0);
}
fileList.clearSelection();
sm = fileList.getSelectionModel();
if (sm instanceof DefaultListSelectionModel) {
((DefaultListSelectionModel)sm).moveLeadSelectionIndex(0);
sm.setAnchorSelectionIndex(0);
}
File currentDirectory = getFileChooser().getCurrentDirectory();
if(currentDirectory != null) {
try {
setDirectoryName(ShellFolder.getNormalizedFile((File)e.getNewValue()).getPath());
} catch (IOException ioe) {
setDirectoryName(((File)e.getNewValue()).getAbsolutePath());
}
if ((getFileChooser().getFileSelectionMode() == JFileChooser.DIRECTORIES_ONLY) && !getFileChooser().isMultiSelectionEnabled()) {
setFileName(getDirectoryName());
}
}
} else if(prop.equals(JFileChooser.FILE_SELECTION_MODE_CHANGED_PROPERTY)) {
if (fileNameLabel != null) {
populateFileNameLabel();
}
directoryList.clearSelection();
} else if (prop.equals(JFileChooser.MULTI_SELECTION_ENABLED_CHANGED_PROPERTY)) {
if(getFileChooser().isMultiSelectionEnabled()) {
fileList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
} else {
fileList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
fileList.clearSelection();
getFileChooser().setSelectedFiles(null);
}
} else if (prop.equals(JFileChooser.ACCESSORY_CHANGED_PROPERTY)) {
if(getAccessoryPanel() != null) {
if(e.getOldValue() != null) {
getAccessoryPanel().remove((JComponent) e.getOldValue());
}
JComponent accessory = (JComponent) e.getNewValue();
if(accessory != null) {
getAccessoryPanel().add(accessory, BorderLayout.CENTER);
getAccessoryPanel().setPreferredSize(PREF_ACC_SIZE);
getAccessoryPanel().setMaximumSize(MAX_SIZE);
} else {
getAccessoryPanel().setPreferredSize(ZERO_ACC_SIZE);
getAccessoryPanel().setMaximumSize(ZERO_ACC_SIZE);
}
}
} else if (prop.equals(JFileChooser.APPROVE_BUTTON_TEXT_CHANGED_PROPERTY) ||
prop.equals(JFileChooser.APPROVE_BUTTON_TOOL_TIP_TEXT_CHANGED_PROPERTY) ||
prop.equals(JFileChooser.DIALOG_TYPE_CHANGED_PROPERTY)) {
approveButton.setText(getApproveButtonText(getFileChooser()));
approveButton.setToolTipText(getApproveButtonToolTipText(getFileChooser()));
} else if (prop.equals(JFileChooser.CONTROL_BUTTONS_ARE_SHOWN_CHANGED_PROPERTY)) {
doControlButtonsChanged(e);
} else if (prop.equals("componentOrientation")) {
ComponentOrientation o = (ComponentOrientation)e.getNewValue();
JFileChooser cc = (JFileChooser)e.getSource();
if (o != (ComponentOrientation)e.getOldValue()) {
cc.applyComponentOrientation(o);
}
}
}
};
}
//
// ComponentUI Interface Implementation methods
//
public static ComponentUI createUI(JComponent c) {
return new MotifFileChooserUI((JFileChooser)c);
}
public void installUI(JComponent c) {
super.installUI(c);
}
public void uninstallUI(JComponent c) {
getFileChooser().removeAll();
super.uninstallUI(c);
}
public void installComponents(JFileChooser fc) {
fc.setLayout(new BorderLayout(10, 10));
fc.setAlignmentX(JComponent.CENTER_ALIGNMENT);
JPanel interior = new JPanel() {
public Insets getInsets() {
return insets;
}
};
interior.setInheritsPopupMenu(true);
align(interior);
interior.setLayout(new BoxLayout(interior, BoxLayout.PAGE_AXIS));
fc.add(interior, BorderLayout.CENTER);
// PENDING(jeff) - I18N
JLabel l = new JLabel(pathLabelText);
l.setDisplayedMnemonic(pathLabelMnemonic);
align(l);
interior.add(l);
File currentDirectory = fc.getCurrentDirectory();
String curDirName = null;
if(currentDirectory != null) {
curDirName = currentDirectory.getPath();
}
pathField = new JTextField(curDirName) {
public Dimension getMaximumSize() {
Dimension d = super.getMaximumSize();
d.height = getPreferredSize().height;
return d;
}
};
pathField.setInheritsPopupMenu(true);
l.setLabelFor(pathField);
align(pathField);
// Change to folder on return
pathField.addActionListener(getUpdateAction());
interior.add(pathField);
interior.add(Box.createRigidArea(vstrut10));
// CENTER: left, right accessory
JPanel centerPanel = new JPanel();
centerPanel.setLayout(new BoxLayout(centerPanel, BoxLayout.LINE_AXIS));
align(centerPanel);
// left panel - Filter & folderList
JPanel leftPanel = new JPanel();
leftPanel.setLayout(new BoxLayout(leftPanel, BoxLayout.PAGE_AXIS));
align(leftPanel);
// add the filter PENDING(jeff) - I18N
l = new JLabel(filterLabelText);
l.setDisplayedMnemonic(filterLabelMnemonic);
align(l);
leftPanel.add(l);
filterComboBox = new JComboBox() {
public Dimension getMaximumSize() {
Dimension d = super.getMaximumSize();
d.height = getPreferredSize().height;
return d;
}
};
filterComboBox.setInheritsPopupMenu(true);
l.setLabelFor(filterComboBox);
filterComboBoxModel = createFilterComboBoxModel();
filterComboBox.setModel(filterComboBoxModel);
filterComboBox.setRenderer(createFilterComboBoxRenderer());
fc.addPropertyChangeListener(filterComboBoxModel);
align(filterComboBox);
leftPanel.add(filterComboBox);
// leftPanel.add(Box.createRigidArea(vstrut10));
// Add the Folder List PENDING(jeff) - I18N
l = new JLabel(foldersLabelText);
l.setDisplayedMnemonic(foldersLabelMnemonic);
align(l);
leftPanel.add(l);
JScrollPane sp = createDirectoryList();
sp.getVerticalScrollBar().setFocusable(false);
sp.getHorizontalScrollBar().setFocusable(false);
sp.setInheritsPopupMenu(true);
l.setLabelFor(sp.getViewport().getView());
leftPanel.add(sp);
leftPanel.setInheritsPopupMenu(true);
// create files list
JPanel rightPanel = new JPanel();
align(rightPanel);
rightPanel.setLayout(new BoxLayout(rightPanel, BoxLayout.PAGE_AXIS));
rightPanel.setInheritsPopupMenu(true);
l = new JLabel(filesLabelText);
l.setDisplayedMnemonic(filesLabelMnemonic);
align(l);
rightPanel.add(l);
sp = createFilesList();
l.setLabelFor(sp.getViewport().getView());
rightPanel.add(sp);
sp.setInheritsPopupMenu(true);
centerPanel.add(leftPanel);
centerPanel.add(Box.createRigidArea(hstrut10));
centerPanel.add(rightPanel);
centerPanel.setInheritsPopupMenu(true);
JComponent accessoryPanel = getAccessoryPanel();
JComponent accessory = fc.getAccessory();
if(accessoryPanel != null) {
if(accessory == null) {
accessoryPanel.setPreferredSize(ZERO_ACC_SIZE);
accessoryPanel.setMaximumSize(ZERO_ACC_SIZE);
} else {
getAccessoryPanel().add(accessory, BorderLayout.CENTER);
accessoryPanel.setPreferredSize(PREF_ACC_SIZE);
accessoryPanel.setMaximumSize(MAX_SIZE);
}
align(accessoryPanel);
centerPanel.add(accessoryPanel);
accessoryPanel.setInheritsPopupMenu(true);
}
interior.add(centerPanel);
interior.add(Box.createRigidArea(vstrut10));
// add the filename field PENDING(jeff) - I18N
fileNameLabel = new JLabel();
populateFileNameLabel();
align(fileNameLabel);
interior.add(fileNameLabel);
filenameTextField = new JTextField() {
public Dimension getMaximumSize() {
Dimension d = super.getMaximumSize();
d.height = getPreferredSize().height;
return d;
}
};
filenameTextField.setInheritsPopupMenu(true);
fileNameLabel.setLabelFor(filenameTextField);
filenameTextField.addActionListener(getApproveSelectionAction());
align(filenameTextField);
filenameTextField.setAlignmentX(JComponent.LEFT_ALIGNMENT);
interior.add(filenameTextField);
bottomPanel = getBottomPanel();
bottomPanel.add(new JSeparator(), BorderLayout.NORTH);
// Add buttons
JPanel buttonPanel = new JPanel();
align(buttonPanel);
buttonPanel.setLayout(new BoxLayout(buttonPanel, BoxLayout.LINE_AXIS));
buttonPanel.add(Box.createGlue());
approveButton = new JButton(getApproveButtonText(fc)) {
public Dimension getMaximumSize() {
return new Dimension(MAX_SIZE.width, this.getPreferredSize().height);
}
};
approveButton.setMnemonic(getApproveButtonMnemonic(fc));
approveButton.setToolTipText(getApproveButtonToolTipText(fc));
approveButton.setInheritsPopupMenu(true);
align(approveButton);
approveButton.setMargin(buttonMargin);
approveButton.addActionListener(getApproveSelectionAction());
buttonPanel.add(approveButton);
buttonPanel.add(Box.createGlue());
JButton updateButton = new JButton(updateButtonText) {
public Dimension getMaximumSize() {
return new Dimension(MAX_SIZE.width, this.getPreferredSize().height);
}
};
updateButton.setMnemonic(updateButtonMnemonic);
updateButton.setToolTipText(updateButtonToolTipText);
updateButton.setInheritsPopupMenu(true);
align(updateButton);
updateButton.setMargin(buttonMargin);
updateButton.addActionListener(getUpdateAction());
buttonPanel.add(updateButton);
buttonPanel.add(Box.createGlue());
JButton cancelButton = new JButton(cancelButtonText) {
public Dimension getMaximumSize() {
return new Dimension(MAX_SIZE.width, this.getPreferredSize().height);
}
};
cancelButton.setMnemonic(cancelButtonMnemonic);
cancelButton.setToolTipText(cancelButtonToolTipText);
cancelButton.setInheritsPopupMenu(true);
align(cancelButton);
cancelButton.setMargin(buttonMargin);
cancelButton.addActionListener(getCancelSelectionAction());
buttonPanel.add(cancelButton);
buttonPanel.add(Box.createGlue());
JButton helpButton = new JButton(helpButtonText) {
public Dimension getMaximumSize() {
return new Dimension(MAX_SIZE.width, this.getPreferredSize().height);
}
};
helpButton.setMnemonic(helpButtonMnemonic);
helpButton.setToolTipText(helpButtonToolTipText);
align(helpButton);
helpButton.setMargin(buttonMargin);
helpButton.setEnabled(false);
helpButton.setInheritsPopupMenu(true);
buttonPanel.add(helpButton);
buttonPanel.add(Box.createGlue());
buttonPanel.setInheritsPopupMenu(true);
bottomPanel.add(buttonPanel, BorderLayout.SOUTH);
bottomPanel.setInheritsPopupMenu(true);
if (fc.getControlButtonsAreShown()) {
fc.add(bottomPanel, BorderLayout.SOUTH);
}
}
protected JPanel getBottomPanel() {
if (bottomPanel == null) {
bottomPanel = new JPanel(new BorderLayout(0, 4));
}
return bottomPanel;
}
private void doControlButtonsChanged(PropertyChangeEvent e) {
if (getFileChooser().getControlButtonsAreShown()) {
getFileChooser().add(bottomPanel,BorderLayout.SOUTH);
} else {
getFileChooser().remove(getBottomPanel());
}
}
public void uninstallComponents(JFileChooser fc) {
fc.removeAll();
if (filterComboBoxModel != null) {
fc.removePropertyChangeListener(filterComboBoxModel);
}
}
protected void installStrings(JFileChooser fc) {
super.installStrings(fc);
Locale l = fc.getLocale();
enterFolderNameLabelText = UIManager.getString("FileChooser.enterFolderNameLabelText",l);
enterFolderNameLabelMnemonic = UIManager.getInt("FileChooser.enterFolderNameLabelMnemonic");
enterFileNameLabelText = UIManager.getString("FileChooser.enterFileNameLabelText",l);
enterFileNameLabelMnemonic = UIManager.getInt("FileChooser.enterFileNameLabelMnemonic");
filesLabelText = UIManager.getString("FileChooser.filesLabelText",l);
filesLabelMnemonic = UIManager.getInt("FileChooser.filesLabelMnemonic");
foldersLabelText = UIManager.getString("FileChooser.foldersLabelText",l);
foldersLabelMnemonic = UIManager.getInt("FileChooser.foldersLabelMnemonic");
pathLabelText = UIManager.getString("FileChooser.pathLabelText",l);
pathLabelMnemonic = UIManager.getInt("FileChooser.pathLabelMnemonic");
filterLabelText = UIManager.getString("FileChooser.filterLabelText",l);
filterLabelMnemonic = UIManager.getInt("FileChooser.filterLabelMnemonic");
}
protected void installIcons(JFileChooser fc) {
// Since motif doesn't have button icons, leave this empty
// which overrides the supertype icon loading
}
protected void uninstallIcons(JFileChooser fc) {
// Since motif doesn't have button icons, leave this empty
// which overrides the supertype icon loading
}
protected JScrollPane createFilesList() {
fileList = new JList();
if(getFileChooser().isMultiSelectionEnabled()) {
fileList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
} else {
fileList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
}
fileList.setModel(new MotifFileListModel());
fileList.getSelectionModel().removeSelectionInterval(0, 0);
fileList.setCellRenderer(new FileCellRenderer());
fileList.addListSelectionListener(createListSelectionListener(getFileChooser()));
fileList.addMouseListener(createDoubleClickListener(getFileChooser(), fileList));
fileList.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent e) {
JFileChooser chooser = getFileChooser();
if (SwingUtilities.isLeftMouseButton(e) && !chooser.isMultiSelectionEnabled()) {
int index = SwingUtilities2.loc2IndexFileList(fileList, e.getPoint());
if (index >= 0) {
File file = (File) fileList.getModel().getElementAt(index);
setFileName(chooser.getName(file));
}
}
}
});
align(fileList);
JScrollPane scrollpane = new JScrollPane(fileList);
scrollpane.setPreferredSize(prefListSize);
scrollpane.setMaximumSize(MAX_SIZE);
align(scrollpane);
fileList.setInheritsPopupMenu(true);
scrollpane.setInheritsPopupMenu(true);
return scrollpane;
}
protected JScrollPane createDirectoryList() {
directoryList = new JList();
align(directoryList);
directoryList.setCellRenderer(new DirectoryCellRenderer());
directoryList.setModel(new MotifDirectoryListModel());
directoryList.getSelectionModel().removeSelectionInterval(0, 0);
directoryList.addMouseListener(createDoubleClickListener(getFileChooser(), directoryList));
directoryList.addListSelectionListener(createListSelectionListener(getFileChooser()));
directoryList.setInheritsPopupMenu(true);
JScrollPane scrollpane = new JScrollPane(directoryList);
scrollpane.setMaximumSize(MAX_SIZE);
scrollpane.setPreferredSize(prefListSize);
scrollpane.setInheritsPopupMenu(true);
align(scrollpane);
return scrollpane;
}
public Dimension getPreferredSize(JComponent c) {
Dimension prefSize =
(getFileChooser().getAccessory() != null) ? WITH_ACCELERATOR_PREF_SIZE : PREF_SIZE;
Dimension d = c.getLayout().preferredLayoutSize(c);
if (d != null) {
return new Dimension(d.width < prefSize.width ? prefSize.width : d.width,
d.height < prefSize.height ? prefSize.height : d.height);
} else {
return prefSize;
}
}
public Dimension getMinimumSize(JComponent x) {
return MIN_SIZE;
}
public Dimension getMaximumSize(JComponent x) {
return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
}
protected void align(JComponent c) {
c.setAlignmentX(JComponent.LEFT_ALIGNMENT);
c.setAlignmentY(JComponent.TOP_ALIGNMENT);
}
protected class FileCellRenderer extends DefaultListCellRenderer {
public Component getListCellRendererComponent(JList list, Object value, int index,
boolean isSelected, boolean cellHasFocus) {
super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
setText(getFileChooser().getName((File) value));
setInheritsPopupMenu(true);
return this;
}
}
protected class DirectoryCellRenderer extends DefaultListCellRenderer {
public Component getListCellRendererComponent(JList list, Object value, int index,
boolean isSelected, boolean cellHasFocus) {
super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
setText(getFileChooser().getName((File) value));
setInheritsPopupMenu(true);
return this;
}
}
protected class MotifDirectoryListModel extends AbstractListModel implements ListDataListener {
public MotifDirectoryListModel() {
getModel().addListDataListener(this);
}
public int getSize() {
return getModel().getDirectories().size();
}
public Object getElementAt(int index) {
return getModel().getDirectories().elementAt(index);
}
public void intervalAdded(ListDataEvent e) {
fireIntervalAdded(this, e.getIndex0(), e.getIndex1());
}
public void intervalRemoved(ListDataEvent e) {
fireIntervalRemoved(this, e.getIndex0(), e.getIndex1());
}
// PENDING(jeff) - this is inefficient - should sent out
// incremental adjustment values instead of saying that the
// whole list has changed.
public void fireContentsChanged() {
fireContentsChanged(this, 0, getModel().getDirectories().size()-1);
}
// PENDING(jeff) - fire the correct interval changed - currently sending
// out that everything has changed
public void contentsChanged(ListDataEvent e) {
fireContentsChanged();
}
}
protected class MotifFileListModel extends AbstractListModel implements ListDataListener {
public MotifFileListModel() {
getModel().addListDataListener(this);
}
public int getSize() {
return getModel().getFiles().size();
}
public boolean contains(Object o) {
return getModel().getFiles().contains(o);
}
public int indexOf(Object o) {
return getModel().getFiles().indexOf(o);
}
public Object getElementAt(int index) {
return getModel().getFiles().elementAt(index);
}
public void intervalAdded(ListDataEvent e) {
fireIntervalAdded(this, e.getIndex0(), e.getIndex1());
}
public void intervalRemoved(ListDataEvent e) {
fireIntervalRemoved(this, e.getIndex0(), e.getIndex1());
}
// PENDING(jeff) - this is inefficient - should sent out
// incremental adjustment values instead of saying that the
// whole list has changed.
public void fireContentsChanged() {
fireContentsChanged(this, 0, getModel().getFiles().size()-1);
}
// PENDING(jeff) - fire the interval changed
public void contentsChanged(ListDataEvent e) {
fireContentsChanged();
}
}
//
// DataModel for Types Comboxbox
//
protected FilterComboBoxModel createFilterComboBoxModel() {
return new FilterComboBoxModel();
}
//
// Renderer for Types ComboBox
//
protected FilterComboBoxRenderer createFilterComboBoxRenderer() {
return new FilterComboBoxRenderer();
}
/**
* Render different type sizes and styles.
*/
public class FilterComboBoxRenderer extends DefaultListCellRenderer {
public Component getListCellRendererComponent(JList list,
Object value, int index, boolean isSelected,
boolean cellHasFocus) {
super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
if (value != null && value instanceof FileFilter) {
setText(((FileFilter)value).getDescription());
}
return this;
}
}
/**
* Data model for a type-face selection combo-box.
*/
protected class FilterComboBoxModel extends AbstractListModel implements ComboBoxModel, PropertyChangeListener {
protected FileFilter[] filters;
protected FilterComboBoxModel() {
super();
filters = getFileChooser().getChoosableFileFilters();
}
public void propertyChange(PropertyChangeEvent e) {
String prop = e.getPropertyName();
if(prop.equals(JFileChooser.CHOOSABLE_FILE_FILTER_CHANGED_PROPERTY)) {
filters = (FileFilter[]) e.getNewValue();
fireContentsChanged(this, -1, -1);
} else if (prop.equals(JFileChooser.FILE_FILTER_CHANGED_PROPERTY)) {
fireContentsChanged(this, -1, -1);
}
}
public void setSelectedItem(Object filter) {
if(filter != null) {
getFileChooser().setFileFilter((FileFilter) filter);
fireContentsChanged(this, -1, -1);
}
}
public Object getSelectedItem() {
// Ensure that the current filter is in the list.
// NOTE: we shouldnt' have to do this, since JFileChooser adds
// the filter to the choosable filters list when the filter
// is set. Lets be paranoid just in case someone overrides
// setFileFilter in JFileChooser.
FileFilter currentFilter = getFileChooser().getFileFilter();
boolean found = false;
if(currentFilter != null) {
for (FileFilter filter : filters) {
if (filter == currentFilter) {
found = true;
}
}
if (!found) {
getFileChooser().addChoosableFileFilter(currentFilter);
}
}
return getFileChooser().getFileFilter();
}
public int getSize() {
if(filters != null) {
return filters.length;
} else {
return 0;
}
}
public Object getElementAt(int index) {
if(index > getSize() - 1) {
// This shouldn't happen. Try to recover gracefully.
return getFileChooser().getFileFilter();
}
if(filters != null) {
return filters[index];
} else {
return null;
}
}
}
    /**
     * Returns the approve (open/save) button for the given chooser. The
     * {@code fc} argument is unused; the button field maintained by the
     * enclosing UI class is returned directly.
     *
     * @param fc the file chooser this UI is installed on (ignored)
     * @return the approve button created by this UI
     */
    protected JButton getApproveButton(JFileChooser fc) {
	return approveButton;
    }
}
| |
package cn.ragnarok.monoreader.app.ui.fragment;
import android.app.ActionBar;
import android.app.Activity;
import android.app.DialogFragment;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.app.Fragment;
import android.util.Log;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.android.volley.VolleyError;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import cn.ragnarok.monoreader.api.base.APIRequestFinishListener;
import cn.ragnarok.monoreader.api.object.SiteObject;
import cn.ragnarok.monoreader.api.service.SiteService;
import cn.ragnarok.monoreader.api.service.SubscribeService;
import cn.ragnarok.monoreader.app.R;
import cn.ragnarok.monoreader.app.ui.activity.CategoryTimelineActivity;
import cn.ragnarok.monoreader.app.ui.activity.SiteArticleListActivity;
import cn.ragnarok.monoreader.app.ui.adapter.SiteListAdapter;
import cn.ragnarok.monoreader.app.util.Utils;
import se.emilsjolander.stickylistheaders.StickyListHeadersListView;
import uk.co.senab.actionbarpulltorefresh.library.ActionBarPullToRefresh;
import uk.co.senab.actionbarpulltorefresh.library.Options;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout;
import uk.co.senab.actionbarpulltorefresh.library.listeners.OnRefreshListener;
/**
 * Fragment showing the list of sites the user is subscribed to, grouped by
 * category under sticky section headers.
 * <p>
 * Wired up here:
 * <ul>
 *   <li>pull-to-refresh reloading of the site list,</li>
 *   <li>a multi-choice action mode for bulk unsubscribing,</li>
 *   <li>header clicks opening the category timeline,</li>
 *   <li>item clicks opening the per-site article list,</li>
 *   <li>an options-menu action for subscribing to a new site.</li>
 * </ul>
 */
public class SiteListFragment extends Fragment {

    public static String TAG = "Mono.SiteListFragment";

    // API services; created in the constructor, cancelled in onDetach().
    private SubscribeService mSubscribeService;
    private SiteService mSiteService;

    // Views, bound in onCreateView().
    private StickyListHeadersListView mSiteList;
    private ProgressBar mProgressBar;
    private PullToRefreshLayout mPtrLayout;

    // Most recent site list returned by the server.
    private Collection<SiteObject> mSiteCollection;
    private APIRequestFinishListener<Collection<SiteObject>> mGetSiteListRequestListener;
    private SiteListAdapter mSiteListAdapter = null;
    private SubscribeFragment mSubscribeFragment;
    private AbsListView.MultiChoiceModeListener mUnsubscribeMode;
    // Site ids currently checked in the unsubscribe action mode.
    private ArrayList<Integer> mSelectSite = new ArrayList<Integer>();
    private ProgressDialog mProgressDialog;
    private SiteListAdapter.OnCategorySetFinishListener mCategorySetListener;

    /**
     * Creates a new instance of this fragment.
     *
     * @return a fresh {@code SiteListFragment}
     */
    public static SiteListFragment newInstance() {
        SiteListFragment fragment = new SiteListFragment();
        return fragment;
    }

    /**
     * Required empty public constructor. Also creates the API services and
     * the subscribe dialog fragment used by this fragment.
     */
    public SiteListFragment() {
        mSubscribeService = new SubscribeService();
        mSiteService = new SiteService();
        mSubscribeFragment = new SubscribeFragment();
        // Reload the list whenever the adapter finishes changing a site's category.
        mCategorySetListener = new SiteListAdapter.OnCategorySetFinishListener() {
            @Override
            public void onCategorySetFinish() {
                loadSiteList();
            }
        };
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getActivity().setTitle("Sites");
        getActivity().getActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        initGetSiteRequestListener();
        // This fragment contributes the "subscribe" action to the options menu.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment and bind its views.
        View view = inflater.inflate(R.layout.fragment_site_list, container, false);
        mSiteList = (StickyListHeadersListView) view.findViewById(R.id.site_list);
        mProgressBar = (ProgressBar) view.findViewById(R.id.loading_progress);
        mPtrLayout = (PullToRefreshLayout) view.findViewById(R.id.ptr_layout);

        // Contextual action mode used for bulk unsubscribing; tracks the
        // checked sites in mSelectSite.
        mUnsubscribeMode = new AbsListView.MultiChoiceModeListener() {
            @Override
            public void onItemCheckedStateChanged(ActionMode actionMode, int position, long id, boolean checked) {
                Log.d(TAG, "check: " + checked + ", positioin: " + position);
                int siteId = ((SiteObject) mSiteListAdapter.getItem(position)).siteId;
                if (checked) {
                    mSelectSite.add(siteId);
                } else {
                    // Remove by value (boxed id), not by list index; valueOf
                    // avoids the deprecated Integer constructor.
                    mSelectSite.remove(Integer.valueOf(siteId));
                }
            }

            @Override
            public boolean onCreateActionMode(ActionMode actionMode, Menu menu) {
                actionMode.getMenuInflater().inflate(R.menu.site_actionmode, menu);
                mSelectSite.clear();
                return true;
            }

            @Override
            public boolean onPrepareActionMode(ActionMode actionMode, Menu menu) {
                return false;
            }

            @Override
            public boolean onActionItemClicked(ActionMode actionMode, MenuItem menuItem) {
                int itemId = menuItem.getItemId();
                if (itemId == R.id.action_unsubsribe) {
                    actionMode.finish();
                    mProgressDialog = ProgressDialog.show(getActivity(), "Please waiting", "");
                    unSubscribeSelectSite();
                }
                return false;
            }

            @Override
            public void onDestroyActionMode(ActionMode actionMode) {
            }
        };

        initSiteList();
        initPtrLayout();
        // Reload the list after a successful subscription from the dialog.
        mSubscribeFragment.setOnSubscribeSuccessListener(new SubscribeFragment.OnSubscribeSuccessListener() {
            @Override
            public void onSubscribeSuccess() {
                loadSiteList();
            }
        });
        return view;
    }

    /**
     * Configures pull-to-refresh on the wrapped list view and triggers the
     * initial load of the site list.
     */
    private void initPtrLayout() {
        Options.Builder ptrOptions = Options.create();
        ptrOptions.refreshOnUp(true);
        ptrOptions.scrollDistance(0.4f);
        ActionBarPullToRefresh.from(getActivity()).theseChildrenArePullable(mSiteList.getWrappedList()).options(ptrOptions.build()).
                listener(new OnRefreshListener() {
                    @Override
                    public void onRefreshStarted(View view) {
                        loadSiteList();
                    }
                }).setup(mPtrLayout);
        loadSiteList();
    }

    /**
     * Configures the sticky-header list: divider, sticky-header fade, the
     * multi-choice unsubscribe mode, header clicks (category timeline) and
     * item clicks (per-site article list).
     */
    private void initSiteList() {
        mSiteList.setDivider(getResources().getDrawable(R.drawable.list_divider));
        mSiteList.setDividerHeight((int) Utils.dpToPix(getActivity(), 0.5f));
        mSiteList.setDrawingListUnderStickyHeader(true);
        mSiteList.setAreHeadersSticky(true);
        mSiteList.setOnStickyHeaderOffsetChangedListener(new StickyListHeadersListView.OnStickyHeaderOffsetChangedListener() {
            @Override
            public void onStickyHeaderOffsetChanged(StickyListHeadersListView stickyListHeadersListView, View header, int offset) {
                // setAlpha requires API 11+; fade the header out as it scrolls away.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    header.setAlpha(1 - (offset / (float) header.getMeasuredHeight()));
                }
            }
        });
        mSiteList.getWrappedList().setChoiceMode(AbsListView.CHOICE_MODE_MULTIPLE_MODAL);
        mSiteList.getWrappedList().setMultiChoiceModeListener(mUnsubscribeMode);
        mSiteList.setOnHeaderClickListener(new StickyListHeadersListView.OnHeaderClickListener() {
            @Override
            public void onHeaderClick(StickyListHeadersListView l, View header, int itemPosition, long headerId, boolean currentlySticky) {
                SiteObject site = (SiteObject) mSiteListAdapter.getItem(itemPosition);
                if (site != null) {
                    String category = site.category;
                    if (site.isUnClassified) {
                        category = getString(R.string.un_classified_name);
                    }
                    String[] categorySet = mSiteListAdapter.getCategorySet();
                    Log.d(TAG, "start category timelnie, category: " + category + ", categorySet: " + Arrays.toString(categorySet));
                    Intent intent = new Intent(getActivity(), CategoryTimelineActivity.class);
                    intent.putExtra(CategoryTimelineActivity.CATEGORY, category);
                    intent.putExtra(CategoryTimelineActivity.CATEGORY_SET, categorySet);
                    startActivity(intent);
                }
            }
        });
        mSiteList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                Intent intent = new Intent(getActivity(), SiteArticleListActivity.class);
                if (mSiteListAdapter != null) {
                    SiteObject site = ((SiteObject) mSiteListAdapter.getItem(i));
                    intent.putExtra(SiteArticleListActivity.SITE_ID, site.siteId);
                    intent.putExtra(SiteArticleListActivity.SITE_TITLE, site.title);
                }
                startActivity(intent);
            }
        });
    }

    /** Shows the loading indicators and asks the service for the site list. */
    private void loadSiteList() {
        mPtrLayout.setRefreshing(true);
        mSiteList.setVisibility(View.GONE);
        mProgressBar.setVisibility(View.VISIBLE);
        mSiteService.loadAllSite(mGetSiteListRequestListener);
    }

    /**
     * Creates the listener that consumes the site-list response: on success a
     * new adapter is built from the result, on failure a toast is shown; in
     * both cases the refresh indicators are dismissed.
     */
    private void initGetSiteRequestListener() {
        mGetSiteListRequestListener = new APIRequestFinishListener<Collection<SiteObject>>() {
            @Override
            public void onRequestSuccess() {
            }

            @Override
            public void onRequestFail(VolleyError error) {
                Log.d(TAG, "get site list error: " + error.toString());
                Toast.makeText(getActivity(), R.string.connection_failed, Toast.LENGTH_SHORT).show();
                setLoadFinishViewVisibility();
                mPtrLayout.setRefreshComplete();
            }

            @Override
            public void onGetResult(Collection<SiteObject> result) {
                Log.d(TAG, "successfully get site list, size: " + result.size());
                setLoadFinishViewVisibility();
                mPtrLayout.setRefreshComplete();
                mSiteCollection = result;
                SiteObject[] data = new SiteObject[result.size()];
                mSiteCollection.toArray(data);
                // A fresh adapter is created on every load so the category
                // grouping is recomputed from the new data.
                mSiteListAdapter = new SiteListAdapter(getActivity(), data);
                mSiteListAdapter.setOnCategorySetFinishListener(mCategorySetListener);
                mSiteList.setAdapter(mSiteListAdapter);
            }
        };
    }

    /** Hides the progress spinner and shows the list once a load finishes. */
    private void setLoadFinishViewVisibility() {
        mProgressBar.setVisibility(View.GONE);
        mSiteList.setVisibility(View.VISIBLE);
    }

    /**
     * Sends a bulk-unsubscribe request for the sites selected in the action
     * mode, then reloads the list. The progress dialog shown by the action
     * mode is dismissed on both success and failure.
     */
    private void unSubscribeSelectSite() {
        Log.d(TAG, "unsubscribe select site, size: " + mSelectSite.size());
        if (mSelectSite.size() > 0 && mSiteListAdapter != null) {
            mSubscribeService.bundleUnSubscribe(mSelectSite, new APIRequestFinishListener() {
                @Override
                public void onRequestSuccess() {
                    if (mProgressDialog != null) {
                        mProgressDialog.dismiss();
                    }
                    loadSiteList();
                }

                @Override
                public void onRequestFail(VolleyError error) {
                    Log.d(TAG, "bundle unsubscribe failed, error: " + error.toString());
                    if (mProgressDialog != null) {
                        mProgressDialog.dismiss();
                    }
                    loadSiteList();
                }

                @Override
                public void onGetResult(Object result) {
                }
            });
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Cancel any in-flight requests so their callbacks cannot touch a
        // detached fragment.
        mSubscribeService.cancelRequest();
        mSiteService.cancelRequest();
    }

    /** Opens the subscribe dialog fragment. */
    private void subscribe() {
        mSubscribeFragment.show(getFragmentManager(), "Subscribe");
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        if (id == R.id.action_subscribe) {
            subscribe();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.site_list, menu);
    }
}
| |
/*
* AdminMarket
*
* Copyright 2020 by Walter Bolles <mail@wjbolles.com>
*
* Licensed under the Apache License, Version 2.0
*/
package com.wjbolles.adminmarket.eco.model;
import com.wjbolles.adminmarket.AdminMarketConfig;
import org.bukkit.Material;
/**
 * A single item listed on the admin market, together with the state needed to
 * price it: base price, current shop inventory, value-added tax, and the
 * equilibrium inventory used by the floating-price model.
 * <p>
 * Floating-price model (see {@link #getCurrentFloatingPrice(double)}): the buy
 * price falls linearly from {@code basePrice * (1 + maxPercent)} at zero
 * inventory, through {@code basePrice} at {@code equilibrium} inventory, and
 * is floored at {@code basePrice * (1 - maxPercent)}, where {@code maxPercent}
 * comes from {@link AdminMarketConfig#getMaxPercentBasePrice()}.
 * <p>
 * {@link #equals(Object)} and {@link #hashCode()} deliberately ignore the
 * {@code config} reference and compare only the listing's own state.
 */
public class ItemListing {
    private static final int DEFAULT_EQUILIBRIUM = 1000;
    private static final int DEFAULT_BASE_PRICE = 0;
    private static final int DEFAULT_VALUE_ADDED_TAX = 0;
    private static final int DEFAULT_INVENTORY = 0;

    private final Material material;
    private boolean isInfinite;      // infinite listings always trade at basePrice
    private int inventory;           // current shop stock of this material
    private double basePrice;
    private double valueAddedTax;
    private int equilibrium;         // inventory level at which price == basePrice
    private final AdminMarketConfig config;

    /**
     * Creates a listing with default price, inventory, tax and equilibrium.
     *
     * @param material   the material being listed
     * @param isInfinite whether the shop carries infinite stock of this item
     * @param config     plugin configuration (pricing options)
     */
    public ItemListing(Material material, boolean isInfinite, AdminMarketConfig config) {
        this.material = material;
        this.isInfinite = isInfinite;
        this.basePrice = DEFAULT_BASE_PRICE;
        this.inventory = DEFAULT_INVENTORY;
        this.valueAddedTax = DEFAULT_VALUE_ADDED_TAX;
        this.equilibrium = DEFAULT_EQUILIBRIUM;
        this.config = config;
    }

    /**
     * Fully-specified constructor, intended for reconstructing a listing from
     * persistent storage.
     */
    protected ItemListing(Material material,
                          boolean isInfinite,
                          double basePrice,
                          int inventory,
                          double valueAddedTax,
                          int equilibrium,
                          AdminMarketConfig config) {
        this.material = material;
        this.isInfinite = isInfinite;
        this.basePrice = basePrice;
        this.inventory = inventory;
        this.valueAddedTax = valueAddedTax;
        this.equilibrium = equilibrium;
        this.config = config;
    }

    public double getValueAddedTax() {
        return valueAddedTax;
    }

    public void setValueAddedTax(double valueAddedTax) {
        this.valueAddedTax = valueAddedTax;
    }

    /** @return the listed material's enum name */
    public String getMaterialAsString(){
        return material.toString();
    }

    public boolean isInfinite() {
        return isInfinite;
    }

    public void setInfinite(boolean isInfinite) {
        this.isInfinite = isInfinite;
    }

    public int getInventory() {
        return inventory;
    }

    public void setInventory(int inventory) {
        this.inventory = inventory;
    }

    /** Adds the given amount to the shop's stock. */
    public void addInventory(int inventory) {
        this.inventory += inventory;
    }

    /**
     * Removes the given amount from the shop's stock.
     *
     * @throws IllegalArgumentException if the removal would drive stock negative
     */
    public void removeInventory(int inventory) throws IllegalArgumentException {
        if (this.inventory - inventory < 0) {
            throw new IllegalArgumentException(
                    "Cannot remove " + inventory + " items; only " + this.inventory + " in stock");
        }
        this.inventory -= inventory;
    }

    /**
     * Linear price curve: price at the given inventory level, clamped below at
     * {@code basePrice * (1 - maxPercent)}. At inventory 0 the price is
     * {@code basePrice * (1 + maxPercent)}; at {@code equilibrium} it equals
     * {@code basePrice}.
     */
    private double getCurrentFloatingPrice(double inventory) {
        double slope = (basePrice - basePrice * (1+config.getMaxPercentBasePrice()))/ -equilibrium;
        double sellPrice = basePrice * (1+config.getMaxPercentBasePrice()) - slope * inventory;
        double floor = basePrice - basePrice*config.getMaxPercentBasePrice();
        if (sellPrice < floor) {
            sellPrice = floor;
        }
        return sellPrice;
    }

    /** @return the per-unit price the shop pays a player at current inventory */
    public double getSellPrice() {
        return getSellPrice(this.inventory);
    }

    // Selling to the shop is priced as if one more unit were already in stock,
    // i.e. one step cheaper than buying at the same inventory level.
    private double getSellPrice(int inventory) {
        return getBuyPrice(inventory+1);
    }

    /** @return the per-unit price a player pays the shop at current inventory */
    public double getBuyPrice() {
        return getBuyPrice(this.inventory);
    }

    private double getBuyPrice(int inventory) {
        // Infinite listings and fixed-price mode always trade at basePrice.
        if(config.getUseFloatingPrices() && !isInfinite) {
            return getCurrentFloatingPrice(inventory);
        } else {
            return basePrice;
        }
    }

    public double getBasePrice() {
        return this.basePrice;
    }

    public void setBasePrice(double basePrice) {
        this.basePrice = basePrice;
    }

    public Material getMaterial() {
        return material;
    }

    public int getEquilibrium() {
        return this.equilibrium;
    }

    public void setEquilibrium(int equilibrium) {
        this.equilibrium = equilibrium;
    }

    /**
     * Trapezoidal-rule approximation of the area under the floating-price
     * curve between inventory levels {@code a_int} and {@code b_int}, i.e. the
     * total cost of trading that span of units. Result is rounded to cents.
     */
    private double integrate(int a_int, int b_int) {
        final int MIN_ROUNDS = 4;
        double a = 0.5 + a_int; // shifting the bounds 0.5 helps ensure
        double b = 0.5 + b_int; // the calculated price is more accurate (https://introcs.cs.princeton.edu/java/93integration/TrapezoidalRule.java.html)
        // Do more rounds the more inventory there is, for greater precision.
        long N = Math.round(Math.log((b-a)) / Math.log(2)) * 2;
        if (N < MIN_ROUNDS) {
            N = MIN_ROUNDS;
        }
        double h = (b - a) / N;
        double sum = 0.5 * (getCurrentFloatingPrice(a) + getCurrentFloatingPrice(b));
        for (int i = 1; i < N; i++) {
            double x = a + h * i;
            sum = sum + getCurrentFloatingPrice(x);
        }
        return (double) Math.round(sum * h * 100)/ 100;
    }

    /**
     * Total the shop pays for {@code amount} units sold by a player
     * (inventory rises from {@code inventory} to {@code inventory + amount}).
     */
    public double getTotalSellPrice(int amount){
        if(config.getUseFloatingPrices() && !isInfinite) {
            return integrate(inventory, inventory+amount);
        } else {
            return basePrice*amount;
        }
    }

    /**
     * Total a player pays for {@code amount} units bought from the shop
     * (inventory falls from {@code inventory} to {@code inventory - amount}).
     */
    public double getTotalBuyPrice(int amount){
        if(config.getUseFloatingPrices() && !isInfinite) {
            return integrate(inventory-amount, inventory);
        } else {
            return basePrice*amount;
        }
    }

    /**
     * Unit-by-unit total sell price.
     *
     * @deprecated replaced with the more efficient {@link #getTotalSellPrice(int)}
     *             that uses integration to approximate the total cost
     */
    @Deprecated
    public double getTotalSellPriceLegacy(int amount) {
        if (isInfinite) {
            return getSellPrice() * amount;
        }
        int inventory = this.getInventory();
        double total = 0;
        for(int i = 0; i < amount; i++) {
            total += getSellPrice(inventory);
            inventory++;
        }
        return total;
    }

    /**
     * Unit-by-unit total buy price; stops once the shop runs out of stock.
     *
     * @deprecated replaced with the more efficient {@link #getTotalBuyPrice(int)}
     *             that uses integration to approximate the total cost
     */
    @Deprecated
    public double getTotalBuyPriceLegacy(int amount) {
        if (isInfinite) {
            return getBuyPrice() * amount;
        }
        int inventory = this.getInventory();
        double total = 0;
        for(int i = 0; i < amount; i++) {
            if (inventory == 0) {
                break;
            }
            total += getBuyPrice(inventory);
            inventory--;
        }
        return total;
    }

    // equals/hashCode intentionally exclude config; they compare listing state only.
    @Override
    public int hashCode() {
        int result;
        long temp;
        result = material.hashCode();
        result = 31 * result + (isInfinite ? 1 : 0);
        result = 31 * result + inventory;
        temp = Double.doubleToLongBits(basePrice);
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(valueAddedTax);
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        result = 31 * result + equilibrium;
        return result;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ItemListing that = (ItemListing) o;
        if (isInfinite != that.isInfinite) return false;
        if (inventory != that.inventory) return false;
        if (Double.compare(that.basePrice, basePrice) != 0) return false;
        if (Double.compare(that.valueAddedTax, valueAddedTax) != 0) return false;
        if (equilibrium != that.equilibrium) return false;
        return material.equals(that.material);
    }

    @Override
    public String toString() {
        return "ItemListing{" +
                "material=" + material.toString() +
                ", isInfinite=" + isInfinite +
                ", inventory=" + inventory +
                ", basePrice=" + basePrice +
                ", valueAddedTax=" + valueAddedTax +
                ", equilibrium=" + equilibrium +
                ", config=" + config +
                '}';
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server;
import com.facebook.presto.dispatcher.DispatchManager;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.QueryState;
import com.facebook.presto.execution.StageId;
import com.facebook.presto.metadata.InternalNode;
import com.facebook.presto.metadata.InternalNodeManager;
import com.facebook.presto.resourcemanager.ResourceManagerProxy;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.QueryId;
import com.google.common.collect.ImmutableList;
import javax.annotation.security.RolesAllowed;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.Iterator;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.Optional;
import static com.facebook.presto.connector.system.KillQueryProcedure.createKillQueryException;
import static com.facebook.presto.connector.system.KillQueryProcedure.createPreemptQueryException;
import static com.facebook.presto.server.security.RoleType.ADMIN;
import static com.facebook.presto.server.security.RoleType.USER;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO;
import static java.util.Objects.requireNonNull;
import static javax.ws.rs.core.Response.Status.NO_CONTENT;
import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE;
/**
 * Manage queries scheduled on this node
 */
@Path("/v1/query")
@RolesAllowed({USER, ADMIN})
public class QueryResource
{
    // TODO There should be a combined interface for this
    private final boolean resourceManagerEnabled;
    private final DispatchManager dispatchManager;
    private final QueryManager queryManager;
    private final InternalNodeManager internalNodeManager;
    private final Optional<ResourceManagerProxy> proxyHelper;

    @Inject
    public QueryResource(
            ServerConfig serverConfig,
            DispatchManager dispatchManager,
            QueryManager queryManager,
            InternalNodeManager internalNodeManager,
            Optional<ResourceManagerProxy> proxyHelper)
    {
        this.resourceManagerEnabled = requireNonNull(serverConfig, "serverConfig is null").isResourceManagerEnabled();
        this.dispatchManager = requireNonNull(dispatchManager, "dispatchManager is null");
        this.queryManager = requireNonNull(queryManager, "queryManager is null");
        this.internalNodeManager = requireNonNull(internalNodeManager, "internalNodeManager is null");
        this.proxyHelper = requireNonNull(proxyHelper, "proxyHelper is null");
    }

    /**
     * Lists basic info for all queries known to the dispatcher, optionally
     * filtered by state. With the resource manager enabled, the request is
     * proxied there for a cluster-wide view.
     */
    @GET
    public void getAllQueryInfo(
            @QueryParam("state") String stateFilter,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        if (resourceManagerEnabled) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        QueryState expectedState = stateFilter == null ? null : QueryState.valueOf(stateFilter.toUpperCase(Locale.ENGLISH));
        ImmutableList.Builder<BasicQueryInfo> builder = new ImmutableList.Builder<>();
        for (BasicQueryInfo queryInfo : dispatchManager.getQueries()) {
            if (stateFilter == null || queryInfo.getState() == expectedState) {
                builder.add(queryInfo);
            }
        }
        asyncResponse.resume(Response.ok(builder.build()).build());
    }

    /**
     * Returns full query info when the query manager still has it, falling
     * back to the dispatcher's basic info, and 410 GONE when neither knows
     * the query. Unknown queries are proxied to the resource manager when it
     * is enabled.
     */
    @GET
    @Path("{queryId}")
    public void getQueryInfo(
            @PathParam("queryId") QueryId queryId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        try {
            QueryInfo queryInfo = queryManager.getFullQueryInfo(queryId);
            asyncResponse.resume(Response.ok(queryInfo).build());
        }
        catch (NoSuchElementException e) {
            try {
                BasicQueryInfo basicQueryInfo = dispatchManager.getQueryInfo(queryId);
                asyncResponse.resume(Response.ok(basicQueryInfo).build());
            }
            catch (NoSuchElementException ex) {
                asyncResponse.resume(Response.status(Status.GONE).build());
            }
        }
    }

    /** Cancels the query; responds 204 regardless of prior query state. */
    @DELETE
    @Path("{queryId}")
    public void cancelQuery(
            @PathParam("queryId") QueryId queryId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        dispatchManager.cancelQuery(queryId);
        asyncResponse.resume(Response.status(NO_CONTENT).build());
    }

    /** Fails the query with a "killed" error carrying the caller's message. */
    @PUT
    @Path("{queryId}/killed")
    public void killQuery(
            @PathParam("queryId") QueryId queryId,
            String message,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        asyncResponse.resume(failQuery(queryId, createKillQueryException(message)));
    }

    /** Fails the query with a "preempted" error carrying the caller's message. */
    @PUT
    @Path("{queryId}/preempted")
    public void preemptQuery(
            @PathParam("queryId") QueryId queryId,
            String message,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(queryId, "queryId is null");
        // Fixed: previously this proxied whenever the query was absent, even
        // with the resource manager disabled — proxyHelper is empty in that
        // configuration, so proxyResponse's checkState would fail. Guard on
        // resourceManagerEnabled for consistency with the other endpoints.
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(queryId)) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        asyncResponse.resume(failQuery(queryId, createPreemptQueryException(message)));
    }

    /**
     * Fails the query with the given exception. Responds 409 CONFLICT when
     * the query already finished (or another failure won the race), 410 GONE
     * when the dispatcher no longer knows the query, and 200 OK on success.
     */
    private Response failQuery(QueryId queryId, PrestoException queryException)
    {
        requireNonNull(queryId, "queryId is null");
        try {
            BasicQueryInfo state = dispatchManager.getQueryInfo(queryId);
            // check before killing to provide the proper error code (this is racy)
            if (state.getState().isDone()) {
                return Response.status(Status.CONFLICT).build();
            }
            dispatchManager.failQuery(queryId, queryException);
            // verify if the query was failed (if not, we lost the race)
            if (!queryException.getErrorCode().equals(dispatchManager.getQueryInfo(queryId).getErrorCode())) {
                return Response.status(Status.CONFLICT).build();
            }
            return Response.status(Status.OK).build();
        }
        catch (NoSuchElementException e) {
            return Response.status(Status.GONE).build();
        }
    }

    /** Cancels a single stage of a query; responds 200 OK. */
    @DELETE
    @Path("stage/{stageId}")
    public void cancelStage(
            @PathParam("stageId") StageId stageId,
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        requireNonNull(stageId, "stageId is null");
        // Fixed: guard on resourceManagerEnabled (see preemptQuery) so a
        // missing query on an RM-less server does not attempt to proxy.
        if (resourceManagerEnabled && !dispatchManager.isQueryPresent(stageId.getQueryId())) {
            proxyResponse(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        queryManager.cancelStage(stageId);
        asyncResponse.resume(Response.ok().build());
    }

    /**
     * Forwards the request to a resource manager node, preserving the scheme
     * advertised via X-Forwarded-Proto. Responds 503 when no resource manager
     * is available.
     */
    private void proxyResponse(HttpServletRequest servletRequest, AsyncResponse asyncResponse, String xForwardedProto, UriInfo uriInfo)
    {
        try {
            checkState(proxyHelper.isPresent());
            Iterator<InternalNode> resourceManagers = internalNodeManager.getResourceManagers().iterator();
            if (!resourceManagers.hasNext()) {
                asyncResponse.resume(Response.status(SERVICE_UNAVAILABLE).build());
                return;
            }
            InternalNode resourceManagerNode = resourceManagers.next();
            String scheme = isNullOrEmpty(xForwardedProto) ? uriInfo.getRequestUri().getScheme() : xForwardedProto;
            URI uri = uriInfo.getRequestUriBuilder()
                    .scheme(scheme)
                    .host(resourceManagerNode.getHostAndPort().toInetAddress().getHostName())
                    .port(resourceManagerNode.getInternalUri().getPort())
                    .build();
            proxyHelper.get().performRequest(servletRequest, asyncResponse, uri);
        }
        catch (Exception e) {
            asyncResponse.resume(e);
        }
    }
}
| |
package org.jutility.io.csv;
// @formatter:off
/*
* #%L
* jutility-io
* %%
* Copyright (C) 2013 - 2014 jutility.org
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
//@formatter:on
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.jutility.common.datatype.table.ITable;
import org.jutility.common.datatype.table.Table;
import org.jutility.io.ISerializer;
import org.jutility.io.SerializationException;
import org.supercsv.io.CsvMapReader;
import org.supercsv.io.CsvMapWriter;
import org.supercsv.io.ICsvMapReader;
import org.supercsv.io.ICsvMapWriter;
import org.supercsv.prefs.CsvPreference;
/**
 * The {@code CsvSerializer} class provides an implementation of the
 * {@link ISerializer} interface for CSV files. Tables are serialized with the
 * first row as the CSV header; deserialization reproduces that layout (header
 * in row 0, data from row 1), trimming cell values.
 *
 * @author Peter J. Radics
 * @version 0.1.2
 * @since 0.1.0
 */
public class CsvSerializer
        implements ISerializer {

    private static CsvSerializer s_Instance;

    /**
     * Returns the Singleton instance of the {@code CsvSerializer}.
     * <p>
     * NOTE(review): lazy initialization here is not thread-safe — acceptable
     * only if first access is single-threaded; confirm, or synchronize.
     *
     * @return the Singleton instance.
     */
    public static CsvSerializer Instance() {

        if (s_Instance == null) {

            s_Instance = new CsvSerializer();
        }

        return s_Instance;
    }

    private CsvSerializer() {

        // Nothing to be done.
    }

    @Override
    public boolean supportsSerializationOf(Class<?> type) {

        // Only Table (and subclasses) can be serialized.
        return Table.class.isAssignableFrom(type);
    }

    @Override
    public boolean supportsDeserializationOf(Class<?> type) {

        // Deserialization produces a Table, so the requested type must be a
        // supertype of Table.
        return type.isAssignableFrom(Table.class);
    }

    /**
     * Serializes the table to the given file. Row 0 of the table becomes the
     * CSV header; remaining rows become data records. Null cells are written
     * as empty strings.
     * <p>
     * NOTE(review): FileWriter uses the platform default charset — confirm
     * this matches the charset expected by consumers of the CSV files.
     *
     * @throws SerializationException if the document is not a Table or the
     *                                file cannot be written (including a
     *                                failed close/flush)
     */
    @Override
    public <T> void serialize(T document, String filename)
            throws SerializationException {

        Class<?> documentType = document.getClass();

        if (!this.supportsSerializationOf(documentType)) {

            throw new SerializationException(
                    "Serialization of type " + documentType
                            + " is not supported!");
        }

        Table<?> table = Table.class.cast(document);

        // try-with-resources: a failure to close (and therefore flush) the
        // writer is now reported as a SerializationException instead of being
        // silently swallowed, which previously could hide truncated output.
        try (ICsvMapWriter mapWriter = new CsvMapWriter(
                new FileWriter(filename), CsvPreference.STANDARD_PREFERENCE)) {

            // Row 0 of the table supplies the CSV header.
            String[] header = new String[table.columns()];

            for (int i = 0; i < table.columns(); i++) {

                Object value = table.get(0, i);

                if (value != null) {

                    header[i] = value.toString();
                }
                else {

                    header[i] = "";
                }
            }

            mapWriter.writeHeader(header);

            // Remaining rows become data records keyed by header name.
            for (int row = 1; row < table.rows(); row++) {

                Map<String, String> values = new LinkedHashMap<>();

                for (int column = 0; column < table.columns(); column++) {

                    String key = header[column];
                    Object value = table.get(row, column);

                    if (value != null) {

                        values.put(key, value.toString());
                    }
                    else {

                        values.put(key, "");
                    }
                }

                mapWriter.write(values, header);
            }
        }
        catch (IOException e) {

            throw new SerializationException("Could not serialize resource.",
                    e);
        }
    }

    @Override
    public <T> T deserialize(File file, Class<? extends T> type)
            throws SerializationException {

        return this.deserialize(file.toURI(), type);
    }

    @Override
    public <T> T deserialize(URI uri, Class<? extends T> type)
            throws SerializationException {

        try {

            return this.deserialize(uri.toURL(), type);
        }
        catch (MalformedURLException e) {

            throw new SerializationException("URI " + uri + " is malformed.",
                    e);
        }
    }

    /**
     * Deserializes a CSV document into a {@link Table}: the header goes into
     * row 0 and data records into rows 1..n, with all cell values trimmed.
     * <p>
     * NOTE(review): duplicate header names would collide in the intermediate
     * map and lose columns — confirm inputs have unique headers.
     *
     * @throws SerializationException if the type is unsupported or the URL
     *                                cannot be read
     */
    @Override
    public <T> T deserialize(URL url, Class<? extends T> type)
            throws SerializationException {

        if (!this.supportsDeserializationOf(type)) {

            throw new SerializationException(
                    "Deserialization of type " + type.toString()
                            + " is not supported!");
        }

        // Column-major staging area: header name -> column values.
        Map<String, List<String>> contents = new LinkedHashMap<>();

        ITable<String> table = new Table<>();

        try (ICsvMapReader mapReader = new CsvMapReader(
                new BufferedReader(new InputStreamReader(url.openStream())),
                CsvPreference.STANDARD_PREFERENCE)) {

            // the header columns are used as the keys to the Map
            final String[] headerArray = mapReader.getHeader(true);
            final List<String> header = Arrays.asList(headerArray);

            AtomicInteger headerIndex = new AtomicInteger(0);

            header.forEach(columnHeader -> {

                contents.put(columnHeader, new LinkedList<>());
                table.add(0, headerIndex.get(),
                        columnHeader != null ? columnHeader.trim() : null);
                headerIndex.incrementAndGet();
            });

            // Collect each record's values into the per-column lists.
            Map<String, String> rowValueMap;

            while ((rowValueMap = mapReader.read(headerArray)) != null) {

                final Map<String, String> finalRowValueMap = rowValueMap;

                rowValueMap.keySet()
                        .forEach(key -> contents.get(key)
                                .add(finalRowValueMap.get(key)));
            }

            // Copy the staged columns into the table, starting at row 1
            // (row 0 holds the header). Null cells are skipped.
            AtomicInteger column = new AtomicInteger(0);

            contents.keySet()
                    .forEach(key -> {

                        AtomicInteger row = new AtomicInteger(1);

                        contents.get(key)
                                .forEach(value -> {

                                    if (value != null) {

                                        table.add(row.get(), column.get(),
                                                value.trim());
                                    }
                                    row.incrementAndGet();
                                });
                        column.incrementAndGet();
                    });
        }
        catch (IOException e) {

            throw new SerializationException(
                    "Could not deserialize CSV file " + url.toString() + "!",
                    e);
        }

        return type.cast(table);
    }
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache.ha;
import java.util.Iterator;
import java.util.Properties;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.util.BridgeServer;
import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.cache30.BridgeTestCase;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.BridgeObserverAdapter;
import com.gemstone.gemfire.internal.cache.BridgeObserverHolder;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.VM;
/**
 *
 * Dunit test to verify the HA feature. There are 2 server nodes, S1 &amp; S2. The client is connected to both S1 &amp;
 * S2, with S1 as the primary endpoint. Some puts are done on S1; the expiry is on the high side. S1 is then stopped,
 * and the client fails over to S2. During the failover some more puts are done on S1. The client, on failing over to
 * S2, may receive duplicate events but should not miss any events.
 *
 * @author Suyog Bhokare
 *
 */
public class FailoverDUnitTest extends DistributedTestCase
{
  /** Cache instance for the VM in which a method runs (client VM or a server VM). */
  protected static Cache cache = null;
  //server VMs
  private static VM vm0 = null;
  private static VM vm1 = null;
  /** The server VM that is currently the client pool's primary endpoint. */
  protected static VM primary = null;
  private static int PORT1;
  private static int PORT2;
  private static final String regionName = "interestRegion";

  /** constructor */
  public FailoverDUnitTest(String name) {
    super(name);
  }

  /**
   * Starts two bridge servers, connects a client pool to both, and records which
   * server VM ended up as the pool's primary.
   */
  public void setUp() throws Exception
  {
    super.setUp();
    final Host host = Host.getHost(0);
    vm0 = host.getVM(0);
    vm1 = host.getVM(1);
    //start servers first
    vm0.invoke(ConflationDUnitTest.class, "unsetIsSlowStart");
    vm1.invoke(ConflationDUnitTest.class, "unsetIsSlowStart");
    PORT1 = ((Integer)vm0.invoke(FailoverDUnitTest.class, "createServerCache" )).intValue();
    PORT2 = ((Integer)vm1.invoke(FailoverDUnitTest.class, "createServerCache" )).intValue();
    CacheServerTestUtil.disableShufflingOfEndpoints();
    createClientCache(getServerHostName(host), Integer.valueOf(PORT1), Integer.valueOf(PORT2));
    // calculate the primary vm
    waitForPrimaryAndBackups(1);
    PoolImpl pool = (PoolImpl)PoolManager.find("FailoverPool");
    if (pool.getPrimaryPort() == PORT1) {
      primary = vm0;
    } else {
      assertEquals(PORT2, pool.getPrimaryPort());
      primary = vm1;
    }
  }

  /**
   * Drives the failover scenario: put on the primary, verify on the client, then stop
   * the primary (with more puts injected mid-failover via the bridge observer) and
   * verify the client still sees every event.
   */
  public void testFailover()
  {
    createEntries();
    waitForPrimaryAndBackups(1);
    registerInterestList();
    primary.invoke(FailoverDUnitTest.class, "put");
    verifyEntries();
    setBridgeObserver();
    primary.invoke(FailoverDUnitTest.class, "stopServer");
    verifyEntriesAfterFailover();
  }

  /** Creates a fresh cache on a fresh distributed system with the given properties. */
  private void createCache(Properties props) throws Exception
  {
    DistributedSystem ds = getSystem(props);
    // disconnect and reconnect so the system is created with exactly these properties
    ds.disconnect();
    ds = getSystem(props);
    assertNotNull(ds);
    cache = CacheFactory.create(ds);
    assertNotNull(cache);
  }

  /**
   * Creates the client cache with a pool ("FailoverPool") connected to both servers,
   * plus a listener that logs every update event it receives.
   */
  public static void createClientCache(String hostName, Integer port1 , Integer port2) throws Exception
  {
    PORT1 = port1.intValue();
    PORT2 = port2.intValue();
    Properties props = new Properties();
    // loner client: no multicast, no locators
    props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
    props.setProperty(DistributionConfig.LOCATORS_NAME, "");
    new FailoverDUnitTest("temp").createCache(props);
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    BridgeTestCase.configureConnectionPoolWithName(factory, hostName, new int[] {PORT1,PORT2}, true, -1, 2, null, "FailoverPool");
    factory.setCacheListener(new CacheListenerAdapter() {
      public void afterUpdate(EntryEvent event)
      {
        synchronized (this) {
          cache.getLogger().info("Event Received : key..."+ event.getKey());
          cache.getLogger().info("Event Received : value..."+ event.getNewValue());
        }
      }
    });
    cache.createRegion(regionName, factory.create());
  }

  /**
   * Creates a replicated server region and starts a bridge server on a random port.
   * @return the port the bridge server is listening on
   */
  public static Integer createServerCache() throws Exception
  {
    new FailoverDUnitTest("temp").createCache(new Properties());
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setDataPolicy(DataPolicy.REPLICATE);
    RegionAttributes attrs = factory.create();
    cache.createRegion(regionName, attrs);
    int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET) ;
    BridgeServer server1 = cache.addBridgeServer();
    server1.setPort(port);
    server1.setNotifyBySubscription(true);
    server1.start();
    return Integer.valueOf(server1.getPort());
  }

  /**
   * Waits (up to 20s) until the client pool has a primary and at least
   * {@code numBackups} redundant servers.
   */
  public void waitForPrimaryAndBackups(final int numBackups) {
    final PoolImpl pool = (PoolImpl)PoolManager.find("FailoverPool");
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        if (pool.getPrimary() == null) {
          return false;
        }
        if (pool.getRedundants().size() < numBackups) {
          return false;
        }
        return true;
      }
      public String description() {
        return "waiting for a primary and " + numBackups
            + " backup(s) on pool FailoverPool; redundants=" + pool.getRedundants();
      }
    };
    DistributedTestCase.waitForCriterion(ev, 20 * 1000, 200, true);
    assertNotNull(pool.getPrimary());
    assertTrue("backups="+pool.getRedundants() + " expected=" + numBackups,
               pool.getRedundants().size() >= numBackups);
  }

  /** Registers client interest in key-1 .. key-5 so the server pushes their updates. */
  public static void registerInterestList()
  {
    try {
      Region r = cache.getRegion("/"+ regionName);
      assertNotNull(r);
      r.registerInterest("key-1");
      r.registerInterest("key-2");
      r.registerInterest("key-3");
      r.registerInterest("key-4");
      r.registerInterest("key-5");
    }
    catch (Exception ex) {
      fail("failed while registering keys k1 to k5", ex);
    }
  }

  /** Seeds key-1 .. key-5 with values equal to their keys. */
  public static void createEntries()
  {
    try {
      Region r = cache.getRegion("/"+ regionName);
      assertNotNull(r);
      r.create("key-1", "key-1");
      r.create("key-2", "key-2");
      r.create("key-3", "key-3");
      r.create("key-4", "key-4");
      r.create("key-5", "key-5");
    }
    catch (Exception ex) {
      fail("failed while createEntries()", ex);
    }
  }

  /** Stops the (single) bridge server in this VM, triggering client failover. */
  public static void stopServer()
  {
    try {
      Iterator iter = cache.getBridgeServers().iterator();
      if (iter.hasNext()) {
        BridgeServer server = (BridgeServer)iter.next();
        server.stop();
      }
    }
    catch (Exception e) {
      // pass the exception so the stack trace is preserved (was: fail(msg + e))
      fail("failed while stopServer()", e);
    }
  }

  /** Updates key-1 .. key-3 on the primary before failover. */
  public static void put()
  {
    try {
      Region r = cache.getRegion("/"+ regionName);
      assertNotNull(r);
      r.put("key-1", "value-1");
      r.put("key-2", "value-2");
      r.put("key-3", "value-3");
    }
    catch (Exception ex) {
      fail("failed while r.put()", ex);
    }
  }

  /** Waits until key-3's update has arrived at the client, then checks all three values. */
  public void verifyEntries()
  {
    final Region r = cache.getRegion("/"+regionName);
    assertNotNull(r);
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        // done once key-3 no longer holds its initial value
        return !r.getEntry("key-3").getValue().equals("key-3");
      }
      public String description() {
        return "waiting for key-3 to be updated; current=" + r.getEntry("key-3").getValue();
      }
    };
    DistributedTestCase.waitForCriterion(ev, 20 * 1000, 200, true);
    assertEquals("value-1", r.getEntry("key-1").getValue());
    assertEquals("value-2", r.getEntry("key-2").getValue());
    assertEquals("value-3", r.getEntry("key-3").getValue());
  }

  /**
   * Installs an observer that injects puts on the primary in the window between losing
   * the primary and promoting a backup — the critical failover window under test.
   */
  public static void setBridgeObserver() {
    PoolImpl.BEFORE_PRIMARY_IDENTIFICATION_FROM_BACKUP_CALLBACK_FLAG = true;
    BridgeObserverHolder.setInstance(new BridgeObserverAdapter() {
      public void beforePrimaryIdentificationFromBackup() {
        primary.invoke(FailoverDUnitTest.class, "putDuringFailover");
        PoolImpl.BEFORE_PRIMARY_IDENTIFICATION_FROM_BACKUP_CALLBACK_FLAG = false;
      }
    });
  }

  /** Updates key-4 and key-5 while the client is in the middle of failing over. */
  public static void putDuringFailover()
  {
    try {
      Region r = cache.getRegion("/"+ regionName);
      assertNotNull(r);
      r.put("key-4", "value-4");
      r.put("key-5", "value-5");
    }
    catch (Exception ex) {
      fail("failed while r.putDuringFailover()", ex);
    }
  }

  /** Waits until key-5's mid-failover update has arrived, then checks both values. */
  public void verifyEntriesAfterFailover()
  {
    final Region r = cache.getRegion("/"+ regionName);
    assertNotNull(r);
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        return !r.getEntry("key-5").getValue().equals("key-5");
      }
      public String description() {
        return "waiting for key-5 to be updated after failover; current=" + r.getEntry("key-5").getValue();
      }
    };
    DistributedTestCase.waitForCriterion(ev, 20 * 1000, 200, true);
    assertEquals("value-5", r.getEntry("key-5").getValue());
    assertEquals("value-4", r.getEntry("key-4").getValue());
  }

  public void tearDown2() throws Exception
  {
    super.tearDown2();
    // close the clients first
    closeCache();
    // then close the servers
    vm0.invoke(FailoverDUnitTest.class, "closeCache");
    vm1.invoke(FailoverDUnitTest.class, "closeCache");
    CacheServerTestUtil.resetDisableShufflingOfEndpointsFlag();
  }

  /** Closes this VM's cache and disconnects its distributed system, if still open. */
  public static void closeCache()
  {
    if (cache != null && !cache.isClosed()) {
      cache.close();
      cache.getDistributedSystem().disconnect();
      cache = null;
    }
  }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.hadoop.hive.ql.lockmgr;
import com.cronutils.utils.StringUtils;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.LockComponentBuilder;
import org.apache.hadoop.hive.metastore.LockRequestBuilder;
import org.apache.hadoop.hive.metastore.api.LockComponent;
import org.apache.hadoop.hive.metastore.api.LockResponse;
import org.apache.hadoop.hive.metastore.api.LockState;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
import org.apache.hadoop.hive.metastore.api.TxnType;
import org.apache.hadoop.hive.metastore.txn.TxnCommonUtils;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.ddl.database.lock.LockDatabaseDesc;
import org.apache.hadoop.hive.ql.ddl.database.unlock.UnlockDatabaseDesc;
import org.apache.hadoop.hive.ql.ddl.table.lock.LockTableDesc;
import org.apache.hadoop.hive.ql.ddl.table.lock.UnlockTableDesc;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.common.util.ShutdownHookManager;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
/**
 * An implementation of HiveTxnManager that stores the transactions in the metastore database.
 * There should be 1 instance of {@link DbTxnManager} per {@link org.apache.hadoop.hive.ql.session.SessionState}
 * with a single thread accessing it at a time, with the exception of {@link #heartbeat()} method.
 * The latter may (usually will) be called from a timer thread.
 * See {@link #getMS()} for more important concurrency/metastore access notes.
 *
 * Each statement that the TM (transaction manager) should be aware of should belong to a transaction.
 * Effectively, that means any statement that has side effects. Exceptions are statements like
 * Show Compactions, Show Tables, Use Database foo, etc. The transaction is started either
 * explicitly ( via Start Transaction SQL statement from end user - not fully supported) or
 * implicitly by the {@link org.apache.hadoop.hive.ql.Driver} (which looks exactly as autoCommit=true
 * from the end user's point of view). See more at {@link #isExplicitTransaction}.
 */
public final class DbTxnManager extends HiveTxnManagerImpl {
  static final private String CLASS_NAME = DbTxnManager.class.getName();
  static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
  // Pseudo-database name under which global (query-exclusive) locks are registered.
  public static final String GLOBAL_LOCKS = "__GLOBAL_LOCKS";
  // Lazily created in getLockManager(); volatile because it may be read by a
  // different thread (e.g. heartbeat) than the one that created it.
  private volatile DbLockManager lockMgr = null;
  /**
   * Id of the currently open transaction. The Metastore TXNS sequence is initialized
   * to 1, so 1 is the first valid transaction id and 0 means "no open transaction".
   */
  private volatile long txnId = 0;
  /**
   * The local cache of table write IDs allocated/created by the current transaction
   */
  private Map<String, Long> tableWriteIds = new HashMap<>();
  /**
   * assigns a unique monotonically increasing ID to each statement
   * which is part of an open transaction. This is used by storage
   * layer (see {@link org.apache.hadoop.hive.ql.io.AcidUtils#deltaSubdir(long, long, int)})
   * to keep apart multiple writes of the same data within the same transaction
   * Also see {@link org.apache.hadoop.hive.ql.io.AcidOutputFormat.Options}.
   * -1 means "no open transaction"; reset to 0 on openTxn.
   */
  private int stmtId = -1;
  /**
   * counts number of statements in the current transaction.
   */
  private int numStatements = 0;
  /**
   * if {@code true} it means current transaction is started via START TRANSACTION which means it cannot
   * include any Operations which cannot be rolled back (drop partition; write to non-acid table).
   * If false, it's a single statement transaction which can include any statement. This is not a
   * contradiction from the user point of view who doesn't know anything about the implicit txn
   * and cannot call rollback (the statement of course can fail in which case there is nothing to
   * rollback (assuming the statement is well implemented)).
   *
   * This is done so that all commands run in a transaction which simplifies implementation and
   * allows a simple implementation of multi-statement txns which don't require a lock manager
   * capable of deadlock detection. (todo: not fully implemented; elaborate on how this LM works)
   *
   * Also, critically important, ensuring that everything runs in a transaction assigns an order
   * to all operations in the system - needed for replication/DR.
   *
   * We don't want to allow non-transactional statements in a user demarcated txn because the effect
   * of such a statement is "visible" immediately on statement completion, but the user may
   * issue a rollback but the action of the statement can't be undone (and has possibly already been
   * seen by another txn). For example,
   * start transaction
   * insert into transactional_table values(1);
   * insert into non_transactional_table select * from transactional_table;
   * rollback
   *
   * The user would be in for a surprise especially if they are not aware of the transactional
   * properties of the tables involved.
   *
   * As a side note: what should the lock manager do with locks for non-transactional resources?
   * Should it release them at the end of the stmt or txn?
   * Some interesting thoughts: http://mysqlmusings.blogspot.com/2009/02/mixing-engines-in-transactions.html.
   */
  private boolean isExplicitTransaction = false;
  /**
   * To ensure transactions don't nest.
   */
  private int startTransactionCount = 0;
  // QueryId for the query in current transaction
  private String queryId;
  // ExecutorService for sending heartbeat to metastore periodically.
  private static ScheduledExecutorService heartbeatExecutorService = null;
  private ScheduledFuture<?> heartbeatTask = null;
  private static final int SHUTDOWN_HOOK_PRIORITY = 0;
  private final ReentrantLock heartbeatTaskLock = new ReentrantLock();
  //Contains database under replication name for hive replication transactions (dump and load operation)
  private String replPolicy;
  /**
   * We do this on every call to make sure TM uses same MS connection as is used by the caller (Driver,
   * SemanticAnalyzer, etc). {@code Hive} instances are cached using ThreadLocal and
   * {@link IMetaStoreClient} is cached within {@code Hive} with additional logic. Furthermore, this
   * ensures that multiple threads are not sharing the same Thrift client (which could happen
   * if we had cached {@link IMetaStoreClient} here).
   *
   * ThreadLocal gets cleaned up automatically when its thread goes away
   * https://docs.oracle.com/javase/7/docs/api/java/lang/ThreadLocal.html. This is especially
   * important for threads created by {@link #heartbeatExecutorService} threads.
   *
   * Embedded {@link DbLockManager} follows the same logic.
   * @return IMetaStoreClient
   * @throws LockException on any errors
   */
IMetaStoreClient getMS() throws LockException {
try {
return Hive.get(conf).getMSC();
}
catch(HiveException|MetaException e) {
String msg = "Unable to reach Hive Metastore: " + e.getMessage();
LOG.error(msg, e);
throw new LockException(e);
}
}
  /**
   * Package-private constructor — clients do not instantiate this class directly.
   * (Presumably constructed via factory machinery elsewhere in ql.lockmgr — confirm.)
   */
  DbTxnManager() {
  }
@Override
void setHiveConf(HiveConf conf) {
super.setHiveConf(conf);
if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY)) {
throw new RuntimeException(ErrorMsg.DBTXNMGR_REQUIRES_CONCURRENCY.getMsg());
}
}
@Override
public List<Long> replOpenTxn(String replPolicy, List<Long> srcTxnIds, String user) throws LockException {
try {
return getMS().replOpenTxn(replPolicy, srcTxnIds, user, TxnType.REPL_CREATED);
} catch (TException e) {
throw new LockException(e, ErrorMsg.METASTORE_COMMUNICATION_FAILED);
}
}
  /** Convenience overload: opens a DEFAULT-type transaction with no heartbeat delay. */
  @Override
  public long openTxn(Context ctx, String user) throws LockException {
    return openTxn(ctx, user, TxnType.DEFAULT, 0);
  }
  /** Convenience overload: opens a transaction of the given type with no heartbeat delay. */
  @Override
  public long openTxn(Context ctx, String user, TxnType txnType) throws LockException {
    return openTxn(ctx, user, txnType, 0);
  }
  /**
   * Opens a transaction in the metastore, resets all per-txn local state, and starts
   * the heartbeat. If the Context carries a replication policy, the txn is opened via
   * the replication path instead.
   *
   * @param delay milliseconds to wait before the first heartbeat
   * @return the newly opened transaction id
   * @throws LockException if a txn is already open or the metastore is unreachable
   */
  @VisibleForTesting
  long openTxn(Context ctx, String user, TxnType txnType, long delay) throws LockException {
    /*Q: why don't we lock the snapshot here??? Instead of having client make an explicit call
    whenever it chooses
    A: If we want to rely on locks for transaction scheduling we must get the snapshot after lock
    acquisition. Relying on locks is a pessimistic strategy which works better under high
    contention.*/
    init();
    getLockManager();
    if(isTxnOpen()) {
      // this TM does not support nested transactions
      throw new LockException("Transaction already opened. " + JavaUtils.txnIdToString(txnId));
    }
    try {
      replPolicy = ctx.getReplPolicy();
      if (replPolicy != null) {
        // replication load path: open via replOpenTxn (single txn, take the first id)
        txnId = getMS().replOpenTxn(replPolicy, null, user, txnType).get(0);
      } else {
        txnId = getMS().openTxn(user, txnType);
      }
      // reset per-transaction bookkeeping
      stmtId = 0;
      numStatements = 0;
      tableWriteIds.clear();
      isExplicitTransaction = false;
      startTransactionCount = 0;
      this.queryId = ctx.getConf().get(HiveConf.ConfVars.HIVEQUERYID.varname);
      LOG.info("Opened " + JavaUtils.txnIdToString(txnId));
      ctx.setHeartbeater(startHeartbeat(delay));
      return txnId;
    } catch (TException e) {
      throw new LockException(e, ErrorMsg.METASTORE_COMMUNICATION_FAILED);
    }
  }
  /**
   * Lazily creates and returns the {@link DbLockManager}. We don't expect multiple threads
   * to call this method concurrently, but {@link #lockMgr} will be read by different threads
   * than the one writing it, thus it's {@code volatile}.
   * NOTE(review): the null-check-then-assign is not synchronized, so two racing callers could
   * each construct a DbLockManager; this is benign only if the single-caller assumption above
   * holds — confirm.
   * @return the lock manager for this transaction manager
   * @throws LockException on initialization failure
   */
  @Override
  public HiveLockManager getLockManager() throws LockException {
    init();
    if (lockMgr == null) {
      lockMgr = new DbLockManager(conf, this);
    }
    return lockMgr;
  }
  /**
   * Blocking lock acquisition with no heartbeat delay. If the metastore reports the
   * transaction as already aborted, the stale local txn state is cleared before the
   * exception is rethrown, so the session can open a fresh transaction.
   */
  @Override
  public void acquireLocks(QueryPlan plan, Context ctx, String username) throws LockException {
    try {
      acquireLocksWithHeartbeatDelay(plan, ctx, username, 0);
    }
    catch(LockException e) {
      if(e.getCause() instanceof TxnAbortedException) {
        resetTxnInfo();
      }
      throw e;
    }
  }
/**
* Watermark to include in error msgs and logs
* @param queryPlan
* @return
*/
private static String getQueryIdWaterMark(QueryPlan queryPlan) {
return "queryId=" + queryPlan.getQueryId();
}
private void markExplicitTransaction(QueryPlan queryPlan) throws LockException {
isExplicitTransaction = true;
if(++startTransactionCount > 1) {
throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_IN_TXN, queryPlan.getOperationName(),
JavaUtils.txnIdToString(getCurrentTxnId()), queryPlan.getQueryId());
}
}
  /**
   * Ensures that the current SQL statement is appropriate for the current state of the
   * Transaction Manager (e.g. can't call commit unless you called start transaction).
   *
   * Note that support for multi-statement txns is a work-in-progress so it's only supported in
   * HiveConf#HIVE_IN_TEST/HiveConf#TEZ_HIVE_IN_TEST.
   * @param queryPlan the plan for the statement being verified
   * @throws LockException if the statement is not allowed in the current txn state
   */
  private void verifyState(QueryPlan queryPlan) throws LockException {
    if(!isTxnOpen()) {
      throw new LockException("No transaction context for operation: " + queryPlan.getOperationName() +
        " for " + getQueryIdWaterMark(queryPlan));
    }
    if(queryPlan.getOperation() == null) {
      throw new IllegalStateException("Unknown HiveOperation(null) for " + getQueryIdWaterMark(queryPlan));
    }
    numStatements++;
    switch (queryPlan.getOperation()) {
      case START_TRANSACTION:
        markExplicitTransaction(queryPlan);
        break;
      case COMMIT:
      case ROLLBACK:
        // NOTE(review): this isTxnOpen() check is unreachable — the method already threw
        // above when no txn is open. Kept as written.
        if(!isTxnOpen()) {
          throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, queryPlan.getOperationName());
        }
        // COMMIT/ROLLBACK only make sense in a user-demarcated (explicit) transaction
        if(!isExplicitTransaction) {
          throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_IN_IMPLICIT_TXN, queryPlan.getOperationName());
        }
        break;
      default:
        // any other operation: if it can't be rolled back, it is only allowed in an
        // implicit (single-statement) txn — unless a narrow exception applies
        if(!queryPlan.getOperation().isAllowedInTransaction() && isExplicitTransaction) {
          if(allowOperationInATransaction(queryPlan)) {
            break;
          }
          //look at queryPlan.outputs(WriteEntity.t - that's the table)
          //for example, drop table in an explicit txn is not allowed
          //in some cases this requires looking at more than just the operation
          //for example HiveOperation.LOAD - OK if target is MM table but not OK if non-acid table
          throw new LockException(null, ErrorMsg.OP_NOT_ALLOWED_IN_TXN, queryPlan.getOperationName(),
            JavaUtils.txnIdToString(getCurrentTxnId()), queryPlan.getQueryId());
        }
    }
    /*
    Should we allow writing to non-transactional tables in an explicit transaction? The user may
    issue ROLLBACK but these tables won't rollback.
    Can do this by checking ReadEntity/WriteEntity to determine whether it's reading/writing
    any non acid and raise an appropriate error
    * Driver.acidSinks and Driver.transactionalInQuery can be used if any acid is in the query*/
  }
/**
* This modifies the logic wrt what operations are allowed in a transaction. Multi-statement
* transaction support is incomplete but it makes some Acid tests cases much easier to write.
*/
private boolean allowOperationInATransaction(QueryPlan queryPlan) {
//Acid and MM tables support Load Data with transactional semantics. This will allow Load Data
//in a txn assuming we can determine the target is a suitable table type.
if(queryPlan.getOperation() == HiveOperation.LOAD && queryPlan.getOutputs() != null && queryPlan.getOutputs().size() == 1) {
WriteEntity writeEntity = queryPlan.getOutputs().iterator().next();
if(AcidUtils.isTransactionalTable(writeEntity.getTable())) {
switch (writeEntity.getWriteType()) {
case INSERT:
//allow operation in a txn
return true;
case INSERT_OVERWRITE:
//see HIVE-18154
return false;
default:
//not relevant for LOAD
return false;
}
}
}
//todo: handle Insert Overwrite as well: HIVE-18154
return false;
}
  /**
   * Normally client should call {@link #acquireLocks(org.apache.hadoop.hive.ql.QueryPlan, org.apache.hadoop.hive.ql.Context, String)}.
   * Verifies the txn state, builds a lock request from the plan's read/write entities
   * plus any configured global locks, and submits it to the lock manager.
   * @param isBlocking if false, the method will return immediately; thus the locks may be in LockState.WAITING
   * @return null if no locks were needed
   */
  @VisibleForTesting
  LockState acquireLocks(QueryPlan plan, Context ctx, String username, boolean isBlocking) throws LockException {
    init();
    // Make sure we've built the lock manager
    getLockManager();
    verifyState(plan);
    queryId = plan.getQueryId();
    switch (plan.getOperation()) {
      case SET_AUTOCOMMIT:
        /**This is here for documentation purposes. This TM doesn't support this - only has one
         * mode of operation documented at {@link DbTxnManager#isExplicitTransaction}*/
        return null;
    }
    LockRequestBuilder rqstBuilder = new LockRequestBuilder(queryId);
    //link queryId to txnId
    LOG.info("Setting lock request transaction to " + JavaUtils.txnIdToString(txnId) + " for queryId=" + queryId);
    rqstBuilder.setTransactionId(txnId)
        .setUser(username);
    // zero-wait reads are enabled unless BOTH overwrite-X-lock and write-X-lock are on
    rqstBuilder.setZeroWaitReadEnabled(!conf.getBoolVar(HiveConf.ConfVars.TXN_OVERWRITE_X_LOCK) ||
        !conf.getBoolVar(HiveConf.ConfVars.TXN_WRITE_X_LOCK));
    // Make sure we need locks.  It's possible there's nothing to lock in
    // this operation.
    if(plan.getInputs().isEmpty() && plan.getOutputs().isEmpty()) {
      LOG.debug("No locks needed for queryId=" + queryId);
      return null;
    }
    List<LockComponent> lockComponents = AcidUtils.makeLockComponents(plan.getOutputs(), plan.getInputs(),
        ctx.getOperation(), conf);
    lockComponents.addAll(getGlobalLocks(ctx.getConf()));
    //It's possible there's nothing to lock even if we have w/r entities.
    if (lockComponents.isEmpty()) {
      LOG.debug("No locks needed for queryId=" + queryId);
      return null;
    }
    rqstBuilder.addLockComponents(lockComponents);
    List<HiveLock> locks = new ArrayList<HiveLock>(1);
    LockState lockState = lockMgr.lock(rqstBuilder.build(), queryId, isBlocking, locks);
    ctx.setHiveLocks(locks);
    return lockState;
  }
private Collection<LockComponent> getGlobalLocks(Configuration conf) {
String lockNames = conf.get(Constants.HIVE_QUERY_EXCLUSIVE_LOCK);
if (StringUtils.isEmpty(lockNames)) {
return Collections.emptyList();
}
List<LockComponent> globalLocks = new ArrayList<LockComponent>();
for (String lockName : lockNames.split(",")) {
lockName = lockName.trim();
if (StringUtils.isEmpty(lockName)) {
continue;
}
LockComponentBuilder compBuilder = new LockComponentBuilder();
compBuilder.setExclusive();
compBuilder.setOperationType(DataOperationType.UPDATE);
compBuilder.setDbName(GLOBAL_LOCKS);
compBuilder.setTableName(lockName);
globalLocks.add(compBuilder.build());
LOG.debug("Adding global lock: " + lockName);
}
return globalLocks;
}
/**
* @param delay time to delay for first heartbeat
*/
@VisibleForTesting
void acquireLocksWithHeartbeatDelay(QueryPlan plan, Context ctx, String username, long delay) throws LockException {
LockState ls = acquireLocks(plan, ctx, username, true);
if (ls != null && !isTxnOpen()) { // If there's no lock, we don't need to do heartbeat
// Start heartbeat for read-only queries which don't open transactions but requires locks.
// For those that require transactions, the heartbeat has already been started in openTxn.
ctx.setHeartbeater(startHeartbeat(delay));
}
}
@Override
public void releaseLocks(List<HiveLock> hiveLocks) throws LockException {
if (lockMgr != null) {
stopHeartbeat();
lockMgr.releaseLocks(hiveLocks);
}
}
  /**
   * Clears local lock bookkeeping and stops the heartbeat. Shared by commitTxn and
   * replCommitTxn so both paths perform identical cleanup.
   */
  private void clearLocksAndHB() throws LockException {
    lockMgr.clearLocalLockRecords();
    stopHeartbeat();
  }
  /**
   * Resets all per-transaction state to "no open transaction" (txnId 0 means none;
   * the metastore sequence starts at 1). Shared by commit, rollback and repl paths.
   */
  private void resetTxnInfo() {
    txnId = 0;
    stmtId = -1;
    numStatements = 0;
    tableWriteIds.clear();
    queryId = null;
    replPolicy = null;
  }
  /**
   * Commits a replication transaction in the metastore. For transactions started
   * internally by the repl load command (rqst has ReplLastIdInfo set), also stops the
   * heartbeat before the commit and clears local txn state afterwards.
   * @throws LockException if the txn is missing/aborted or the metastore is unreachable
   */
  @Override
  public void replCommitTxn(CommitTxnRequest rqst) throws LockException {
    try {
      if (rqst.isSetReplLastIdInfo()) {
        if (!isTxnOpen()) {
          throw new RuntimeException("Attempt to commit before opening a transaction");
        }
        // For transaction started internally by repl load command, heartbeat needs to be stopped.
        clearLocksAndHB();
      }
      getMS().commitTxn(rqst);
    } catch (NoSuchTxnException e) {
      LOG.error("Metastore could not find " + JavaUtils.txnIdToString(rqst.getTxnid()));
      throw new LockException(e, ErrorMsg.TXN_NO_SUCH_TRANSACTION, JavaUtils.txnIdToString(rqst.getTxnid()));
    } catch (TxnAbortedException e) {
      LockException le = new LockException(e, ErrorMsg.TXN_ABORTED,
          JavaUtils.txnIdToString(rqst.getTxnid()), e.getMessage());
      LOG.error(le.getMessage());
      throw le;
    } catch (TException e) {
      throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), e);
    } finally {
      if (rqst.isSetReplLastIdInfo()) {
        // For transaction started internally by repl load command, needs to clear the txn info.
        resetTxnInfo();
      }
    }
  }
  /**
   * Commits the currently open transaction in the metastore, clearing locks and
   * stopping the heartbeat first, and resetting all local txn state afterwards
   * (even on failure).
   * @throws LockException if the txn is missing/aborted or the metastore is unreachable
   */
  @Override
  public void commitTxn() throws LockException {
    if (!isTxnOpen()) {
      throw new RuntimeException("Attempt to commit before opening a transaction");
    }
    try {
      // do all new clear in clearLocksAndHB method to make sure that same code is there for replCommitTxn flow.
      clearLocksAndHB();
      LOG.debug("Committing txn " + JavaUtils.txnIdToString(txnId));
      CommitTxnRequest commitTxnRequest = new CommitTxnRequest(txnId);
      commitTxnRequest.setExclWriteEnabled(conf.getBoolVar(HiveConf.ConfVars.TXN_WRITE_X_LOCK));
      if (replPolicy != null) {
        // commit under replication: tag the request with the repl policy
        commitTxnRequest.setReplPolicy(replPolicy);
        commitTxnRequest.setTxn_type(TxnType.DEFAULT);
      }
      getMS().commitTxn(commitTxnRequest);
    } catch (NoSuchTxnException e) {
      LOG.error("Metastore could not find " + JavaUtils.txnIdToString(txnId));
      throw new LockException(e, ErrorMsg.TXN_NO_SUCH_TRANSACTION, JavaUtils.txnIdToString(txnId));
    } catch (TxnAbortedException e) {
      LockException le = new LockException(e, ErrorMsg.TXN_ABORTED, JavaUtils.txnIdToString(txnId), e.getMessage());
      LOG.error(le.getMessage());
      throw le;
    } catch (TException e) {
      throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(),
          e);
    } finally {
      // do all new reset in resetTxnInfo method to make sure that same code is there for replCommitTxn flow.
      resetTxnInfo();
    }
  }
  /**
   * Rolls back a replication transaction identified by its source txn id.
   * @throws LockException if the txn is missing/aborted or the metastore is unreachable
   */
  @Override
  public void replRollbackTxn(String replPolicy, long srcTxnId) throws LockException {
    try {
      getMS().replRollbackTxn(srcTxnId, replPolicy, TxnType.REPL_CREATED);
    } catch (NoSuchTxnException e) {
      LOG.error("Metastore could not find " + JavaUtils.txnIdToString(srcTxnId));
      throw new LockException(e, ErrorMsg.TXN_NO_SUCH_TRANSACTION, JavaUtils.txnIdToString(srcTxnId));
    } catch (TxnAbortedException e) {
      LockException le = new LockException(e, ErrorMsg.TXN_ABORTED, JavaUtils.txnIdToString(srcTxnId), e.getMessage());
      LOG.error(le.getMessage());
      throw le;
    } catch (TException e) {
      throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), e);
    }
  }
  /**
   * Rolls back the currently open transaction: stops the heartbeat, clears local lock
   * records, and asks the metastore to roll back (via the replication path when a repl
   * policy is set). Local txn state is reset even on failure.
   * @throws LockException if the txn is missing/aborted or the metastore is unreachable
   */
  @Override
  public void rollbackTxn() throws LockException {
    if (!isTxnOpen()) {
      throw new RuntimeException("Attempt to rollback before opening a transaction");
    }
    stopHeartbeat();
    try {
      lockMgr.clearLocalLockRecords();
      LOG.debug("Rolling back " + JavaUtils.txnIdToString(txnId));
      // Re-checking as txn could have been closed, in the meantime, by a competing thread.
      if (isTxnOpen()) {
        if (replPolicy != null) {
          getMS().replRollbackTxn(txnId, replPolicy, TxnType.DEFAULT);
        } else {
          getMS().rollbackTxn(txnId);
        }
      } else {
        LOG.warn("Transaction is already closed.");
      }
    } catch (NoSuchTxnException e) {
      LOG.error("Metastore could not find " + JavaUtils.txnIdToString(txnId));
      throw new LockException(e, ErrorMsg.TXN_NO_SUCH_TRANSACTION, JavaUtils.txnIdToString(txnId));
    } catch(TxnAbortedException e) {
      throw new LockException(e, ErrorMsg.TXN_ABORTED, JavaUtils.txnIdToString(txnId));
    } catch (TException e) {
      throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(),
          e);
    } finally {
      resetTxnInfo();
    }
  }
  /**
   * Forwards a replicated table's valid-write-id state to the metastore, wrapping any
   * Thrift failure as a LockException.
   */
  @Override
  public void replTableWriteIdState(String validWriteIdList, String dbName, String tableName, List<String> partNames)
      throws LockException {
    try {
      getMS().replTableWriteIdState(validWriteIdList, dbName, tableName, partNames);
    } catch (TException e) {
      throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), e);
    }
  }
/**
 * Sends a heartbeat to the metastore for the open transaction and/or every
 * currently held lock, so neither is timed out server-side.
 *
 * @throws LockException if a lock or txn no longer exists, the txn was aborted,
 *         or the metastore cannot be reached
 */
@Override
public void heartbeat() throws LockException {
  List<HiveLock> locks;
  if(isTxnOpen()) {
    // Create one dummy lock so we can go through the loop below, though we only
    //really need txnId
    locks = Collections.singletonList(new DbLockManager.DbHiveLock(0L));
  }
  else {
    locks = lockMgr.getLocks(false, false);
  }
  if(LOG.isInfoEnabled()) {
    StringBuilder sb = new StringBuilder("Sending heartbeat for ")
        .append(JavaUtils.txnIdToString(txnId)).append(" and");
    for(HiveLock lock : locks) {
      sb.append(" ").append(lock.toString());
    }
    LOG.info(sb.toString());
  }
  if(!isTxnOpen() && locks.isEmpty()) {
    // No locks, no txn, we outta here.
    LOG.debug("No need to send heartbeat as there is no transaction and no locks.");
    return;
  }
  for (HiveLock lock : locks) {
    long lockId = ((DbLockManager.DbHiveLock)lock).lockId;
    try {
      /**
       * This relies on the ThreadLocal caching, which implies that the same {@link IMetaStoreClient},
       * in particular the Thrift connection it uses is never shared between threads
       */
      getMS().heartbeat(txnId, lockId);
    } catch (NoSuchLockException e) {
      LOG.error("Unable to find lock " + JavaUtils.lockIdToString(lockId));
      throw new LockException(e, ErrorMsg.LOCK_NO_SUCH_LOCK, JavaUtils.lockIdToString(lockId));
    } catch (NoSuchTxnException e) {
      LOG.error("Unable to find transaction " + JavaUtils.txnIdToString(txnId));
      throw new LockException(e, ErrorMsg.TXN_NO_SUCH_TRANSACTION, JavaUtils.txnIdToString(txnId));
    } catch (TxnAbortedException e) {
      LockException le = new LockException(e, ErrorMsg.TXN_ABORTED, JavaUtils.txnIdToString(txnId), e.getMessage());
      LOG.error(le.getMessage());
      throw le;
    } catch (TException e) {
      // Include txn id and lock in the message: this loop covers multiple locks.
      throw new LockException(
          ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg() + "(" + JavaUtils.txnIdToString(txnId)
          + "," + lock.toString() + ")", e);
    }
  }
}
/**
 * Start the heartbeater threadpool and return the task.
 * @param initialDelay time to delay before first execution, in milliseconds
 * @return heartbeater
 * @throws LockException if the current user cannot be resolved or the
 *         heartbeat interval is not configured
 */
private Heartbeater startHeartbeat(long initialDelay) throws LockException {
  long heartbeatInterval = getHeartbeatInterval(conf);
  assert heartbeatInterval > 0;
  UserGroupInformation currentUser;
  try {
    currentUser = UserGroupInformation.getCurrentUser();
  } catch (IOException e) {
    throw new LockException("error while getting current user,", e);
  }
  // Acquire the lock *before* entering try: if lock() itself failed, the finally
  // block must not attempt to unlock a lock this thread does not hold.
  heartbeatTaskLock.lock();
  try {
    if (heartbeatTask != null) {
      throw new IllegalStateException("Heartbeater is already started.");
    }
    Heartbeater heartbeater = new Heartbeater(this, conf, queryId, currentUser);
    heartbeatTask = startHeartbeat(initialDelay, heartbeatInterval, heartbeater);
    LOG.debug("Started heartbeat with delay/interval = " + initialDelay + "/" + heartbeatInterval +
        " " + TimeUnit.MILLISECONDS + " for query: " + queryId);
    return heartbeater;
  } finally {
    heartbeatTaskLock.unlock();
  }
}
/**
 * Schedules the given heartbeater at a fixed rate on the shared executor.
 * A zero initialDelay is randomized so concurrently launched queries do not
 * heartbeat in lock-step, except in the negative-testing mode where it stays 0.
 */
private ScheduledFuture<?> startHeartbeat(long initialDelay, long heartbeatInterval, Runnable heartbeater) {
  boolean failHeartbeaterTestMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)
      && conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODEFAILHEARTBEATER);
  if (failHeartbeaterTestMode) {
    // For negative testing purpose..
    initialDelay = 0;
  } else if (initialDelay == 0) {
    /*make initialDelay a random number in [0, 0.75*heartbeatInterval] so that if a lot
    of queries land on the server at the same time and all get blocked on lack of
    resources, that they all don't start heartbeating at the same time*/
    initialDelay = (long) Math.floor(heartbeatInterval * 0.75 * Math.random());
  }
  return heartbeatExecutorService.scheduleAtFixedRate(
      heartbeater, initialDelay, heartbeatInterval, TimeUnit.MILLISECONDS);
}
/**
 * Cancels the scheduled heartbeat task (if any) and waits up to ~30 seconds
 * for it to terminate. Safe to call when no heartbeat is running.
 */
private void stopHeartbeat() {
  if (heartbeatTask == null) {
    // avoid unnecessary locking if the field is null
    return;
  }
  boolean isLockAcquired = false;
  try {
    // The lock should not be held by other thread trying to stop the heartbeat for more than 31 seconds
    isLockAcquired = heartbeatTaskLock.tryLock(31000, TimeUnit.MILLISECONDS);
  } catch (InterruptedException e) {
    // Restore the interrupt status so callers up the stack can observe it;
    // safe to go on (isLockAcquired stays false, so the cancel loop is skipped).
    Thread.currentThread().interrupt();
  }
  try {
    if (isLockAcquired && heartbeatTask != null) {
      heartbeatTask.cancel(true);
      long startTime = System.currentTimeMillis();
      long sleepInterval = 100;
      while (!heartbeatTask.isCancelled() && !heartbeatTask.isDone()) {
        // We will wait for 30 seconds for the task to be cancelled.
        // If it's still not cancelled (unlikely), we will just move on.
        long now = System.currentTimeMillis();
        if (now - startTime > 30000) {
          LOG.warn("Heartbeat task cannot be cancelled for unknown reason. QueryId: " + queryId);
          break;
        }
        try {
          Thread.sleep(sleepInterval);
        } catch (InterruptedException e) {
          // Preserve the interrupt status instead of swallowing it, and give up
          // waiting (the wait is best-effort anyway, capped at 30s above).
          Thread.currentThread().interrupt();
          break;
        }
        sleepInterval *= 2; // exponential back-off while polling for cancellation
      }
      if (heartbeatTask.isCancelled() || heartbeatTask.isDone()) {
        LOG.info("Stopped heartbeat for query: " + queryId);
      }
      heartbeatTask = null;
      queryId = null;
    }
  } finally {
    if (isLockAcquired) {
      heartbeatTaskLock.unlock();
    }
  }
}
/** Fetches the set of currently open transactions from the metastore. */
@Override
public GetOpenTxnsResponse getOpenTxns() throws LockException {
  try {
    return getMS().getOpenTxns();
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/**
 * Returns the list of transactions valid from this (open) txn's point of view.
 *
 * @throws LockException if the metastore cannot be reached
 */
@Override
public ValidTxnList getValidTxns() throws LockException {
  assert isTxnOpen();
  init();
  try {
    return getMS().getValidTxns(txnId);
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/**
 * Returns the list of transactions valid from this (open) txn's point of view,
 * excluding transactions of the given types.
 *
 * @throws LockException if the metastore cannot be reached
 */
@Override
public ValidTxnList getValidTxns(List<TxnType> excludeTxnTypes) throws LockException {
  assert isTxnOpen();
  init();
  try {
    return getMS().getValidTxns(txnId, excludeTxnTypes);
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/**
 * Builds the valid write-id list for the given tables, scoped to the supplied
 * valid-transaction snapshot and keyed by this txn's id.
 *
 * @throws LockException if the metastore cannot be reached
 */
@Override
public ValidTxnWriteIdList getValidWriteIds(List<String> tableList,
    String validTxnList) throws LockException {
  assert isTxnOpen();
  assert validTxnList != null && !validTxnList.isEmpty();
  try {
    return TxnCommonUtils.createValidTxnWriteIdList(
        txnId, getMS().getValidWriteIds(tableList, validTxnList));
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/** Returns the class-name constant identifying this transaction manager. */
@Override
public String getTxnManagerName() {
  return CLASS_NAME;
}
/** Explicit LOCK/UNLOCK statements are not supported by this manager. */
@Override
public boolean supportsExplicitLock() {
  return false;
}
/**
 * Always rejects explicit LOCK TABLE. NOTE(review): the super call before the
 * unconditional throw presumably performs validation/error-reporting side
 * effects — confirm against the base class before removing it.
 */
@Override
public int lockTable(Hive db, LockTableDesc lockTbl) throws HiveException {
  super.lockTable(db, lockTbl);
  throw new UnsupportedOperationException();
}
/**
 * Always rejects explicit UNLOCK TABLE. NOTE(review): super call kept for its
 * presumed side effects — confirm against the base class.
 */
@Override
public int unlockTable(Hive hiveDB, UnlockTableDesc unlockTbl) throws HiveException {
  super.unlockTable(hiveDB, unlockTbl);
  throw new UnsupportedOperationException();
}
/**
 * Always rejects explicit LOCK DATABASE. NOTE(review): super call kept for its
 * presumed side effects — confirm against the base class.
 */
@Override
public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveException {
  super.lockDatabase(hiveDB, lockDb);
  throw new UnsupportedOperationException();
}
/**
 * Always rejects explicit UNLOCK DATABASE. NOTE(review): super call kept for
 * its presumed side effects — confirm against the base class.
 */
@Override
public int unlockDatabase(Hive hiveDB, UnlockDatabaseDesc unlockDb) throws HiveException {
  super.unlockDatabase(hiveDB, unlockDb);
  throw new UnsupportedOperationException();
}
/** SHOW LOCKS output uses the new (extended) format with this manager. */
@Override
public boolean useNewShowLocksFormat() {
  return true;
}
/** This transaction manager supports ACID operations. */
@Override
public boolean supportsAcid() {
  return true;
}
/**
 * In an explicit txn start_transaction is the 1st statement and we record the snapshot at the
 * start of the txn for Snapshot Isolation. For Read Committed (not supported yet) we'd record
 * it before executing each statement (but after lock acquisition if using lock based concurrency
 * control).
 * For implicit txn, the stmt that triggered/started the txn is the first statement
 */
@Override
public boolean recordSnapshot(QueryPlan queryPlan) {
  assert isTxnOpen();
  assert numStatements > 0 : "was acquireLocks() called already?";
  if (queryPlan.getOperation() == HiveOperation.START_TRANSACTION) {
    // Start of an explicit txn: this is where the snapshot is recorded.
    assert isExplicitTransaction;
    assert numStatements == 1;
    return true;
  }
  if (isExplicitTransaction) {
    // Non-first statement of an explicit txn: the snapshot was taken at BEGIN.
    return false;
  }
  assert numStatements == 1 : "numStatements=" + numStatements + " in implicit txn";
  // Implicit txn: record the snapshot only when its single statement uses ACID resources.
  return queryPlan.hasAcidResourcesInQuery();
}
/**
 * Returns true iff a transaction is open and it was started implicitly
 * (i.e. not via an explicit START TRANSACTION).
 */
@Override
public boolean isImplicitTransactionOpen() {
  if (!isTxnOpen()) {
    // Some commands like "show databases" don't start implicit transactions.
    return false;
  }
  if (isExplicitTransaction) {
    return false;
  }
  // An implicit txn spans exactly one statement.
  assert numStatements == 1 : "numStatements=" + numStatements;
  return true;
}
/**
 * Tears down this txn manager: stops the heartbeat, rolls back any open
 * transaction, and closes the lock manager. Exceptions are logged and
 * swallowed because this runs during teardown with no caller to recover.
 */
@Override
protected void destruct() {
  try {
    stopHeartbeat();
    if (isTxnOpen()) {
      rollbackTxn();
    }
    if (lockMgr != null) {
      lockMgr.close();
    }
  } catch (Exception e) {
    // Pass the exception to the logger so the stack trace is not lost.
    LOG.error("Caught exception " + e.getClass().getName() + " with message <" + e.getMessage()
        + ">, swallowing as there is nothing we can do with it.", e);
    // Not much we can do about it here.
  }
}
/**
 * Validates that a HiveConf has been set and (lazily) initializes the shared
 * heartbeat executor pool sized by HIVE_TXN_HEARTBEAT_THREADPOOL_SIZE.
 */
private void init() throws LockException {
  if (conf == null) {
    throw new RuntimeException("Must call setHiveConf before any other methods.");
  }
  initHeartbeatExecutorService(conf.getIntVar(HiveConf.ConfVars.HIVE_TXN_HEARTBEAT_THREADPOOL_SIZE));
}
/**
 * Lazily creates the shared heartbeat thread pool (once per JVM) and registers
 * a shutdown hook that stops it.
 */
private synchronized static void initHeartbeatExecutorService(int corePoolSize) {
  if (heartbeatExecutorService != null) {
    return;
  }
  // Executed only once: later callers see the non-null service and return above.
  final AtomicInteger threadCounter = new AtomicInteger();
  heartbeatExecutorService = Executors.newScheduledThreadPool(corePoolSize,
      r -> new HeartbeaterThread(r, "Heartbeater-" + threadCounter.getAndIncrement()));
  // Drop cancelled heartbeat tasks from the work queue immediately.
  ((ScheduledThreadPoolExecutor) heartbeatExecutorService).setRemoveOnCancelPolicy(true);
  ShutdownHookManager.addShutdownHook(DbTxnManager::shutdownHeartbeatExecutorService, SHUTDOWN_HOOK_PRIORITY);
}
/** Stops the shared heartbeat thread pool if it is running (shutdown-hook target). */
private synchronized static void shutdownHeartbeatExecutorService() {
  if (heartbeatExecutorService == null || heartbeatExecutorService.isShutdown()) {
    return;
  }
  LOG.info("Shutting down Heartbeater thread pool.");
  heartbeatExecutorService.shutdown();
}
/** Daemon thread used by the heartbeat pool, so it never blocks JVM exit. */
public static class HeartbeaterThread extends Thread {
  HeartbeaterThread(Runnable target, String name) {
    super(target, name);
    setDaemon(true);
  }
}
/** A transaction is considered open iff a positive txn id has been assigned. */
@Override
public boolean isTxnOpen() {
  return txnId > 0;
}
/** Returns the current txn id (non-positive means no open txn — see isTxnOpen()). */
@Override
public long getCurrentTxnId() {
  return txnId;
}
/**
 * Returns the current statement id within the open transaction and advances
 * the counter (post-increment).
 */
@Override
public int getStmtIdAndIncrement() {
  assert isTxnOpen();
  return stmtId++;
}
/** Returns the current statement id within the open transaction without advancing it. */
@Override
public int getCurrentStmtId() {
  assert isTxnOpen();
  return stmtId;
}
/**
 * Returns the write id for dbName.tableName within the open txn, allocating
 * one from the metastore if it has not been allocated yet.
 */
@Override
public long getTableWriteId(String dbName, String tableName) throws LockException {
  assert isTxnOpen();
  return getTableWriteId(dbName, tableName, true);
}
/**
 * Returns the write id already allocated for dbName.tableName in this txn,
 * or 0 if none has been allocated yet (never allocates a new one).
 */
@Override
public long getAllocatedTableWriteId(String dbName, String tableName) throws LockException {
  assert isTxnOpen();
  // Calls getTableWriteId() with allocateIfNotYet being false
  // to return 0 if the dbName:tableName's writeId is not yet allocated.
  // This happens when the current context is before
  // Driver.acquireLocks() is called.
  return getTableWriteId(dbName, tableName, false);
}
/**
 * Returns the write id for (dbName, tableName) in the current transaction,
 * caching it locally so the metastore is consulted at most once per table.
 *
 * @param allocateIfNotYet when false, returns 0 instead of allocating a new write id
 * @throws LockException if the metastore cannot be reached
 */
private long getTableWriteId(
    String dbName, String tableName, boolean allocateIfNotYet) throws LockException {
  String fullTableName = AcidUtils.getFullTableName(dbName, tableName);
  // Single map lookup instead of containsKey() followed by get().
  Long cachedWriteId = tableWriteIds.get(fullTableName);
  if (cachedWriteId != null) {
    return cachedWriteId;
  }
  if (!allocateIfNotYet) {
    return 0;
  }
  try {
    long writeId = getMS().allocateTableWriteId(txnId, dbName, tableName);
    LOG.debug("Allocated write ID {} for {}.{}", writeId, dbName, tableName);
    tableWriteIds.put(fullTableName, writeId);
    return writeId;
  } catch (TException e) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), e);
  }
}
/**
 * Tries to take the materialization-rebuild lock for dbName.tableName on behalf
 * of txnId and, when acquired, starts a dedicated heartbeater that keeps
 * refreshing it until the rebuild finishes.
 *
 * @return the metastore's lock response (state may be other than ACQUIRED)
 */
@Override
public LockResponse acquireMaterializationRebuildLock(String dbName, String tableName, long txnId) throws LockException {
  // Acquire lock
  LockResponse lockResponse;
  try {
    lockResponse = getMS().lockMaterializationRebuild(dbName, tableName, txnId);
  } catch (TException e) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), e);
  }
  if (lockResponse.getState() == LockState.ACQUIRED) {
    // If lock response is ACQUIRED, we can create the heartbeater
    long initialDelay = 0L;
    long heartbeatInterval = getHeartbeatInterval(conf);
    assert heartbeatInterval > 0;
    MaterializationRebuildLockHeartbeater heartbeater = new MaterializationRebuildLockHeartbeater(
        this, dbName, tableName, queryId, txnId);
    ScheduledFuture<?> task = startHeartbeat(initialDelay, heartbeatInterval, heartbeater);
    // Hand the task handle to the heartbeater so it can cancel itself later.
    heartbeater.task.set(task);
    LOG.debug("Started heartbeat for materialization rebuild lock for {} with delay/interval = {}/{} {} for query: {}",
        AcidUtils.getFullTableName(dbName, tableName), initialDelay, heartbeatInterval, TimeUnit.MILLISECONDS, queryId);
  }
  return lockResponse;
}
/** Asks the metastore for the latest txn id in conflict with the current txn. */
@Override
public long getLatestTxnIdInConflict() throws LockException {
  try {
    return getMS().getLatestTxnIdInConflict(txnId);
  } catch (TException te) {
    throw new LockException(te);
  }
}
/**
 * Refreshes the materialization-rebuild lock; returns false once the lock can
 * no longer be refreshed (i.e. the rebuild operation has finished).
 */
private boolean heartbeatMaterializationRebuildLock(String dbName, String tableName, long txnId) throws LockException {
  try {
    return getMS().heartbeatLockMaterializationRebuild(dbName, tableName, txnId);
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/**
 * Replicates a batch of source-cluster txn-to-write-id mappings for the given
 * table onto the target metastore.
 *
 * @throws LockException if the metastore cannot be reached
 */
@Override
public void replAllocateTableWriteIdsBatch(String dbName, String tableName, String replPolicy,
    List<TxnToWriteId> srcTxnToWriteIdList) throws LockException {
  try {
    getMS().replAllocateTableWriteIdsBatch(dbName, tableName, replPolicy, srcTxnToWriteIdList);
  } catch (TException te) {
    throw new LockException(ErrorMsg.METASTORE_COMMUNICATION_FAILED.getMsg(), te);
  }
}
/**
 * Computes the heartbeat send interval: half of HIVE_TXN_TIMEOUT (converted to
 * milliseconds) as a safety factor.
 *
 * @throws LockException if HIVE_TXN_TIMEOUT is unset/zero
 */
public static long getHeartbeatInterval(Configuration conf) throws LockException {
  // Retrieve HIVE_TXN_TIMEOUT in MILLISECONDS (it's defined as SECONDS),
  // then divide it by 2 to give us a safety factor.
  long timeoutMs = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, TimeUnit.MILLISECONDS);
  long interval = timeoutMs / 2;
  if (interval == 0) {
    throw new LockException(HiveConf.ConfVars.HIVE_TXN_TIMEOUT.toString() + " not set," +
        " heartbeats won't be sent");
  }
  return interval;
}
/**
 * Heartbeater thread: periodically heartbeats the txn/locks on behalf of a
 * query, impersonating the user who started it. A failed heartbeat is stored
 * in {@link #lockException} for the driver to poll.
 */
public static class Heartbeater implements Runnable {
  // Immutable collaborators: set once in the constructor.
  private final HiveTxnManager txnMgr;
  private final HiveConf conf;
  private final UserGroupInformation currentUser;
  // Most recent heartbeat failure, if any; polled via getLockException().
  LockException lockException;
  private final String queryId;
  public LockException getLockException() {
    return lockException;
  }
  /**
   * @param txnMgr transaction manager for this operation
   * @param conf Hive configuration
   * @param queryId id of the query whose txn/locks are heartbeated
   * @param currentUser user to impersonate when calling the metastore
   */
  Heartbeater(HiveTxnManager txnMgr, HiveConf conf, String queryId,
      UserGroupInformation currentUser) {
    this.txnMgr = txnMgr;
    this.conf = conf;
    this.currentUser = currentUser;
    lockException = null;
    this.queryId = queryId;
  }
  /**
   * Send a heartbeat to the metastore for locks and transactions.
   */
  @Override
  public void run() {
    try {
      // For negative testing purpose..
      if(conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST) && conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODEFAILHEARTBEATER)) {
        throw new LockException(HiveConf.ConfVars.HIVETESTMODEFAILHEARTBEATER.name() + "=true");
      }
      LOG.debug("Heartbeating...for currentUser: " + currentUser);
      currentUser.doAs((PrivilegedExceptionAction<Object>) () -> {
        txnMgr.heartbeat();
        return null;
      });
    } catch (LockException e) {
      // Log with the exception so the stack trace is preserved.
      LOG.error("Failed trying to heartbeat queryId=" + queryId + ", currentUser: "
          + currentUser + ": " + e.getMessage(), e);
      lockException = e;
    } catch (Throwable t) {
      String errorMsg = "Failed trying to heartbeat queryId=" + queryId + ", currentUser: "
          + currentUser + ": " + t.getMessage();
      LOG.error(errorMsg, t);
      lockException = new LockException(errorMsg, t);
    }
  }
}
/**
 * MaterializationRebuildLockHeartbeater is a runnable that will be run in a
 * ScheduledExecutorService in given intervals. Once the heartbeat cannot
 * refresh the lock anymore, it will interrupt itself.
 */
private static class MaterializationRebuildLockHeartbeater implements Runnable {
  private final DbTxnManager txnMgr;
  private final String dbName;
  private final String tableName;
  private final String queryId;
  private final long txnId;
  // Handle to this runnable's own scheduled task, set by the scheduler after
  // submission, so the heartbeater can cancel itself when refresh fails.
  private final AtomicReference<ScheduledFuture<?>> task;
  MaterializationRebuildLockHeartbeater(DbTxnManager txnMgr, String dbName, String tableName,
      String queryId, long txnId) {
    this.txnMgr = txnMgr;
    this.queryId = queryId;
    this.dbName = dbName;
    this.tableName = tableName;
    this.txnId = txnId;
    this.task = new AtomicReference<>();
  }
  /**
   * Send a heartbeat to the metastore for locks and transactions.
   */
  @Override
  public void run() {
    LOG.trace("Heartbeating materialization rebuild lock for {} for query: {}",
        AcidUtils.getFullTableName(dbName, tableName), queryId);
    boolean refreshed;
    try {
      refreshed = txnMgr.heartbeatMaterializationRebuildLock(dbName, tableName, txnId);
    } catch (LockException e) {
      LOG.error("Failed trying to acquire lock", e);
      throw new RuntimeException(e);
    }
    if (!refreshed) {
      // We could not heartbeat the lock, i.e., the operation has finished,
      // hence we interrupt this work
      ScheduledFuture<?> t = task.get();
      if (t != null) {
        t.cancel(false);
        LOG.debug("Stopped heartbeat for materialization rebuild lock for {} for query: {}",
            AcidUtils.getFullTableName(dbName, tableName), queryId);
      }
    }
  }
}
/** Returns the id of the query currently associated with this txn manager. */
@Override
public String getQueryid() {
  return queryId;
}
}
| |
/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.authenticator.mutualssl;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.BundleContext;
import org.osgi.util.tracker.ServiceTracker;
import org.wso2.carbon.core.security.AuthenticatorsConfiguration;
import org.wso2.carbon.core.services.authentication.CarbonServerAuthenticator;
import org.wso2.carbon.core.services.util.CarbonAuthenticationUtil;
import org.wso2.carbon.identity.authenticator.mutualssl.internal.MutualSSLAuthenticatorServiceComponent;
import org.wso2.carbon.user.api.TenantManager;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.utils.AuthenticationObserver;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.lang.StringUtils;
/**
 * Authenticator for certificate based two-way (mutual SSL) authentication.
 * The client is identified by the X509 certificate presented on the TLS
 * connection; the username is carried either in an HTTP header (Base64
 * encoded) or in a SOAP header under {@link #MUTUAL_SSL_URL}.
 */
public class MutualSSLAuthenticator implements CarbonServerAuthenticator {

    private static final int DEFAULT_PRIORITY_LEVEL = 5;
    private static final String AUTHENTICATOR_NAME = "MutualSSLAuthenticator";
    private static final String MUTUAL_SSL_URL = "http://mutualssl.carbon.wso2.org";

    /**
     * Header name of the username for mutual ssl authentication
     */
    private static final String USERNAME_HEADER = "UsernameHeader";

    /**
     * Configuration parameter name for trusted certificates list
     */
    private static final String WHITE_LIST = "WhiteList";

    /**
     * Configuration parameter name for enabling and disabling the trusted certificates list
     */
    private static final String WHITE_LIST_ENABLED = "WhiteListEnabled";

    /**
     * Attribute name for reading client certificate in the request
     */
    private static final String JAVAX_SERVLET_REQUEST_CERTIFICATE = "javax.servlet.request.X509Certificate";

    /**
     * Character encoding for Base64 to String conversions
     */
    private static final String CHARACTER_ENCODING = "UTF-8";

    /**
     * Logger for the class
     */
    private static final Log log = LogFactory.getLog(MutualSSLAuthenticator.class);

    private static String usernameHeaderName = "UserName";
    private static String[] whiteList;
    private static boolean whiteListEnabled = false;
    private static boolean authenticatorInitialized = false;

    /**
     * Initialize Mutual SSL Authenticator Configuration.
     * Leaves {@code authenticatorInitialized} false when the white list is
     * enabled but empty, so the authenticator never handles requests in a
     * misconfigured (reject-everyone) state.
     */
    private static synchronized void init() {
        AuthenticatorsConfiguration authenticatorsConfiguration = AuthenticatorsConfiguration.getInstance();
        // Read configuration for mutual ssl authenticator
        AuthenticatorsConfiguration.AuthenticatorConfig authenticatorConfig =
                authenticatorsConfiguration.getAuthenticatorConfig(AUTHENTICATOR_NAME);
        if (authenticatorConfig != null) {
            Map<String, String> configParameters = authenticatorConfig.getParameters();
            if (configParameters != null) {
                if (configParameters.containsKey(USERNAME_HEADER)) {
                    usernameHeaderName = configParameters.get(USERNAME_HEADER);
                }
                if (configParameters.containsKey(WHITE_LIST_ENABLED)) {
                    whiteListEnabled = Boolean.parseBoolean(configParameters.get(WHITE_LIST_ENABLED));
                    if (log.isDebugEnabled()) {
                        log.debug("Enabling trusted client certificates list : " + whiteListEnabled);
                    }
                }
                if (whiteListEnabled) {
                    // List of trusted thumbprints for clients is enabled
                    if (configParameters.containsKey(WHITE_LIST)) {
                        whiteList = configParameters.get(WHITE_LIST).trim().split(",");
                        // Remove whitespace around each thumbprint of the white list, in place.
                        for (int i = 0; i < whiteList.length; i++) {
                            whiteList[i] = whiteList[i].trim();
                            if (log.isDebugEnabled()) {
                                log.debug("Client thumbprint " + whiteList[i] + " added to the white list");
                            }
                        }
                    } else {
                        log.error("Trusted client certificates list is enabled but empty");
                        return;
                    }
                }
                authenticatorInitialized = true;
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug(AUTHENTICATOR_NAME + " configuration is not set for initialization");
            }
        }
    }

    @Override
    public int getPriority() {
        AuthenticatorsConfiguration authenticatorsConfiguration =
                AuthenticatorsConfiguration.getInstance();
        AuthenticatorsConfiguration.AuthenticatorConfig authenticatorConfig =
                authenticatorsConfiguration.getAuthenticatorConfig(AUTHENTICATOR_NAME);
        if (authenticatorConfig != null && authenticatorConfig.getPriority() > 0) {
            return authenticatorConfig.getPriority();
        }
        return DEFAULT_PRIORITY_LEVEL;
    }

    @Override
    public boolean isDisabled() {
        AuthenticatorsConfiguration authenticatorsConfiguration =
                AuthenticatorsConfiguration.getInstance();
        AuthenticatorsConfiguration.AuthenticatorConfig authenticatorConfig =
                authenticatorsConfiguration.getAuthenticatorConfig(AUTHENTICATOR_NAME);
        if (authenticatorConfig != null) {
            return authenticatorConfig.isDisabled();
        }
        return false;
    }

    @Override
    public boolean authenticateWithRememberMe(MessageContext msgCxt) {
        // Remember-me is not applicable to certificate based authentication.
        return false;
    }

    @Override
    public String getAuthenticatorName() {
        return AUTHENTICATOR_NAME;
    }

    /**
     * Authenticates the request: verifies the client certificate (optionally
     * against the thumbprint white list), extracts the username from the HTTP
     * or SOAP header, and checks that the user exists in the user store.
     */
    @Override
    public boolean isAuthenticated(MessageContext msgCxt) {
        boolean isAuthenticated = false;
        HttpServletRequest request = (HttpServletRequest) msgCxt.getProperty(HTTPConstants.MC_HTTP_SERVLETREQUEST);
        Object certObject = request.getAttribute(JAVAX_SERVLET_REQUEST_CERTIFICATE);
        try {
            if (certObject != null) {
                if (!authenticatorInitialized) {
                    init();
                }
                if (!authenticatorInitialized) {
                    log.error(AUTHENTICATOR_NAME + " failed initialization");
                    return false;
                }
                // <m:UserName xmlns:m="http://mutualssl.carbon.wso2.org"
                // soapenv:mustUnderstand="0">234</m:UserName>
                boolean trustedThumbprint = false;
                String thumbprint = null;
                if (certObject instanceof X509Certificate[]) {
                    X509Certificate[] cert = (X509Certificate[]) certObject;
                    if (whiteListEnabled && whiteList != null) {
                        // Client certificate is always in the index 0
                        thumbprint = getThumbPrint(cert[0]);
                        if (log.isDebugEnabled()) {
                            log.debug("Client certificate thumbprint is " + thumbprint);
                        }
                        for (String whiteThumbprint : whiteList) {
                            if (thumbprint.equals(whiteThumbprint)) {
                                // Thumbprint of the client certificate is in the trusted list
                                trustedThumbprint = true;
                                if (log.isDebugEnabled()) {
                                    log.debug("Client certificate thumbprint matched with the white list");
                                }
                                break;
                            }
                        }
                    }
                }
                if (!whiteListEnabled || trustedThumbprint) {
                    // WhiteList is disabled or client certificate is in the trusted list
                    String userName = null;
                    String usernameInHeader = request.getHeader(usernameHeaderName);
                    boolean validHeader = false;
                    if (StringUtils.isNotEmpty(usernameInHeader)) {
                        // Username is received in HTTP header encoded in base64
                        byte[] base64DecodedByteArray = Base64.decode(usernameInHeader);
                        userName = new String(base64DecodedByteArray, CHARACTER_ENCODING);
                        validHeader = true;
                        if (log.isDebugEnabled()) {
                            log.debug("Username for Mutual SSL : " + userName);
                        }
                    }
                    if (StringUtils.isEmpty(userName)) {
                        // Username is not received in HTTP Header. Check for SOAP header
                        SOAPEnvelope envelope = msgCxt.getEnvelope();
                        SOAPHeader header = envelope.getHeader();
                        if (header != null) {
                            ArrayList<SOAPHeaderBlock> headers = header.getHeaderBlocksWithNSURI(MUTUAL_SSL_URL);
                            if (headers != null) {
                                for (SOAPHeaderBlock soapHeaderBlock : headers) {
                                    if (usernameHeaderName.equals(soapHeaderBlock.getLocalName())) {
                                        // Username is received in SOAP header
                                        userName = soapHeaderBlock.getText();
                                        validHeader = true;
                                        break;
                                    }
                                }
                            }
                        }
                    }
                    if (!validHeader && log.isDebugEnabled()) {
                        log.debug("'" + usernameHeaderName + "'" + " header is not received in HTTP or SOAP header");
                    }
                    if (StringUtils.isNotEmpty(userName)) {
                        String tenantDomain = MultitenantUtils.getTenantDomain(userName);
                        userName = MultitenantUtils.getTenantAwareUsername(userName);
                        TenantManager tenantManager =
                                MutualSSLAuthenticatorServiceComponent.getRealmService().getTenantManager();
                        int tenantId = tenantManager.getTenantId(tenantDomain);
                        handleAuthenticationStarted(tenantId);
                        UserStoreManager userstore =
                                MutualSSLAuthenticatorServiceComponent.getRealmService().getTenantUserRealm(tenantId)
                                        .getUserStoreManager();
                        if (userstore.isExistingUser(userName)) {
                            // Username used for mutual ssl authentication is a valid user
                            isAuthenticated = true;
                        }
                        if (isAuthenticated) {
                            CarbonAuthenticationUtil.onSuccessAdminLogin(request.getSession(), userName, tenantId,
                                    tenantDomain, "Mutual SSL Authentication");
                            handleAuthenticationCompleted(tenantId, true);
                        } else {
                            if (log.isDebugEnabled()) {
                                // Fixed typo in the log message: "rquest" -> "request".
                                log.debug("Authentication request is rejected. User " + userName +
                                        " does not exist in userstore");
                            }
                            CarbonAuthenticationUtil.onFailedAdminLogin(request.getSession(), userName, tenantId,
                                    "Mutual SSL Authentication", "User does not exist in userstore");
                            handleAuthenticationCompleted(tenantId, false);
                        }
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Client Thumbprint " + thumbprint + " is not in the White List of " + AUTHENTICATOR_NAME);
                    }
                }
            } else {
                throw new IllegalStateException("The certificate cannot be empty");
            }
        } catch (Exception e) {
            log.error("Error authenticating the user " + e.getMessage(), e);
        }
        return isAuthenticated;
    }

    /**
     * Decides whether this authenticator applies to the request: enabled,
     * initialized, no Authorization header, a client certificate present, and
     * a username carried in either the SOAP or HTTP header.
     */
    @Override
    public boolean isHandle(MessageContext msgCxt) {
        boolean canHandle = false;
        if (!isDisabled()) {
            if (!authenticatorInitialized) {
                init();
                if (!authenticatorInitialized) {
                    return canHandle;
                }
            }
            HttpServletRequest request = (HttpServletRequest) msgCxt.getProperty(HTTPConstants.MC_HTTP_SERVLETREQUEST);
            String authorizationHeader = request.getHeader(HTTPConstants.HEADER_AUTHORIZATION);
            // This authenticator should kick in only if authorization headers are null
            if (authorizationHeader == null) {
                Object certObject = request.getAttribute(JAVAX_SERVLET_REQUEST_CERTIFICATE);
                if (certObject != null) {
                    SOAPEnvelope envelope = msgCxt.getEnvelope();
                    SOAPHeader header = envelope.getHeader();
                    boolean validHeader = false;
                    if (header != null) {
                        ArrayList<SOAPHeaderBlock> headers = header.getHeaderBlocksWithNSURI(MUTUAL_SSL_URL);
                        if (headers != null) {
                            for (SOAPHeaderBlock soapHeaderBlock : headers) {
                                if (usernameHeaderName.equals(soapHeaderBlock.getLocalName())) {
                                    // Username can be in SOAP Header
                                    canHandle = true;
                                    validHeader = true;
                                    break;
                                }
                            }
                        }
                    }
                    if (!canHandle && StringUtils.isNotEmpty(request.getHeader(usernameHeaderName))) {
                        validHeader = true;
                        // Username is received in HTTP Header
                        canHandle = true;
                    }
                    if (!validHeader && log.isDebugEnabled()) {
                        log.debug("'" + usernameHeaderName + "'" + " header is not received in HTTP or SOAP header");
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        // Fixed missing space between "not" and "done" in the concatenated message.
                        log.debug("Server is not picking up the client certificate. Mutual SSL authentication is " +
                                "not done");
                    }
                }
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("MutualSSLAuthenticator is Disabled.");
            }
        }
        return canHandle;
    }

    /**
     * Helper method to retrieve the thumbprint of a X509 certificate.
     * NOTE(review): SHA-1 is the conventional certificate-thumbprint format and
     * is used here only as an identifier, not as a password hash.
     *
     * @param cert X509 certificate
     * @return hex-encoded SHA-1 digest of the DER-encoded certificate
     * @throws NoSuchAlgorithmException if SHA-1 is unavailable
     * @throws CertificateEncodingException if the certificate cannot be encoded
     */
    private static String getThumbPrint(X509Certificate cert) throws NoSuchAlgorithmException, CertificateEncodingException {
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        byte[] certEncoded = cert.getEncoded();
        md.update(certEncoded);
        return hexify(md.digest());
    }

    /**
     * Helper method to hexify a byte array.
     *
     * @param bytes Bytes of message digest
     * @return lowercase hexadecimal representation
     */
    private static String hexify(byte[] bytes) {
        StringBuilder builder = new StringBuilder(bytes.length * 2);
        char[] hexDigits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' };
        for (byte byteValue : bytes) {
            builder.append(hexDigits[(byteValue & 0xf0) >> 4]).append(hexDigits[byteValue & 0x0f]);
        }
        return builder.toString();
    }

    /** Notifies all registered AuthenticationObserver services that authentication started. */
    private void handleAuthenticationStarted(int tenantId) {
        BundleContext bundleContext = MutualSSLAuthenticatorServiceComponent.getBundleContext();
        if (bundleContext != null) {
            ServiceTracker tracker =
                    new ServiceTracker(bundleContext,
                            AuthenticationObserver.class.getName(), null);
            tracker.open();
            Object[] services = tracker.getServices();
            if (services != null) {
                for (Object service : services) {
                    ((AuthenticationObserver) service).startedAuthentication(tenantId);
                }
            }
            tracker.close();
        }
    }

    /** Notifies all registered AuthenticationObserver services of the authentication outcome. */
    private void handleAuthenticationCompleted(int tenantId, boolean isSuccessful) {
        BundleContext bundleContext = MutualSSLAuthenticatorServiceComponent.getBundleContext();
        if (bundleContext != null) {
            ServiceTracker tracker =
                    new ServiceTracker(bundleContext,
                            AuthenticationObserver.class.getName(), null);
            tracker.open();
            Object[] services = tracker.getServices();
            if (services != null) {
                for (Object service : services) {
                    ((AuthenticationObserver) service).completedAuthentication(
                            tenantId, isSuccessful);
                }
            }
            tracker.close();
        }
    }
}
| |
/**
* Copyright (c) 2013-2021 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.connection;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.EpollDatagramChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollSocketChannel;
import io.netty.channel.kqueue.KQueueDatagramChannel;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.kqueue.KQueueSocketChannel;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.resolver.AddressResolver;
import io.netty.resolver.AddressResolverGroup;
import io.netty.resolver.DefaultAddressResolverGroup;
import io.netty.resolver.dns.DnsServerAddressStreamProviders;
import io.netty.util.*;
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.*;
import io.netty.util.internal.PlatformDependent;
import org.redisson.ElementsSubscribeService;
import org.redisson.Version;
import org.redisson.api.NodeType;
import org.redisson.client.*;
import org.redisson.client.codec.Codec;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.cluster.ClusterSlotRange;
import org.redisson.config.*;
import org.redisson.misc.InfinitySemaphoreLatch;
import org.redisson.misc.RedisURI;
import org.redisson.pubsub.PublishSubscribeService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
/**
*
* @author Nikita Koksharov
*
*/
public class MasterSlaveConnectionManager implements ConnectionManager {
    /**
     * No-op {@link Timeout} returned by {@link #newTimeout} when the internal timer has
     * already been stopped during shutdown. Reports itself as neither expired nor
     * cancelled, and {@code cancel()} always "succeeds".
     */
    public static final Timeout DUMMY_TIMEOUT = new Timeout() {
        @Override
        public Timer timer() {
            return null;
        }
        @Override
        public TimerTask task() {
            return null;
        }
        @Override
        public boolean isExpired() {
            return false;
        }
        @Override
        public boolean isCancelled() {
            return false;
        }
        @Override
        public boolean cancel() {
            return true;
        }
    };
    // Unique id of this manager instance (string form of the UUID passed to the constructor).
    protected final String id;
    // Number of hash slots in a Redis cluster; used here as the single-slot upper bound.
    public static final int MAX_SLOT = 16384;
    // Non-cluster mode: one slot range covering every slot, so all keys map to one entry.
    protected final ClusterSlotRange singleSlotRange = new ClusterSlotRange(0, MAX_SLOT-1);
    private final Logger log = LoggerFactory.getLogger(getClass());
    // Shared wheel timer for retries/timeouts; tick duration is derived in initTimer().
    private HashedWheelTimer timer;
    protected Codec codec;
    // Netty event loop group; implementation depends on the configured TransportMode.
    protected final EventLoopGroup group;
    protected final Class<? extends SocketChannel> socketChannelClass;
    // Re-resolves master/slave hostnames periodically; only started for non-IP addresses.
    protected DNSMonitor dnsMonitor;
    protected MasterSlaveServersConfig config;
    // The single topology entry managed in master/slave (non-cluster) mode.
    private MasterSlaveEntry masterSlaveEntry;
    private final Promise<Void> shutdownPromise = ImmediateEventExecutor.INSTANCE.newPromise();
    // Closed at the start of shutdown(); isShuttingDown() reflects its state.
    private final InfinitySemaphoreLatch shutdownLatch = new InfinitySemaphoreLatch();
    private IdleConnectionWatcher connectionWatcher;
    private final ConnectionEventsHub connectionEventsHub = new ConnectionEventsHub();
    // Executor for command processing; owned by us only when cfg.getExecutor() is null.
    private final ExecutorService executor;
    private final Config cfg;
    protected final AddressResolverGroup<InetSocketAddress> resolverGroup;
    private final ElementsSubscribeService elementsSubscribeService = new ElementsSubscribeService(this);
    protected PublishSubscribeService subscribeService;
    // Cache of auxiliary node connections keyed by URI; see connectToNode()/disconnectNode().
    private final Map<RedisURI, RedisConnection> nodeConnections = new ConcurrentHashMap<>();
    /**
     * Creates a manager for an explicit master/slave topology and connects to the master
     * (and slaves, unless slave init is skipped).
     *
     * @throws IllegalArgumentException if a slave-reading mode is configured but no slave
     *                                  addresses are defined
     */
    public MasterSlaveConnectionManager(MasterSlaveServersConfig cfg, Config config, UUID id) {
        this(config, id);
        this.config = cfg;
        if (cfg.getSlaveAddresses().isEmpty()
                && (cfg.getReadMode() == ReadMode.SLAVE || cfg.getReadMode() == ReadMode.MASTER_SLAVE)) {
            throw new IllegalArgumentException("Slaves aren't defined. readMode can't be SLAVE or MASTER_SLAVE");
        }
        initTimer(cfg);
        initSingleEntry();
    }
    /**
     * Shared initialization: selects the Netty transport (epoll/kqueue/nio), the matching
     * DNS resolver group, and the executor used for command processing.
     */
    protected MasterSlaveConnectionManager(Config cfg, UUID id) {
        this.id = id.toString();
        Version.logVersion();
        if (cfg.getTransportMode() == TransportMode.EPOLL) {
            if (cfg.getEventLoopGroup() == null) {
                this.group = new EpollEventLoopGroup(cfg.getNettyThreads(), new DefaultThreadFactory("redisson-netty"));
            } else {
                // Caller-supplied group: we must not shut it down later (see shutdown()).
                this.group = cfg.getEventLoopGroup();
            }
            this.socketChannelClass = EpollSocketChannel.class;
            // On Android fall back to the JDK resolver instead of Netty's async DNS resolver.
            if (PlatformDependent.isAndroid()) {
                this.resolverGroup = DefaultAddressResolverGroup.INSTANCE;
            } else {
                this.resolverGroup = cfg.getAddressResolverGroupFactory().create(EpollDatagramChannel.class, DnsServerAddressStreamProviders.platformDefault());
            }
        } else if (cfg.getTransportMode() == TransportMode.KQUEUE) {
            if (cfg.getEventLoopGroup() == null) {
                this.group = new KQueueEventLoopGroup(cfg.getNettyThreads(), new DefaultThreadFactory("redisson-netty"));
            } else {
                this.group = cfg.getEventLoopGroup();
            }
            this.socketChannelClass = KQueueSocketChannel.class;
            if (PlatformDependent.isAndroid()) {
                this.resolverGroup = DefaultAddressResolverGroup.INSTANCE;
            } else {
                this.resolverGroup = cfg.getAddressResolverGroupFactory().create(KQueueDatagramChannel.class, DnsServerAddressStreamProviders.platformDefault());
            }
        } else {
            // Default transport: NIO, available on every platform.
            if (cfg.getEventLoopGroup() == null) {
                this.group = new NioEventLoopGroup(cfg.getNettyThreads(), new DefaultThreadFactory("redisson-netty"));
            } else {
                this.group = cfg.getEventLoopGroup();
            }
            this.socketChannelClass = NioSocketChannel.class;
            if (PlatformDependent.isAndroid()) {
                this.resolverGroup = DefaultAddressResolverGroup.INSTANCE;
            } else {
                this.resolverGroup = cfg.getAddressResolverGroupFactory().create(NioDatagramChannel.class, DnsServerAddressStreamProviders.platformDefault());
            }
        }
        if (cfg.getExecutor() == null) {
            // Default pool size: 2 x CPUs, unless an explicit thread count is configured.
            int threads = Runtime.getRuntime().availableProcessors() * 2;
            if (cfg.getThreads() != 0) {
                threads = cfg.getThreads();
            }
            executor = Executors.newFixedThreadPool(threads, new DefaultThreadFactory("redisson"));
        } else {
            // Caller-supplied executor: not shut down by us (see shutdown()).
            executor = cfg.getExecutor();
        }
        this.cfg = cfg;
        this.codec = cfg.getCodec();
        if (cfg.getConnectionListener() != null) {
            connectionEventsHub.addListener(cfg.getConnectionListener());
        }
    }
    /** Shuts down every cached node client, blocking until each shutdown completes. */
    protected void closeNodeConnections() {
        nodeConnections.values().stream()
                .map(c -> c.getRedisClient().shutdownAsync())
                .forEach(f -> f.toCompletableFuture().join());
    }
    /**
     * Removes the given connection from the cache (it may be registered under several
     * URIs) and closes it only if it was actually cached.
     */
    protected void closeNodeConnection(RedisConnection conn) {
        if (nodeConnections.values().removeAll(Arrays.asList(conn))) {
            conn.closeAsync();
        }
    }
    /** Drops and closes the cached connection for the given node address, if present. */
    protected final void disconnectNode(RedisURI addr) {
        RedisConnection conn = nodeConnections.remove(addr);
        if (conn != null) {
            // The same connection may also be cached under its resolved-IP URI; purge all aliases.
            nodeConnections.values().removeAll(Arrays.asList(conn));
            conn.closeAsync();
        }
    }
    /** Connects to a node as MASTER type; see the four-argument overload. */
    protected final CompletionStage<RedisConnection> connectToNode(BaseConfig<?> cfg, RedisURI addr, String sslHostname) {
        return connectToNode(NodeType.MASTER, cfg, addr, sslHostname);
    }
    /**
     * Returns a cached connection to {@code addr} if one is alive, otherwise establishes
     * a new one and caches it. For hostname URIs the connection is also cached under its
     * resolved-IP URI so later IP-based lookups hit the cache.
     */
    protected final CompletionStage<RedisConnection> connectToNode(NodeType type, BaseConfig<?> cfg, RedisURI addr, String sslHostname) {
        RedisConnection conn = nodeConnections.get(addr);
        if (conn != null) {
            if (!conn.isActive()) {
                // Stale cached connection: evict and fall through to reconnect.
                closeNodeConnection(conn);
            } else {
                return CompletableFuture.completedFuture(conn);
            }
        }
        RedisClient client = createClient(type, addr, cfg.getConnectTimeout(), cfg.getTimeout(), sslHostname);
        CompletionStage<RedisConnection> future = client.connectAsync();
        return future.thenCompose(connection -> {
            if (connection.isActive()) {
                if (!addr.isIP()) {
                    // Also register the connection under its resolved IP address.
                    RedisURI address = new RedisURI(addr.getScheme()
                            + "://" + connection.getRedisClient().getAddr().getAddress().getHostAddress()
                            + ":" + connection.getRedisClient().getAddr().getPort());
                    nodeConnections.put(address, connection);
                }
                nodeConnections.put(addr, connection);
                return CompletableFuture.completedFuture(connection);
            } else {
                connection.closeAsync();
                CompletableFuture<RedisConnection> f = new CompletableFuture<>();
                f.completeExceptionally(new RedisException("Connection to " + connection.getRedisClient().getAddr() + " is not active!"));
                return f;
            }
        });
    }
    @Override
    public String getId() {
        return id;
    }
    @Override
    public boolean isClusterMode() {
        return false;
    }
    @Override
    public IdleConnectionWatcher getConnectionWatcher() {
        return connectionWatcher;
    }
    @Override
    public Config getCfg() {
        return cfg;
    }
    @Override
    public MasterSlaveServersConfig getConfig() {
        return config;
    }
    @Override
    public Codec getCodec() {
        return codec;
    }
    @Override
    public Collection<MasterSlaveEntry> getEntrySet() {
        // At most one entry in master/slave mode; empty until initSingleEntry() succeeds.
        if (masterSlaveEntry != null) {
            return Collections.singletonList(masterSlaveEntry);
        }
        return Collections.emptyList();
    }
    /**
     * Creates the wheel timer, idle-connection watcher and pub/sub service. The timer
     * tick is derived from the smaller of retryInterval and timeout so scheduled
     * timeouts fire with adequate resolution.
     */
    protected void initTimer(MasterSlaveServersConfig config) {
        int[] timeouts = new int[]{config.getRetryInterval(), config.getTimeout()};
        Arrays.sort(timeouts);
        int minTimeout = timeouts[0];
        if (minTimeout % 100 != 0) {
            minTimeout = (minTimeout % 100) / 2;
        } else if (minTimeout == 100) {
            minTimeout = 50;
        } else {
            minTimeout = 100;
        }
        timer = new HashedWheelTimer(new DefaultThreadFactory("redisson-timer"), minTimeout, TimeUnit.MILLISECONDS, 1024, false);
        connectionWatcher = new IdleConnectionWatcher(this, config);
        subscribeService = new PublishSubscribeService(this, config);
    }
    /**
     * Builds the single master/slave entry, synchronously connects to the master (and
     * initializes the slave balancer unless slaves are skipped), then starts DNS
     * monitoring. On any failure all started threads are stopped before rethrowing.
     */
    protected void initSingleEntry() {
        try {
            if (config.checkSkipSlavesInit()) {
                masterSlaveEntry = new SingleEntry(this, config);
            } else {
                masterSlaveEntry = new MasterSlaveEntry(this, config);
            }
            CompletableFuture<RedisClient> masterFuture = masterSlaveEntry.setupMasterEntry(new RedisURI(config.getMasterAddress()));
            masterFuture.join();
            if (!config.checkSkipSlavesInit()) {
                CompletableFuture<Void> fs = masterSlaveEntry.initSlaveBalancer(getDisconnectedNodes());
                fs.join();
            }
            startDNSMonitoring(masterFuture.getNow(null));
        } catch (Exception e) {
            stopThreads();
            // join() wraps failures in CompletionException; rethrow the real cause.
            if (e instanceof CompletionException) {
                throw (RuntimeException) e.getCause();
            }
            throw e;
        }
    }
    /**
     * Starts periodic DNS re-resolution of master/slave hostnames. Skipped when the
     * master address is already an IP or monitoring is disabled (interval == -1).
     */
    protected void startDNSMonitoring(RedisClient masterHost) {
        if (masterHost.getConfig().getAddress().isIP()) {
            return;
        }
        if (config.getDnsMonitoringInterval() != -1) {
            Set<RedisURI> slaveAddresses = config.getSlaveAddresses().stream().map(r -> new RedisURI(r)).collect(Collectors.toSet());
            dnsMonitor = new DNSMonitor(this, masterHost,
                    slaveAddresses, config.getDnsMonitoringInterval(), resolverGroup);
            dnsMonitor.start();
        }
    }
    // Hook for subclasses (e.g. cluster/sentinel managers) to report currently-down nodes.
    protected Collection<RedisURI> getDisconnectedNodes() {
        return Collections.emptySet();
    }
    /**
     * Copies every relevant setting from a generic base config into a fresh
     * {@link MasterSlaveServersConfig}.
     */
    protected MasterSlaveServersConfig create(BaseMasterSlaveServersConfig<?> cfg) {
        MasterSlaveServersConfig c = new MasterSlaveServersConfig();
        c.setPingConnectionInterval(cfg.getPingConnectionInterval());
        c.setSslEnableEndpointIdentification(cfg.isSslEnableEndpointIdentification());
        c.setSslProvider(cfg.getSslProvider());
        c.setSslTruststore(cfg.getSslTruststore());
        c.setSslTruststorePassword(cfg.getSslTruststorePassword());
        c.setSslKeystore(cfg.getSslKeystore());
        c.setSslKeystorePassword(cfg.getSslKeystorePassword());
        c.setSslProtocols(cfg.getSslProtocols());
        c.setRetryInterval(cfg.getRetryInterval());
        c.setRetryAttempts(cfg.getRetryAttempts());
        c.setTimeout(cfg.getTimeout());
        c.setLoadBalancer(cfg.getLoadBalancer());
        c.setPassword(cfg.getPassword());
        c.setUsername(cfg.getUsername());
        c.setClientName(cfg.getClientName());
        c.setMasterConnectionPoolSize(cfg.getMasterConnectionPoolSize());
        c.setSlaveConnectionPoolSize(cfg.getSlaveConnectionPoolSize());
        c.setSubscriptionConnectionPoolSize(cfg.getSubscriptionConnectionPoolSize());
        c.setSubscriptionsPerConnection(cfg.getSubscriptionsPerConnection());
        c.setConnectTimeout(cfg.getConnectTimeout());
        c.setIdleConnectionTimeout(cfg.getIdleConnectionTimeout());
        c.setFailedSlaveCheckInterval(cfg.getFailedSlaveCheckInterval());
        c.setFailedSlaveReconnectionInterval(cfg.getFailedSlaveReconnectionInterval());
        c.setMasterConnectionMinimumIdleSize(cfg.getMasterConnectionMinimumIdleSize());
        c.setSlaveConnectionMinimumIdleSize(cfg.getSlaveConnectionMinimumIdleSize());
        c.setSubscriptionConnectionMinimumIdleSize(cfg.getSubscriptionConnectionMinimumIdleSize());
        c.setReadMode(cfg.getReadMode());
        c.setSubscriptionMode(cfg.getSubscriptionMode());
        c.setDnsMonitoringInterval(cfg.getDnsMonitoringInterval());
        c.setKeepAlive(cfg.isKeepAlive());
        c.setTcpNoDelay(cfg.isTcpNoDelay());
        c.setNameMapper(cfg.getNameMapper());
        return c;
    }
    @Override
    public RedisClient createClient(NodeType type, RedisURI address, String sslHostname) {
        RedisClient client = createClient(type, address, config.getConnectTimeout(), config.getTimeout(), sslHostname);
        return client;
    }
    @Override
    public RedisClient createClient(NodeType type, InetSocketAddress address, RedisURI uri, String sslHostname) {
        RedisClient client = createClient(type, address, uri, config.getConnectTimeout(), config.getTimeout(), sslHostname);
        return client;
    }
    @Override
    public RedisClient createClient(NodeType type, RedisURI address, int timeout, int commandTimeout, String sslHostname) {
        RedisClientConfig redisConfig = createRedisConfig(type, address, timeout, commandTimeout, sslHostname);
        return RedisClient.create(redisConfig);
    }
    // Variant for an already-resolved socket address paired with its original URI.
    private RedisClient createClient(NodeType type, InetSocketAddress address, RedisURI uri, int timeout, int commandTimeout, String sslHostname) {
        RedisClientConfig redisConfig = createRedisConfig(type, null, timeout, commandTimeout, sslHostname);
        redisConfig.setAddress(address, uri);
        return RedisClient.create(redisConfig);
    }
    /**
     * Assembles a {@link RedisClientConfig} sharing this manager's timer, executor,
     * resolver group and event loop group, plus SSL/auth settings from the current config.
     */
    protected RedisClientConfig createRedisConfig(NodeType type, RedisURI address, int timeout, int commandTimeout, String sslHostname) {
        RedisClientConfig redisConfig = new RedisClientConfig();
        redisConfig.setAddress(address)
              .setTimer(timer)
              .setExecutor(executor)
              .setResolverGroup(resolverGroup)
              .setGroup(group)
              .setSocketChannelClass(socketChannelClass)
              .setConnectTimeout(timeout)
              .setCommandTimeout(commandTimeout)
              .setSslHostname(sslHostname)
              .setSslEnableEndpointIdentification(config.isSslEnableEndpointIdentification())
              .setSslProvider(config.getSslProvider())
              .setSslTruststore(config.getSslTruststore())
              .setSslTruststorePassword(config.getSslTruststorePassword())
              .setSslKeystore(config.getSslKeystore())
              .setSslKeystorePassword(config.getSslKeystorePassword())
              .setSslProtocols(config.getSslProtocols())
              .setClientName(config.getClientName())
              .setKeepPubSubOrder(cfg.isKeepPubSubOrder())
              .setPingConnectionInterval(config.getPingConnectionInterval())
              .setKeepAlive(config.isKeepAlive())
              .setTcpNoDelay(config.isTcpNoDelay())
              .setUsername(config.getUsername())
              .setPassword(config.getPassword())
              .setNettyHook(cfg.getNettyHook());
        // Database selection is skipped for sentinel connections.
        if (type != NodeType.SENTINEL) {
            redisConfig.setDatabase(config.getDatabase());
        }
        return redisConfig;
    }
    @Override
    public int calcSlot(String key) {
        // Non-cluster mode: every key maps into the single slot range.
        return singleSlotRange.getStartSlot();
    }
    @Override
    public int calcSlot(byte[] key) {
        return singleSlotRange.getStartSlot();
    }
    @Override
    public MasterSlaveEntry getEntry(InetSocketAddress address) {
        return masterSlaveEntry;
    }
    protected MasterSlaveEntry getEntry(RedisURI addr) {
        return masterSlaveEntry;
    }
    @Override
    public MasterSlaveEntry getEntry(RedisClient redisClient) {
        return masterSlaveEntry;
    }
    @Override
    public MasterSlaveEntry getEntry(String name) {
        int slot = calcSlot(name);
        return getEntry(slot);
    }
    @Override
    public MasterSlaveEntry getEntry(int slot) {
        return masterSlaveEntry;
    }
    /** Switches the master of the entry owning {@code slot} to the given address. */
    protected CompletableFuture<RedisClient> changeMaster(int slot, RedisURI address) {
        MasterSlaveEntry entry = getEntry(slot);
        return entry.changeMaster(address);
    }
    /**
     * Acquires a write connection for {@code source}, failing the future when no entry
     * can be resolved for it.
     */
    @Override
    public CompletableFuture<RedisConnection> connectionWriteOp(NodeSource source, RedisCommand<?> command) {
        MasterSlaveEntry entry = getEntry(source);
        if (entry == null) {
            CompletableFuture<RedisConnection> f = new CompletableFuture<>();
            f.completeExceptionally(createNodeNotFoundException(source));
            return f;
        }
        // fix for https://github.com/redisson/redisson/issues/1548
        if (source.getRedirect() != null
                && !RedisURI.compare(entry.getClient().getAddr(), source.getAddr())
                    && entry.hasSlave(source.getAddr())) {
            return entry.redirectedConnectionWriteOp(command, source.getAddr());
        }
        return entry.connectionWriteOp(command);
    }
    // Resolves the entry for a source by redirect address, client, or slot — in that order.
    private MasterSlaveEntry getEntry(NodeSource source) {
        if (source.getRedirect() != null) {
            return getEntry(source.getAddr());
        }
        MasterSlaveEntry entry = source.getEntry();
        if (source.getRedisClient() != null) {
            entry = getEntry(source.getRedisClient());
        }
        if (entry == null && source.getSlot() != null) {
            entry = getEntry(source.getSlot());
        }
        return entry;
    }
    /**
     * Acquires a read connection for {@code source}, honoring an explicit redirect
     * address or a pinned client when present.
     */
    @Override
    public CompletableFuture<RedisConnection> connectionReadOp(NodeSource source, RedisCommand<?> command) {
        MasterSlaveEntry entry = getEntry(source);
        if (entry == null) {
            CompletableFuture<RedisConnection> f = new CompletableFuture<>();
            f.completeExceptionally(createNodeNotFoundException(source));
            return f;
        }
        if (source.getRedirect() != null) {
            return entry.connectionReadOp(command, source.getAddr());
        }
        if (source.getRedisClient() != null) {
            return entry.connectionReadOp(command, source.getRedisClient());
        }
        return entry.connectionReadOp(command);
    }
    /** Builds a descriptive "node not found" exception for a slot-only or node source. */
    public RedisNodeNotFoundException createNodeNotFoundException(NodeSource source) {
        RedisNodeNotFoundException ex;
        if (source.getSlot() != null && source.getAddr() == null && source.getRedisClient() == null) {
            ex = new RedisNodeNotFoundException("Node for slot: " + source.getSlot() + " hasn't been discovered yet. Check cluster slots coverage using CLUSTER NODES command. Increase value of retryAttempts and/or retryInterval settings.");
        } else {
            ex = new RedisNodeNotFoundException("Node: " + source + " hasn't been discovered yet. Increase value of retryAttempts and/or retryInterval settings.");
        }
        return ex;
    }
    @Override
    public void releaseWrite(NodeSource source, RedisConnection connection) {
        MasterSlaveEntry entry = getEntry(source);
        if (entry == null) {
            log.error("Node: " + source + " can't be found");
        } else {
            entry.releaseWrite(connection);
        }
    }
    @Override
    public void releaseRead(NodeSource source, RedisConnection connection) {
        MasterSlaveEntry entry = getEntry(source);
        if (entry == null) {
            log.error("Node: " + source + " can't be found");
        } else {
            entry.releaseRead(connection);
        }
    }
    @Override
    public void shutdown() {
        shutdown(0, 2, TimeUnit.SECONDS); //default netty value
    }
    /**
     * Orderly shutdown: stops DNS monitoring and the idle watcher, shuts down all
     * entries, then closes the resolver group, the (owned) executor, the (owned)
     * event loop group and finally the timer.
     */
    @Override
    public void shutdown(long quietPeriod, long timeout, TimeUnit unit) {
        if (dnsMonitor != null) {
            dnsMonitor.stop();
        }
        connectionWatcher.stop();
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        for (MasterSlaveEntry entry : getEntrySet()) {
            futures.add(entry.shutdownAsync());
        }
        CompletableFuture<Void> future = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
        try {
            future.get(timeout, unit);
        } catch (Exception e) {
            // skip
        }
        resolverGroup.close();
        shutdownLatch.close();
        // Only shut down the executor if this manager created it.
        if (cfg.getExecutor() == null) {
            executor.shutdown();
            try {
                executor.awaitTermination(timeout, unit);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        shutdownPromise.trySuccess(null);
        shutdownLatch.awaitUninterruptibly();
        // Only shut down the event loop group if this manager created it.
        if (cfg.getEventLoopGroup() == null) {
            group.shutdownGracefully(quietPeriod, timeout, unit).syncUninterruptibly();
        }
        timer.stop();
    }
    @Override
    public boolean isShuttingDown() {
        return shutdownLatch.isClosed();
    }
    @Override
    public boolean isShutdown() {
        return group.isTerminated();
    }
    @Override
    public EventLoopGroup getGroup() {
        return group;
    }
    /**
     * Schedules a task on the internal timer. If the timer has already been stopped
     * because the manager is shutting down, returns {@link #DUMMY_TIMEOUT} instead of
     * propagating the {@link IllegalStateException}.
     */
    @Override
    public Timeout newTimeout(TimerTask task, long delay, TimeUnit unit) {
        try {
            return timer.newTimeout(task, delay, unit);
        } catch (IllegalStateException e) {
            if (isShuttingDown()) {
                return DUMMY_TIMEOUT;
            }
            throw e;
        }
    }
    @Override
    public InfinitySemaphoreLatch getShutdownLatch() {
        return shutdownLatch;
    }
    @Override
    public Future<Void> getShutdownPromise() {
        return shutdownPromise;
    }
    @Override
    public ConnectionEventsHub getConnectionEventsHub() {
        return connectionEventsHub;
    }
    // Invoked on failed initialization to release every resource already started.
    protected void stopThreads() {
        shutdown();
    }
    public PublishSubscribeService getSubscribeService() {
        return subscribeService;
    }
    public ElementsSubscribeService getElementsSubscribeService() {
        return elementsSubscribeService;
    }
    public ExecutorService getExecutor() {
        return executor;
    }
    // Overridden by cluster mode; no "last node" concept in master/slave mode.
    public RedisURI getLastClusterNode() {
        return null;
    }
    // Identity NAT mapping by default; subclasses may translate internal/external addresses.
    @Override
    public RedisURI applyNatMap(RedisURI address) {
        return address;
    }
    @Override
    public CompletableFuture<RedisURI> resolveIP(RedisURI address) {
        return resolveIP(address.getScheme(), address);
    }
    /**
     * Asynchronously resolves a hostname URI to an IP-based URI (with NAT mapping
     * applied). IP addresses short-circuit without touching the resolver.
     */
    protected CompletableFuture<RedisURI> resolveIP(String scheme, RedisURI address) {
        if (address.isIP()) {
            RedisURI addr = applyNatMap(address);
            return CompletableFuture.completedFuture(addr);
        }
        CompletableFuture<RedisURI> result = new CompletableFuture<>();
        AddressResolver<InetSocketAddress> resolver = resolverGroup.getResolver(getGroup().next());
        InetSocketAddress addr = InetSocketAddress.createUnresolved(address.getHost(), address.getPort());
        Future<InetSocketAddress> future = resolver.resolve(addr);
        future.addListener((FutureListener<InetSocketAddress>) f -> {
            if (!f.isSuccess()) {
                log.error("Unable to resolve " + address, f.cause());
                result.completeExceptionally(f.cause());
                return;
            }
            InetSocketAddress s = f.getNow();
            RedisURI uri = toURI(scheme, s.getAddress().getHostAddress(), "" + address.getPort());
            result.complete(uri);
        });
        return result;
    }
    /**
     * Builds a NAT-mapped {@link RedisURI} from scheme/host/port, normalizing IPv6
     * addresses to their unified compressed form first.
     */
    protected RedisURI toURI(String scheme, String host, String port) {
        // convert IPv6 address to unified compressed format
        if (NetUtil.isValidIpV6Address(host)) {
            byte[] addr = NetUtil.createByteArrayFromIpAddressString(host);
            try {
                InetAddress ia = InetAddress.getByAddress(host, addr);
                host = ia.getHostAddress();
            } catch (UnknownHostException e) {
                throw new RuntimeException(e);
            }
        }
        RedisURI uri = new RedisURI(scheme + "://" + host + ":" + port);
        return applyNatMap(uri);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.