gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.util.ConverterUtils;
public class AppLogAggregatorImpl implements AppLogAggregator {

  private static final Log LOG = LogFactory
      .getLog(AppLogAggregatorImpl.class);

  /** Wait interval (ms) between checks for application completion. */
  private static final int THREAD_SLEEP_TIME = 1000;
  /**
   * Logs are aggregated into a temporary file first and renamed to the
   * final name only when aggregation completes, so readers never observe
   * a partially written aggregate file.
   */
  private static final String TMP_FILE_SUFFIX = ".tmp";

  private final LocalDirsHandlerService dirsHandler;
  private final Dispatcher dispatcher;
  private final ApplicationId appId;
  private final String applicationId;
  // Set when the remote writer cannot be created; all subsequent uploads
  // for this application are skipped.
  private boolean logAggregationDisabled = false;
  private final Configuration conf;
  private final DeletionService delService;
  private final UserGroupInformation userUgi;
  private final Path remoteNodeLogFileForApp;
  private final Path remoteNodeTmpLogFileForApp;
  private final ContainerLogsRetentionPolicy retentionPolicy;
  private final BlockingQueue<ContainerId> pendingContainers;
  private final AtomicBoolean appFinishing = new AtomicBoolean();
  private final AtomicBoolean appAggregationFinished = new AtomicBoolean();
  private final Map<ApplicationAccessType, String> appAcls;
  // Created lazily on the first container upload; stays null if no
  // container logs are ever uploaded.
  private LogWriter writer = null;

  public AppLogAggregatorImpl(Dispatcher dispatcher,
      DeletionService deletionService, Configuration conf, ApplicationId appId,
      UserGroupInformation userUgi, LocalDirsHandlerService dirsHandler,
      Path remoteNodeLogFileForApp,
      ContainerLogsRetentionPolicy retentionPolicy,
      Map<ApplicationAccessType, String> appAcls) {
    this.dispatcher = dispatcher;
    this.conf = conf;
    this.delService = deletionService;
    this.appId = appId;
    this.applicationId = ConverterUtils.toString(appId);
    this.userUgi = userUgi;
    this.dirsHandler = dirsHandler;
    this.remoteNodeLogFileForApp = remoteNodeLogFileForApp;
    this.remoteNodeTmpLogFileForApp = getRemoteNodeTmpLogFileForApp();
    this.retentionPolicy = retentionPolicy;
    this.pendingContainers = new LinkedBlockingQueue<ContainerId>();
    this.appAcls = appAcls;
  }

  /**
   * Appends the logs of the given container to the aggregate log file,
   * creating the remote writer (and writing the application ACLs and
   * owner exactly once) on first use. If the writer cannot be created,
   * log aggregation is permanently disabled for this application.
   */
  private void uploadLogsForContainer(ContainerId containerId) {

    if (this.logAggregationDisabled) {
      return;
    }

    // Lazy creation of the writer
    if (this.writer == null) {
      LOG.info("Starting aggregate log-file for app " + this.applicationId
          + " at " + this.remoteNodeTmpLogFileForApp);
      try {
        this.writer =
            new LogWriter(this.conf, this.remoteNodeTmpLogFileForApp,
                this.userUgi);
        // Write ACLs once when and if the writer is created.
        this.writer.writeApplicationACLs(appAcls);
        this.writer.writeApplicationOwner(this.userUgi.getShortUserName());
      } catch (IOException e) {
        LOG.error("Cannot create writer for app " + this.applicationId
            + ". Disabling log-aggregation for this app.", e);
        this.logAggregationDisabled = true;
        return;
      }
    }

    LOG.info("Uploading logs for container " + containerId
        + ". Current good log dirs are "
        + StringUtils.join(",", dirsHandler.getLogDirs()));
    LogKey logKey = new LogKey(containerId);
    LogValue logValue = new LogValue(dirsHandler.getLogDirs(), containerId);
    try {
      this.writer.append(logKey, logValue);
    } catch (IOException e) {
      // FIX: include the exception so the root cause of the failed upload
      // is not lost. A failure for one container is deliberately
      // non-fatal: remaining containers are still aggregated.
      LOG.error("Couldn't upload logs for " + containerId
          + ". Skipping this container.", e);
    }
  }

  @Override
  public void run() {
    try {
      doAppLogAggregation();
    } finally {
      // Guarantee the finished flag is set even if aggregation aborted,
      // so anyone waiting on this aggregator is never blocked forever.
      if (!this.appAggregationFinished.get()) {
        LOG.warn("Aggregation did not complete for application " + appId);
      }
      this.appAggregationFinished.set(true);
    }
  }

  @SuppressWarnings("unchecked")
  private void doAppLogAggregation() {
    ContainerId containerId;

    // Wait until the application finishes: finishLogAggregation() sets the
    // flag and notifies us; otherwise we re-check every THREAD_SLEEP_TIME ms.
    while (!this.appFinishing.get()) {
      synchronized(this) {
        try {
          wait(THREAD_SLEEP_TIME);
        } catch (InterruptedException e) {
          // The interrupt status is intentionally not restored here: this
          // thread still performs HDFS uploads and the final rename below,
          // which a pending interrupt could abort. We instead treat the
          // interrupt as "finish now".
          LOG.warn("PendingContainers queue is interrupted");
          this.appFinishing.set(true);
        }
      }
    }

    // Application is finished. Finish pending-containers
    while ((containerId = this.pendingContainers.poll()) != null) {
      uploadLogsForContainer(containerId);
    }

    // Remove the local app-log-dirs
    List<String> rootLogDirs = dirsHandler.getLogDirs();
    Path[] localAppLogDirs = new Path[rootLogDirs.size()];
    int index = 0;
    for (String rootLogDir : rootLogDirs) {
      localAppLogDirs[index] = new Path(rootLogDir, this.applicationId);
      index++;
    }
    this.delService.delete(this.userUgi.getShortUserName(), null,
        localAppLogDirs);

    if (this.writer != null) {
      this.writer.closeWriter();
      LOG.info("Finished aggregate log-file for app " + this.applicationId);
    }

    // Publish the completed aggregate file by renaming the temporary file
    // to its final name, running as the application user.
    try {
      userUgi.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          FileSystem remoteFS = FileSystem.get(conf);
          remoteFS.rename(remoteNodeTmpLogFileForApp, remoteNodeLogFileForApp);
          return null;
        }
      });
    } catch (Exception e) {
      LOG.error("Failed to move temporary log file to final location: ["
          + remoteNodeTmpLogFileForApp + "] to [" + remoteNodeLogFileForApp
          + "]", e);
    }

    this.dispatcher.getEventHandler().handle(
        new ApplicationEvent(this.appId,
            ApplicationEventType.APPLICATION_LOG_HANDLING_FINISHED));
    this.appAggregationFinished.set(true);
  }

  /** Returns the temporary (".tmp") sibling of the final aggregate file. */
  private Path getRemoteNodeTmpLogFileForApp() {
    return new Path(remoteNodeLogFileForApp.getParent(),
        (remoteNodeLogFileForApp.getName() + TMP_FILE_SUFFIX));
  }

  /**
   * Decides whether a container's logs should be uploaded according to
   * the configured retention policy.
   *
   * NOTE(review): container id 1 is assumed to identify the
   * ApplicationMaster container — confirm this assumption holds for the
   * deployed YARN version.
   */
  private boolean shouldUploadLogs(ContainerId containerId,
      boolean wasContainerSuccessful) {

    // All containers
    if (this.retentionPolicy
        .equals(ContainerLogsRetentionPolicy.ALL_CONTAINERS)) {
      return true;
    }

    // AM Container only
    if (this.retentionPolicy
        .equals(ContainerLogsRetentionPolicy.APPLICATION_MASTER_ONLY)) {
      return containerId.getId() == 1;
    }

    // AM + Failing containers
    if (this.retentionPolicy
        .equals(ContainerLogsRetentionPolicy.AM_AND_FAILED_CONTAINERS_ONLY)) {
      return containerId.getId() == 1 || !wasContainerSuccessful;
    }

    return false;
  }

  @Override
  public void startContainerLogAggregation(ContainerId containerId,
      boolean wasContainerSuccessful) {
    if (shouldUploadLogs(containerId, wasContainerSuccessful)) {
      LOG.info("Considering container " + containerId
          + " for log-aggregation");
      this.pendingContainers.add(containerId);
    }
  }

  @Override
  public synchronized void finishLogAggregation() {
    LOG.info("Application just finished : " + this.applicationId);
    this.appFinishing.set(true);
    this.notifyAll();
  }
}
| |
package com.google.sitebricks.routing;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import javax.validation.ValidationException;
import net.jcip.annotations.GuardedBy;
import net.jcip.annotations.ThreadSafe;
import org.jetbrains.annotations.Nullable;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.inject.BindingAnnotation;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Singleton;
import com.google.inject.TypeLiteral;
import com.google.inject.name.Named;
import com.google.sitebricks.ActionDescriptor;
import com.google.sitebricks.At;
import com.google.sitebricks.Bricks;
import com.google.sitebricks.Renderable;
import com.google.sitebricks.Show;
import com.google.sitebricks.client.Transport;
import com.google.sitebricks.conversion.TypeConverter;
import com.google.sitebricks.headless.Reply;
import com.google.sitebricks.headless.Request;
import com.google.sitebricks.headless.Service;
import com.google.sitebricks.http.As;
import com.google.sitebricks.http.Get;
import com.google.sitebricks.http.Head;
import com.google.sitebricks.http.Select;
import com.google.sitebricks.http.Trace;
import com.google.sitebricks.http.negotiate.ContentNegotiator;
import com.google.sitebricks.http.negotiate.Negotiation;
import com.google.sitebricks.rendering.Strings;
import com.google.sitebricks.rendering.control.DecorateWidget;
import com.google.sitebricks.transport.Form;
/**
* contains active uri/widget mappings
*
* @author Dhanji R. Prasanna (dhanji@gmail.com)
*/
@ThreadSafe @Singleton
public class DefaultPageBook implements PageBook {
//multimaps TODO refactor to multimap?
@GuardedBy("lock") // All three following fields
private final Map<String, List<PageTuple>> pages = Maps.newHashMap();
private final List<PageTuple> universalMatchingPages = Lists.newArrayList();
private final Map<String, PageTuple> pagesByName = Maps.newHashMap();
private final ConcurrentMap<Class<?>, PageTuple> classToPageMap =
new MapMaker()
.weakKeys()
.weakValues()
.makeMap();
private final Object lock = new Object();
private final Injector injector;
@Inject
public DefaultPageBook(Injector injector) {
this.injector = injector;
}
@Override @SuppressWarnings("unchecked")
public Collection<List<Page>> getPageMap() {
return (Collection) pages.values();
}
// Page registration (internal) APIs
public Page serviceAt(String uri, Class<?> pageClass) {
// Handle subpaths, registering each as a separate instance of the page
// tuple.
for (Method method : pageClass.getDeclaredMethods()) {
if (method.isAnnotationPresent(At.class)) {
// This is a subpath expression.
At at = method.getAnnotation(At.class);
String subpath = at.value();
// Validate subpath
if (!subpath.startsWith("/") || subpath.isEmpty() || subpath.length() == 1) {
throw new IllegalArgumentException(String.format(
"Subpath At(\"%s\") on %s.%s() must begin with a \"/\" and must not be empty",
subpath, pageClass.getName(), method.getName()));
}
subpath = uri + subpath;
// Register as headless web service.
doAt(subpath, pageClass, true);
}
}
return doAt(uri, pageClass, true);
}
public PageTuple at(String uri, Class<?> clazz) {
return at(uri, clazz, clazz.isAnnotationPresent(Service.class));
}
@Override
public void at(String uri, List<ActionDescriptor> actionDescriptors,
Map<Class<? extends Annotation>, String> methodSet) {
Multimap<String, Action> actions = HashMultimap.create();
for (ActionDescriptor actionDescriptor : actionDescriptors) {
for (Class<? extends Annotation> method : actionDescriptor.getMethods()) {
String methodString = methodSet.get(method);
Action action = actionDescriptor.getAction();
if (null == action) {
action = injector.getInstance(actionDescriptor.getActionKey());
} else {
injector.injectMembers(action);
}
actions.put(methodString, new SpiAction(action, actionDescriptor));
}
}
// Register into the book!
at(new PageTuple(uri, new PathMatcherChain(uri), null, true, false, injector, actions));
}
private void at(PageTuple page) {
// Is Universal?
synchronized (lock) {
String key = firstPathElement(page.getUri());
if (isVariable(key)) {
universalMatchingPages.add(page);
} else {
multiput(pages, key, page);
}
}
// Actions are not backed by classes.
if (page.pageClass() != null)
classToPageMap.put(page.pageClass(), page);
}
private PageTuple at(String uri, Class<?> clazz, boolean headless) {
// Handle subpaths, registering each as a separate instance of the page
// tuple.
for (Method method : clazz.getDeclaredMethods()) {
if (method.isAnnotationPresent(At.class)) {
// This is a subpath expression.
At at = method.getAnnotation(At.class);
String subpath = at.value();
// Validate subpath
if (!subpath.startsWith("/") || subpath.isEmpty() || subpath.length() == 1) {
throw new IllegalArgumentException(String.format(
"Subpath At(\"%s\") on %s.%s() must begin with a \"/\" and must not be empty",
subpath, clazz.getName(), method.getName()));
}
subpath = uri + subpath;
// Register as headless web service.
doAt(subpath, clazz, headless);
}
}
return doAt(uri, clazz, headless);
}
private PageTuple doAt(String uri, Class<?> clazz, boolean headless) {
final String key = firstPathElement(uri);
final PageTuple pageTuple =
new PageTuple(uri, new PathMatcherChain(uri), clazz, injector, headless, false);
synchronized (lock) {
//is universal? (i.e. first element is a variable)
if (isVariable(key))
universalMatchingPages.add(pageTuple);
else {
multiput(pages, key, pageTuple);
}
}
// Does not need to be inside lock, as it is concurrent.
classToPageMap.put(clazz, pageTuple);
return pageTuple;
}
public Page embedAs(Class<?> clazz, String as) {
Preconditions.checkArgument(null == clazz.getAnnotation(Service.class),
"You cannot embed headless web services!");
PageTuple pageTuple = new PageTuple("", PathMatcherChain.ignoring(), clazz, injector, false, false);
synchronized (lock) {
pagesByName.put(as.toLowerCase(), pageTuple);
}
return pageTuple;
}
public Page decorate(Class<?> pageClass) {
Preconditions.checkArgument(null == pageClass.getAnnotation(Service.class),
"You cannot extend headless web services!");
PageTuple pageTuple = new PageTuple("", PathMatcherChain.ignoring(), pageClass, injector, false, true);
// store page with a special name used by DecorateWidget
String name = DecorateWidget.embedNameFor(pageClass);
synchronized (lock) {
pagesByName.put(name, pageTuple);
}
return pageTuple;
}
public Page nonCompilingGet(String uri) {
// The regular get is non compiling, in our case. So these methods are identical.
return get(uri);
}
private static void multiput(Map<String, List<PageTuple>> pages, String key,
PageTuple page) {
List<PageTuple> list = pages.get(key);
if (null == list) {
list = new ArrayList<PageTuple>();
pages.put(key, list);
}
list.add(page);
}
private static boolean isVariable(String key) {
return key.length() > 0 && ':' == key.charAt(0);
}
String firstPathElement(String uri) {
String shortUri = uri.substring(1);
final int index = shortUri.indexOf("/");
return (index >= 0) ? shortUri.substring(0, index) : shortUri;
}
@Nullable
public Page get(String uri) {
final String key = firstPathElement(uri);
List<PageTuple> tuple = pages.get(key);
//first try static first piece
if (null != tuple) {
//first try static first piece
for (PageTuple pageTuple : tuple) {
if (pageTuple.matcher.matches(uri))
return pageTuple;
}
}
//now try dynamic first piece (how can we make this faster?)
for (PageTuple pageTuple : universalMatchingPages) {
if (pageTuple.matcher.matches(uri))
return pageTuple;
}
//nothing matched
return null;
}
public Page forName(String name) {
return pagesByName.get(name);
}
@Nullable
public Page forInstance(Object instance) {
Class<?> aClass = instance.getClass();
PageTuple targetType = classToPageMap.get(aClass);
// Do a super crawl to detect the target type.
while (null == targetType) {
aClass = aClass.getSuperclass();
targetType = classToPageMap.get(aClass);
// Stop at the root =D
if (Object.class.equals(aClass)) {
return null;
}
}
return InstanceBoundPage.delegating(targetType, instance);
}
public Page forClass(Class<?> pageClass) {
return classToPageMap.get(pageClass);
}
public static class InstanceBoundPage implements Page {
private final Page delegate;
private final Object instance;
private InstanceBoundPage(Page delegate, Object instance) {
this.delegate = delegate;
this.instance = instance;
}
public Renderable widget() {
return delegate.widget();
}
public Object instantiate() {
return instance;
}
public Object doMethod(String httpMethod, Object page, String pathInfo, Request request)
throws IOException {
return delegate.doMethod(httpMethod, page, pathInfo, request);
}
public Class<?> pageClass() {
return delegate.pageClass();
}
public void apply(Renderable widget) {
delegate.apply(widget);
}
public String getUri() {
return delegate.getUri();
}
public boolean isHeadless() {
return delegate.isHeadless();
}
@Override
public boolean isDecorated() {
return delegate.isDecorated();
}
public Set<String> getMethod() {
return delegate.getMethod();
}
public int compareTo(Page page) {
return delegate.compareTo(page);
}
public static InstanceBoundPage delegating(Page delegate, Object instance) {
return new InstanceBoundPage(delegate, instance);
}
@Override
public Show getShow() {
return delegate.getShow();
}
}
@Select("") //the default select (hacky!!)
public static class PageTuple implements Page {
private final String uri;
private final PathMatcher matcher;
private final AtomicReference<Renderable> pageWidget = new AtomicReference<Renderable>();
private final Class<?> clazz;
private final boolean headless;
private final boolean extension;
private final Injector injector;
private final Multimap<String, Action> methods;
//dispatcher switch (select on request param by default)
private final Select select;
private static final Key<Map<String, Class<? extends Annotation>>> HTTP_METHODS_KEY =
Key.get(new TypeLiteral<Map<String, Class<? extends Annotation>>>() {}, Bricks.class);
// A map of http methods -> annotation types (e.g. "POST" -> @Post)
private Map<String, Class<? extends Annotation>> httpMethods;
public PageTuple(String uri, PathMatcher matcher, Class<?> clazz, boolean headless, boolean extension,
Injector injector, Multimap<String, Action> methods) {
this.uri = uri;
this.matcher = matcher;
this.clazz = clazz;
this.headless = headless;
this.extension = extension;
this.injector = injector;
this.methods = methods;
this.select = PageTuple.class.getAnnotation(Select.class);
this.httpMethods = injector.getInstance(HTTP_METHODS_KEY);
}
public PageTuple(String uri, PathMatcher matcher, Class<?> clazz, Injector injector,
boolean headless, boolean extension) {
this.uri = uri;
this.matcher = matcher;
this.clazz = clazz;
this.injector = injector;
this.headless = headless;
this.extension = extension;
this.select = discoverSelect(clazz);
this.httpMethods = injector.getInstance(HTTP_METHODS_KEY);
this.methods = reflectAndCache(uri, httpMethods);
}
//the @Select request parameter-based event dispatcher
private Select discoverSelect(Class<?> clazz) {
final Select select = clazz.getAnnotation(Select.class);
if (null != select)
return select;
else
return PageTuple.class.getAnnotation(Select.class);
}
/**
* Returns a map of HTTP-method name to @Annotation-marked methods
*/
@SuppressWarnings({"JavaDoc"})
private Multimap<String, Action> reflectAndCache(String uri,
Map<String, Class<? extends Annotation>> methodMap) {
String tail = "";
if (clazz.isAnnotationPresent(At.class)) {
int length = clazz.getAnnotation(At.class).value().length();
// It's possible that the uri being registered is shorter than the
// class length, this can happen in the case of using the .at() module
// directive to override @At() URI path mapping. In this case we treat
// this call as a top-level path registration with no tail. Any
// encountered subpath @At methods will be ignored for this URI.
if (uri != null && length <= uri.length())
tail = uri.substring(length);
}
Multimap<String, Action> map = HashMultimap.create();
for (Map.Entry<String, Class<? extends Annotation>> entry : methodMap.entrySet()) {
Class<? extends Annotation> get = entry.getValue();
// First search any available public methods and store them (including inherited ones)
for (Method method : clazz.getMethods()) {
if (method.isAnnotationPresent(get)) {
if (!method.isAccessible())
method.setAccessible(true); //ugh
// Be defensive about subpaths.
if (method.isAnnotationPresent(At.class)) {
// Skip any at-annotated methods for a top-level path registration.
if (tail.isEmpty()) {
continue;
}
// Skip any at-annotated methods that do not exactly match the path.
if (!tail.equals(method.getAnnotation(At.class).value())) {
continue;
}
} else if (!tail.isEmpty()) {
// If this is the top-level method we're scanning, but their is a tail, i.e.
// this is not intended to be served by the top-level method, then skip.
continue;
}
// Otherwise register this method for firing...
//remember default value is empty string
String value = getValue(get, method);
String key = (Strings.empty(value)) ? entry.getKey() : entry.getKey() + value;
map.put(key, new MethodTuple(method, injector));
}
}
// Then search class's declared methods only (these take precedence)
for (Method method : clazz.getDeclaredMethods()) {
if (method.isAnnotationPresent(get)) {
if (!method.isAccessible())
method.setAccessible(true); //ugh
// Be defensive about subpaths.
if (method.isAnnotationPresent(At.class)) {
// Skip any at-annotated methods for a top-level path registration.
if (tail.isEmpty()) {
continue;
}
// Skip any at-annotated methods that do not exactly match the path.
if (!tail.equals(method.getAnnotation(At.class).value())) {
continue;
}
} else if (!tail.isEmpty()) {
// If this is the top-level method we're scanning, but their is a tail, i.e.
// this is not intended to be served by the top-level method, then skip.
continue;
}
// Otherwise register this method for firing...
//remember default value is empty string
String value = getValue(get, method);
String key = (Strings.empty(value)) ? entry.getKey() : entry.getKey() + value;
map.put(key, new MethodTuple(method, injector));
}
}
}
return map;
}
private String getValue(Class<? extends Annotation> get, Method method) {
return readAnnotationValue(method.getAnnotation(get));
}
public Renderable widget() {
return pageWidget.get();
}
public Object instantiate() {
return clazz == null ? Collections.emptyMap() : injector.getInstance(clazz);
}
public boolean isHeadless() {
return headless;
}
@Override
public boolean isDecorated() {
return extension;
}
public Set<String> getMethod() {
return methods.keySet();
}
public int compareTo(Page page) {
return uri.compareTo(page.getUri());
}
public Object doMethod(String httpMethod, Object page, String pathInfo,
Request request) throws IOException {
//nothing to fire
if (Strings.empty(httpMethod)) {
return null;
}
// NOTE(dhanji): This slurps the entire Map. It could potentially be optimized...
Multimap<String, String> params = request.params();
// Extract injectable pieces of the pathInfo.
final Map<String, String> map = matcher.findMatches(pathInfo);
// Find method(s) to dispatch to.
Collection<String> events = params.get(select.value());
if (null != events) {
boolean matched = false;
for (String event : events) {
String key = httpMethod + event;
Collection<Action> tuples = methods.get(key);
Object redirect = null;
if (null != tuples) {
for (Action action : tuples) {
if (action.shouldCall(request)) {
matched = true;
redirect = action.call(request, page, map);
break;
}
}
}
// Redirects interrupt the event dispatch sequence. Note this might cause inconsistent
// behaviour depending on the order of processing for events.
if (null != redirect) {
return redirect;
}
}
// no matched events. Fire default handler
if (!matched) {
return callAction(httpMethod, page, map, request);
}
} else {
// Fire default handler (no events defined)
return callAction(httpMethod, page, map, request);
}
//no redirects, render normally
return null;
}
private Object callAction(String httpMethod, Object page, Map<String, String> pathMap,
Request request) throws IOException {
// There may be more than one default handler
Collection<Action> tuple = methods.get(httpMethod);
Object redirect = null;
if (null != tuple) {
for (Action action : tuple) {
if (action.shouldCall(request)) {
redirect = action.call(request, page, pathMap);
break;
}
}
}
return redirect;
}
public Class<?> pageClass() {
return clazz;
}
public void apply(Renderable widget) {
this.pageWidget.set(widget);
}
public String getUri() {
return uri;
}
@Override
public Show getShow() {
for (String httpMethod: methods.keySet()) {
Collection<Action> actions = methods.get(httpMethod);
if (actions != null) {
for (Action action: actions) {
Method method = action.getMethod();
if (method != null) {
Show show = action.getMethod().getAnnotation(Show.class);
if (show != null) {
return show;
}
}
}
}
}
return null;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Page)) return false;
Page that = (Page) o;
return this.clazz.equals(that.pageClass()) && isDecorated() == that.isDecorated();
}
@Override
public int hashCode() {
return clazz.hashCode();
}
@Override
public String toString() {
return Objects.toStringHelper(PageTuple.class).add("clazz", clazz).add("isDecorated", extension)
.add("uri", uri).add("methods", methods).toString();
}
}
private static class MethodTuple implements Action {
private final Method method;
private final Injector injector;
private final List<Object> args;
private final Map<String, String> negotiates;
private final ContentNegotiator negotiator;
private final TypeConverter converter;
private final As returnAs;
private MethodTuple(Method method, Injector injector) {
this.method = method;
this.injector = injector;
this.args = reflect(method);
this.negotiates = discoverNegotiates(method, injector);
this.negotiator = injector.getInstance(ContentNegotiator.class);
this.converter = injector.getInstance(TypeConverter.class);
this.returnAs = method.getAnnotation(As.class);
}
private List<Object> reflect(Method method) {
final Annotation[][] annotationsGrid = method.getParameterAnnotations();
if (null == annotationsGrid)
return Collections.emptyList();
List<Object> args = new ArrayList<Object>();
for (int i = 0; i < annotationsGrid.length; i++) {
Annotation[] annotations = annotationsGrid[i];
Annotation bindingAnnotation = null;
boolean preInjectableFound = false;
for (Annotation annotation : annotations) {
if (Named.class.isInstance(annotation)) {
Named named = (Named) annotation;
args.add(new NamedParameter(named.value(), method.getGenericParameterTypes()[i]));
preInjectableFound = true;
break;
}
if (javax.inject.Named.class.isInstance(annotation)) {
javax.inject.Named named = (javax.inject.Named) annotation;
args.add(new NamedParameter(named.value(), method.getGenericParameterTypes()[i]));
preInjectableFound = true;
break;
}
else if (annotation.annotationType().isAnnotationPresent(BindingAnnotation.class)) {
bindingAnnotation = annotation;
}
else if (As.class.isInstance(annotation)) {
As as = (As) annotation;
if (method.isAnnotationPresent(Get.class)
|| method.isAnnotationPresent(Head.class)
|| method.isAnnotationPresent(Trace.class)) {
if (! as.value().equals(Form.class)) {
throw new IllegalArgumentException("Cannot accept a @As(...) request body from" +
" method marked @Get, @Head or @Trace: "
+ method.getDeclaringClass().getName() + "#" + method.getName() + "()");
}
}
preInjectableFound = true;
args.add(new AsParameter(as.value(), TypeLiteral.get(method.getGenericParameterTypes()[i])));
break;
}
}
if (!preInjectableFound) {
Type genericParameterType = method.getGenericParameterTypes()[i];
Key<?> key = (null != bindingAnnotation)
? Key.get(genericParameterType, bindingAnnotation)
: Key.get(genericParameterType);
args.add(key);
if (null == injector.getBindings().get(key)) {
throw new InvalidEventHandlerException(
"Encountered an argument not annotated with @Named and not a valid injection key"
+ " in event handler method: " + method + " " + key);
}
}
}
return Collections.unmodifiableList(args);
}
/**
* @return true if this method tuple can be validly called against this request.
* Used to select for content negotiation.
*/
@Override
public boolean shouldCall(Request request) {
return negotiator.shouldCall(negotiates, request);
}
@Override
public Object call(Request request, Object page, Map<String, String> map) throws IOException {
List<Object> arguments = new ArrayList<Object>();
for (Object arg : args) {
if (arg instanceof AsParameter) {
AsParameter as = (AsParameter) arg;
arguments.add(request.read(as.type).as(as.transport));
} else if (arg instanceof NamedParameter) {
NamedParameter np = (NamedParameter) arg;
String text = map.get(np.getName());
Object value = converter.convert(text, np.getType());
arguments.add(value);
} else
arguments.add(injector.getInstance((Key<?>) arg));
}
Object result = call(page, method, arguments.toArray());
if (returnAs != null && result instanceof Reply) {
((Reply) result).as(returnAs.value());
}
return result;
}
@Override
public Method getMethod() {
return this.method;
}
private static Object call(Object page, final Method method,
Object[] args) {
try {
return method.invoke(page, args);
} catch (IllegalAccessException e) {
throw new EventDispatchException(
"Could not access event method (appears to be a security problem): " + method, e);
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
if (cause instanceof ValidationException) {
throw (ValidationException) cause;
}
StackTraceElement[] stackTrace = cause.getStackTrace();
throw new EventDispatchException(String.format(
"Exception [%s - \"%s\"] thrown by event method [%s]\n\nat %s\n"
+ "(See below for entire trace.)\n",
cause.getClass().getSimpleName(),
cause.getMessage(), method,
stackTrace[0]), e);
}
}
//the @Accept request header-based event dispatcher
private Map<String, String> discoverNegotiates(Method method, Injector injector) {
// This ugly gunk gets us the map of headers to negotiation annotations
Map<String, Class<? extends Annotation>> negotiationsMap = injector.getInstance(
Key.get(new TypeLiteral<Map<String, Class<? extends Annotation>>>(){ }, Negotiation.class));
Map<String, String> negotiations = Maps.newHashMap();
// Gather all the negotiation annotations in this class.
for (Map.Entry<String, Class<? extends Annotation>> headerAnn : negotiationsMap.entrySet()) {
Annotation annotation = method.getAnnotation(headerAnn.getValue());
if (annotation != null) {
negotiations.put(headerAnn.getKey(), readAnnotationValue(annotation));
}
}
return negotiations;
}
public class NamedParameter {
private final String name;
private final Type type;
public NamedParameter(String name, Type type) {
this.name = name;
this.type = type;
}
public String getName() {
return name;
}
public Type getType() {
return type;
}
}
/**
 * Immutable holder tying a {@link Transport} implementation class to the
 * {@link TypeLiteral} of the payload it carries.  No accessors are defined
 * in this scope; the fields are package-visible via the enclosing class only
 * through this holder instance.
 */
public class AsParameter {
  private final Class<? extends Transport> transport;
  private final TypeLiteral<?> type;

  public AsParameter(Class<? extends Transport> transportClass, TypeLiteral<?> payloadType) {
    this.transport = transportClass;
    this.type = payloadType;
  }
}
/**
 * Equality is defined solely by the wrapped {@link Method}; the args field is
 * deliberately ignored, mirroring {@link #hashCode()}.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  return method.equals(((MethodTuple) o).method);
}
/** Consistent with {@link #equals(Object)}: hashes the wrapped method only. */
@Override
public int hashCode() {
  return this.method.hashCode();
}
/** Debug representation showing the wrapped method and the args field. */
@Override
public String toString() {
  return String.format("MethodTuple [method=%s, args=%s]", method, args);
}
}
/**
 * A simple utility method that reads the String value attribute of any
 * annotation instance.
 *
 * @param annotation annotation instance whose {@code value()} is read
 * @return the annotation's {@code value()} attribute, cast to String
 * @throws IllegalStateException if the annotation has no {@code value()}
 *     method or it cannot be invoked/read
 */
static String readAnnotationValue(Annotation annotation) {
  try {
    // Look the method up on the declared annotation interface rather than on
    // getClass(): the latter is the runtime proxy class, and invoking a
    // method obtained from a non-public proxy class can fail with
    // IllegalAccessException.  annotationType() is the documented way to get
    // at the annotation's own interface.
    Method m = annotation.annotationType().getMethod("value");
    return (String) m.invoke(annotation);
  } catch (NoSuchMethodException e) {
    throw new IllegalStateException("Encountered a configured annotation that " +
        "has no value parameter. This should never happen. " + annotation, e);
  } catch (InvocationTargetException e) {
    throw new IllegalStateException("Encountered a configured annotation that " +
        "could not be read. " + annotation, e);
  } catch (IllegalAccessException e) {
    throw new IllegalStateException("Encountered a configured annotation that " +
        "could not be read. " + annotation, e);
  }
}
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import static com.google.protobuf.Internal.checkNotNull;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.InvalidMarkException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.List;
/** A {@link ByteString} that wraps around a {@link ByteBuffer}. */
final class NioByteString extends ByteString.LeafByteString {
  // Backing buffer.  The constructor stores a fresh slice, so position 0 of
  // this buffer is the logical start of the string and remaining() its size.
  private final ByteBuffer buffer;

  NioByteString(ByteBuffer buffer) {
    checkNotNull(buffer, "buffer");

    // Use native byte order for fast fixed32/64 operations.
    this.buffer = buffer.slice().order(ByteOrder.nativeOrder());
  }

  // =================================================================
  // Serializable

  /** Magic method that lets us override serialization behavior. */
  private Object writeReplace() {
    // Serialize as a plain array-backed ByteString copy instead of this class.
    return ByteString.copyFrom(buffer.slice());
  }

  /** Magic method that lets us override deserialization behavior. */
  private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException {
    throw new InvalidObjectException("NioByteString instances are not to be serialized directly");
  }

  // =================================================================

  @Override
  public byte byteAt(int index) {
    // Translate any non-array IndexOutOfBoundsException raised by ByteBuffer
    // into ArrayIndexOutOfBoundsException; an AIOOBE is rethrown unchanged.
    try {
      return buffer.get(index);
    } catch (ArrayIndexOutOfBoundsException e) {
      throw e;
    } catch (IndexOutOfBoundsException e) {
      throw new ArrayIndexOutOfBoundsException(e.getMessage());
    }
  }

  @Override
  public byte internalByteAt(int index) {
    // it isn't possible to avoid the bounds checking inside of ByteBuffer, so just use the default
    // implementation.
    return byteAt(index);
  }

  @Override
  public int size() {
    // The constructor sliced the buffer, so remaining() is the string length.
    return buffer.remaining();
  }

  @Override
  public ByteString substring(int beginIndex, int endIndex) {
    // Same exception translation as byteAt(): surface AIOOBE to callers.
    try {
      ByteBuffer slice = slice(beginIndex, endIndex);
      return new NioByteString(slice);
    } catch (ArrayIndexOutOfBoundsException e) {
      throw e;
    } catch (IndexOutOfBoundsException e) {
      throw new ArrayIndexOutOfBoundsException(e.getMessage());
    }
  }

  @Override
  protected void copyToInternal(
      byte[] target, int sourceOffset, int targetOffset, int numberToCopy) {
    // Work on an independent slice so this.buffer's own position is untouched.
    ByteBuffer slice = buffer.slice();
    slice.position(sourceOffset);
    slice.get(target, targetOffset, numberToCopy);
  }

  @Override
  public void copyTo(ByteBuffer target) {
    // Slice first so the source buffer's position is not advanced.
    target.put(buffer.slice());
  }

  @Override
  public void writeTo(OutputStream out) throws IOException {
    // Full copy through a heap array; partial writes go through writeToInternal.
    out.write(toByteArray());
  }

  @Override
  boolean equalsRange(ByteString other, int offset, int length) {
    // Compare via substrings; substring() is cheap here (no byte copying).
    return substring(0, length).equals(other.substring(offset, offset + length));
  }

  @Override
  void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) throws IOException {
    if (buffer.hasArray()) {
      // Optimized write for array-backed buffers.
      // Note that we're taking the risk that a malicious OutputStream could modify the array.
      int bufferOffset = buffer.arrayOffset() + buffer.position() + sourceOffset;
      out.write(buffer.array(), bufferOffset, numberToWrite);
      return;
    }

    // Direct (non-array) buffer: delegate to the buffer-aware writer.
    ByteBufferWriter.write(slice(sourceOffset, sourceOffset + numberToWrite), out);
  }

  @Override
  void writeTo(ByteOutput output) throws IOException {
    // Hands the callee a slice via writeLazy; per the ByteOutput contract the
    // callee may retain the buffer, which is safe since ByteString is immutable.
    output.writeLazy(buffer.slice());
  }

  @Override
  public ByteBuffer asReadOnlyByteBuffer() {
    return buffer.asReadOnlyBuffer();
  }

  @Override
  public List<ByteBuffer> asReadOnlyByteBufferList() {
    // A leaf string is always representable as a single buffer.
    return Collections.singletonList(asReadOnlyByteBuffer());
  }

  @Override
  protected String toStringInternal(Charset charset) {
    final byte[] bytes;
    final int offset;
    final int length;
    if (buffer.hasArray()) {
      // Decode straight out of the backing array, avoiding a copy.
      bytes = buffer.array();
      offset = buffer.arrayOffset() + buffer.position();
      length = buffer.remaining();
    } else {
      // TODO(nathanmittler): Can we optimize this?
      bytes = toByteArray();
      offset = 0;
      length = bytes.length;
    }
    return new String(bytes, offset, length, charset);
  }

  @Override
  public boolean isValidUtf8() {
    return Utf8.isValidUtf8(buffer);
  }

  @Override
  protected int partialIsValidUtf8(int state, int offset, int length) {
    return Utf8.partialIsValidUtf8(state, buffer, offset, offset + length);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof ByteString)) {
      return false;
    }
    ByteString otherString = ((ByteString) other);
    if (size() != otherString.size()) {
      return false;
    }
    if (size() == 0) {
      return true;
    }
    if (other instanceof NioByteString) {
      // Fast path: direct buffer-to-buffer content comparison.
      return buffer.equals(((NioByteString) other).buffer);
    }
    if (other instanceof RopeByteString) {
      // RopeByteString knows how to compare itself against leaves; delegate.
      return other.equals(this);
    }
    return buffer.equals(otherString.asReadOnlyByteBuffer());
  }

  @Override
  protected int partialHash(int h, int offset, int length) {
    // Absolute gets keep the buffer's position unchanged.
    for (int i = offset; i < offset + length; i++) {
      h = h * 31 + buffer.get(i);
    }
    return h;
  }

  @Override
  public InputStream newInput() {
    return new InputStream() {
      // Private slice so reads never disturb the outer buffer's position.
      private final ByteBuffer buf = buffer.slice();

      @Override
      public void mark(int readlimit) {
        // readlimit is ignored; ByteBuffer marks never expire.
        buf.mark();
      }

      @Override
      public boolean markSupported() {
        return true;
      }

      @Override
      public void reset() throws IOException {
        // reset() without a prior mark() throws InvalidMarkException; the
        // InputStream contract wants an IOException instead.
        try {
          buf.reset();
        } catch (InvalidMarkException e) {
          throw new IOException(e);
        }
      }

      @Override
      public int available() throws IOException {
        return buf.remaining();
      }

      @Override
      public int read() throws IOException {
        if (!buf.hasRemaining()) {
          return -1;
        }
        // Mask to return an unsigned value per the InputStream contract.
        return buf.get() & 0xFF;
      }

      @Override
      public int read(byte[] bytes, int off, int len) throws IOException {
        if (!buf.hasRemaining()) {
          return -1;
        }
        len = Math.min(len, buf.remaining());
        buf.get(bytes, off, len);
        return len;
      }
    };
  }

  @Override
  public CodedInputStream newCodedInput() {
    // 'true' = bufferIsImmutable, letting CodedInputStream skip defensive copies.
    return CodedInputStream.newInstance(buffer, true);
  }

  /**
   * Creates a slice of a range of this buffer.
   *
   * @param beginIndex the beginning index of the slice (inclusive).
   * @param endIndex the end index of the slice (exclusive).
   * @return the requested slice.
   */
  private ByteBuffer slice(int beginIndex, int endIndex) {
    if (beginIndex < buffer.position() || endIndex > buffer.limit() || beginIndex > endIndex) {
      throw new IllegalArgumentException(
          String.format("Invalid indices [%d, %d]", beginIndex, endIndex));
    }

    ByteBuffer slice = buffer.slice();
    // Indices are absolute within this.buffer; rebase them onto the new slice.
    slice.position(beginIndex - buffer.position());
    slice.limit(endIndex - buffer.position());
    return slice;
  }
}
| |
/*
* Copyright 2014-2019 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.archive;
import io.aeron.archive.client.ArchiveException;
import io.aeron.archive.codecs.*;
import io.aeron.logbuffer.*;
import org.agrona.DirectBuffer;
import org.agrona.collections.ArrayUtil;
/**
 * Demultiplexes archive control-protocol fragments: validates the SBE message
 * header, decodes the request by template id, and forwards it to a
 * {@link ControlRequestListener}.
 *
 * <p>One decoder per message type is pre-allocated and reused across
 * fragments, so instances are stateful and not safe for concurrent use.
 */
class ControlRequestAdapter implements FragmentHandler
{
    private final ControlRequestListener listener;
    private final MessageHeaderDecoder headerDecoder = new MessageHeaderDecoder();
    private final ConnectRequestDecoder connectRequestDecoder = new ConnectRequestDecoder();
    private final CloseSessionRequestDecoder closeSessionRequestDecoder = new CloseSessionRequestDecoder();
    private final StartRecordingRequestDecoder startRecordingRequestDecoder = new StartRecordingRequestDecoder();
    private final StopRecordingRequestDecoder stopRecordingRequestDecoder = new StopRecordingRequestDecoder();
    private final ReplayRequestDecoder replayRequestDecoder = new ReplayRequestDecoder();
    private final StopReplayRequestDecoder stopReplayRequestDecoder = new StopReplayRequestDecoder();
    private final ListRecordingsRequestDecoder listRecordingsRequestDecoder = new ListRecordingsRequestDecoder();
    private final ListRecordingsForUriRequestDecoder listRecordingsForUriRequestDecoder =
        new ListRecordingsForUriRequestDecoder();
    private final ListRecordingRequestDecoder listRecordingRequestDecoder = new ListRecordingRequestDecoder();
    private final ExtendRecordingRequestDecoder extendRecordingRequestDecoder = new ExtendRecordingRequestDecoder();
    private final RecordingPositionRequestDecoder recordingPositionRequestDecoder =
        new RecordingPositionRequestDecoder();
    private final TruncateRecordingRequestDecoder truncateRecordingRequestDecoder =
        new TruncateRecordingRequestDecoder();
    private final StopRecordingSubscriptionRequestDecoder stopRecordingSubscriptionRequestDecoder =
        new StopRecordingSubscriptionRequestDecoder();
    private final StopPositionRequestDecoder stopPositionRequestDecoder = new StopPositionRequestDecoder();
    private final FindLastMatchingRecordingRequestDecoder findLastMatchingRecordingRequestDecoder =
        new FindLastMatchingRecordingRequestDecoder();
    private final ListRecordingSubscriptionsRequestDecoder listRecordingSubscriptionsRequestDecoder =
        new ListRecordingSubscriptionsRequestDecoder();

    /**
     * @param listener sink that receives each decoded control request.
     */
    ControlRequestAdapter(final ControlRequestListener listener)
    {
        this.listener = listener;
    }

    /**
     * Decode one message fragment and dispatch it to the listener.
     *
     * @param buffer containing the fragment.
     * @param offset at which the fragment begins.
     * @param length of the fragment (unused; the SBE header carries the block length).
     * @param header of the fragment (unused here).
     * @throws ArchiveException if the fragment's schema id is not this codec's schema.
     */
    @SuppressWarnings("MethodLength")
    public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header)
    {
        headerDecoder.wrap(buffer, offset);

        final int schemaId = headerDecoder.schemaId();
        if (schemaId != MessageHeaderDecoder.SCHEMA_ID)
        {
            throw new ArchiveException("expected schemaId=" + MessageHeaderDecoder.SCHEMA_ID + ", actual=" + schemaId);
        }

        final int templateId = headerDecoder.templateId();
        switch (templateId)
        {
            case ConnectRequestDecoder.TEMPLATE_ID:
            {
                connectRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onConnect(
                    connectRequestDecoder.correlationId(),
                    connectRequestDecoder.responseStreamId(),
                    connectRequestDecoder.version(),
                    connectRequestDecoder.responseChannel());
                break;
            }

            case CloseSessionRequestDecoder.TEMPLATE_ID:
            {
                closeSessionRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onCloseSession(closeSessionRequestDecoder.controlSessionId());
                break;
            }

            case StartRecordingRequestDecoder.TEMPLATE_ID:
            {
                startRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onStartRecording(
                    startRecordingRequestDecoder.controlSessionId(),
                    startRecordingRequestDecoder.correlationId(),
                    startRecordingRequestDecoder.streamId(),
                    startRecordingRequestDecoder.channel(),
                    startRecordingRequestDecoder.sourceLocation());
                break;
            }

            case StopRecordingRequestDecoder.TEMPLATE_ID:
            {
                stopRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onStopRecording(
                    stopRecordingRequestDecoder.controlSessionId(),
                    stopRecordingRequestDecoder.correlationId(),
                    stopRecordingRequestDecoder.streamId(),
                    stopRecordingRequestDecoder.channel());
                break;
            }

            case ReplayRequestDecoder.TEMPLATE_ID:
            {
                replayRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onStartReplay(
                    replayRequestDecoder.controlSessionId(),
                    replayRequestDecoder.correlationId(),
                    replayRequestDecoder.recordingId(),
                    replayRequestDecoder.position(),
                    replayRequestDecoder.length(),
                    replayRequestDecoder.replayStreamId(),
                    replayRequestDecoder.replayChannel());
                break;
            }

            case StopReplayRequestDecoder.TEMPLATE_ID:
            {
                stopReplayRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onStopReplay(
                    stopReplayRequestDecoder.controlSessionId(),
                    stopReplayRequestDecoder.correlationId(),
                    stopReplayRequestDecoder.replaySessionId());
                break;
            }

            case ListRecordingsRequestDecoder.TEMPLATE_ID:
            {
                listRecordingsRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onListRecordings(
                    listRecordingsRequestDecoder.controlSessionId(),
                    listRecordingsRequestDecoder.correlationId(),
                    listRecordingsRequestDecoder.fromRecordingId(),
                    listRecordingsRequestDecoder.recordCount());
                break;
            }

            case ListRecordingsForUriRequestDecoder.TEMPLATE_ID:
            {
                listRecordingsForUriRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                // Channel is a variable-length field; copy it out as raw bytes.
                final int channelLength = listRecordingsForUriRequestDecoder.channelLength();
                final byte[] bytes = 0 == channelLength ? ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
                listRecordingsForUriRequestDecoder.getChannel(bytes, 0, channelLength);

                listener.onListRecordingsForUri(
                    listRecordingsForUriRequestDecoder.controlSessionId(),
                    listRecordingsForUriRequestDecoder.correlationId(),
                    listRecordingsForUriRequestDecoder.fromRecordingId(),
                    listRecordingsForUriRequestDecoder.recordCount(),
                    listRecordingsForUriRequestDecoder.streamId(),
                    bytes);
                break;
            }

            case ListRecordingRequestDecoder.TEMPLATE_ID:
            {
                listRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onListRecording(
                    listRecordingRequestDecoder.controlSessionId(),
                    listRecordingRequestDecoder.correlationId(),
                    listRecordingRequestDecoder.recordingId());
                break;
            }

            case ExtendRecordingRequestDecoder.TEMPLATE_ID:
            {
                extendRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onExtendRecording(
                    extendRecordingRequestDecoder.controlSessionId(),
                    extendRecordingRequestDecoder.correlationId(),
                    extendRecordingRequestDecoder.recordingId(),
                    extendRecordingRequestDecoder.streamId(),
                    extendRecordingRequestDecoder.channel(),
                    extendRecordingRequestDecoder.sourceLocation());
                break;
            }

            case RecordingPositionRequestDecoder.TEMPLATE_ID:
            {
                recordingPositionRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onGetRecordingPosition(
                    recordingPositionRequestDecoder.controlSessionId(),
                    recordingPositionRequestDecoder.correlationId(),
                    recordingPositionRequestDecoder.recordingId());
                break;
            }

            case TruncateRecordingRequestDecoder.TEMPLATE_ID:
            {
                truncateRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onTruncateRecording(
                    truncateRecordingRequestDecoder.controlSessionId(),
                    truncateRecordingRequestDecoder.correlationId(),
                    truncateRecordingRequestDecoder.recordingId(),
                    truncateRecordingRequestDecoder.position());
                break;
            }

            case StopRecordingSubscriptionRequestDecoder.TEMPLATE_ID:
            {
                stopRecordingSubscriptionRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onStopRecordingSubscription(
                    stopRecordingSubscriptionRequestDecoder.controlSessionId(),
                    stopRecordingSubscriptionRequestDecoder.correlationId(),
                    stopRecordingSubscriptionRequestDecoder.subscriptionId());
                break;
            }

            case StopPositionRequestDecoder.TEMPLATE_ID:
            {
                stopPositionRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onGetStopPosition(
                    stopPositionRequestDecoder.controlSessionId(),
                    stopPositionRequestDecoder.correlationId(),
                    stopPositionRequestDecoder.recordingId());
                break;
            }

            case FindLastMatchingRecordingRequestDecoder.TEMPLATE_ID:
            {
                findLastMatchingRecordingRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                // Channel is a variable-length field; copy it out as raw bytes.
                final int channelLength = findLastMatchingRecordingRequestDecoder.channelLength();
                final byte[] bytes = 0 == channelLength ? ArrayUtil.EMPTY_BYTE_ARRAY : new byte[channelLength];
                findLastMatchingRecordingRequestDecoder.getChannel(bytes, 0, channelLength);

                listener.onFindLastMatchingRecording(
                    findLastMatchingRecordingRequestDecoder.controlSessionId(),
                    findLastMatchingRecordingRequestDecoder.correlationId(),
                    findLastMatchingRecordingRequestDecoder.minRecordingId(),
                    findLastMatchingRecordingRequestDecoder.sessionId(),
                    findLastMatchingRecordingRequestDecoder.streamId(),
                    bytes);
                break;
            }

            case ListRecordingSubscriptionsRequestDecoder.TEMPLATE_ID:
            {
                listRecordingSubscriptionsRequestDecoder.wrap(
                    buffer,
                    offset + MessageHeaderDecoder.ENCODED_LENGTH,
                    headerDecoder.blockLength(),
                    headerDecoder.version());

                listener.onListRecordingSubscriptions(
                    listRecordingSubscriptionsRequestDecoder.controlSessionId(),
                    listRecordingSubscriptionsRequestDecoder.correlationId(),
                    listRecordingSubscriptionsRequestDecoder.pseudoIndex(),
                    listRecordingSubscriptionsRequestDecoder.subscriptionCount(),
                    listRecordingSubscriptionsRequestDecoder.applyStreamId() == BooleanType.TRUE,
                    listRecordingSubscriptionsRequestDecoder.streamId(),
                    listRecordingSubscriptionsRequestDecoder.channel());
                // break added for consistency with every other case; previously the
                // final case fell through to the end of the switch, which would
                // become a bug if a new case were appended after it.
                break;
            }

            default:
                // Unknown template ids are silently ignored, preserving the original
                // behavior -- NOTE(review): presumably for forward compatibility with
                // newer clients; confirm this is intentional.
                break;
        }
    }
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.test;
import java.io.*;
import java.net.*;
import java.util.*;
/**
 * Mock implementation of DatagramSocket for unit testing.  Packets "sent" on
 * the socket are captured in a vector readable via {@link #getSentPackets};
 * packets to be "received" are queued with {@link #addToReceiveQueue} and
 * dequeued by {@link #receive}.  Most other DatagramSocket operations are
 * unimplemented stubs.
 */
public class MockDatagramSocket
  extends DatagramSocket implements MockDatagramSocketExtras {

  private boolean isClosed = false;       // set by close(), queried by isClosed()
  private int localPort = -1;             // settable test value for getLocalPort()
  private Vector sentPackets;             // copies of every packet passed to send()
  private SimpleQueue.Fifo receiveQueue;  // packets queued for receive()

  //stubs for DatagramSocket methods
  public MockDatagramSocket() throws SocketException {
    sentPackets = new Vector();
    receiveQueue = new SimpleQueue.Fifo();
  }

  /**
   * @param port this is ignored and only here to override the
   * DatagramSocket contructor
   */
  public MockDatagramSocket(int port) throws SocketException {
    this();
  }

  /**
   * @param port this is ignored and only here to override the
   * DatagramSocket contructor
   * @param laddr ditto.
   */
  public MockDatagramSocket(int port, InetAddress laddr)
      throws SocketException {
    this();
  }

  /**
   * Flags this as a closed socket. Can be tested with isClosed method
   * @see #isClosed
   */
  public void close() {
    this.isClosed = true;
  }

  /**
   * Stubbed
   */
  public void connect(InetAddress address, int port) {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed
   */
  public void disconnect() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed
   */
  public InetAddress getInetAddress() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed
   */
  public InetAddress getLocalAddress() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /** @return the port set with {@link #setLocalPort}, or -1 if never set */
  public int getLocalPort() {
    return localPort;
  }

  /** Sets the value returned by {@link #getLocalPort}. */
  public void setLocalPort(int localPort) {
    this.localPort = localPort;
  }

  /**
   * Stubbed
   */
  public int getPort() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed.  NOTE(review): name differs from DatagramSocket's
   * getReceiveBufferSize(), so this does not override it -- confirm intended.
   */
  public int getReceiverBufferSize() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed
   */
  public int getSendBufferSize() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * Stubbed
   */
  public int getSoTimeout() {
    throw new UnsupportedOperationException("Not Implemented");
  }

  /**
   * "Receive" a packet that has been added to the receive queue with
   * {@link #addToReceiveQueue}. Waits until a packet arrives if necessary
   * @param p pre-allocated packet to receive data from queued packet
   * @throws IOException if the wait for a packet is interrupted
   */
  public void receive(DatagramPacket p) throws IOException {
    DatagramPacket qPkt = (DatagramPacket)receiveQueue.get();
    if (qPkt == null) {
      // didn't get a packet, must have been interrupted
      throw new IOException("MockDatagramSocket.receive interrupted");
    }
    // copy fields from "received" packet into p.
    // In Java 1.4 setData() can make the packet larger, so we must explicitly
    // set the length to the smaller of p and the received packet.
    int plen = p.getLength();
    p.setData(qPkt.getData());
    p.setLength(Math.min(plen, qPkt.getLength()));
    p.setAddress(qPkt.getAddress());
    p.setPort(qPkt.getPort());
  }

  /**
   * "Send" a packet by adding it to the output vector (which can be read with
   * {@link #getSentPackets})
   * @param p packet to "send"
   */
  public void send(DatagramPacket p) {
    // enqueue a copy, as that's what a real DatagramSocket would do.
    // Clone the raw byte array directly: the previous implementation copied
    // via new String(p.getData()).getBytes(), which round-trips through the
    // platform charset and silently corrupts any bytes that are not valid in
    // that charset.
    DatagramPacket qPkt =
      new DatagramPacket(p.getData().clone(),
                         p.getOffset(),
                         p.getLength(),
                         p.getAddress(),
                         p.getPort());
    sentPackets.add(qPkt);
  }

  public static void setDatagramSocketImplFactory(DatagramSocketImplFactory
                                                  fac) {
    throw new UnsupportedOperationException("Not Implemented");
  }

  public void setReceiveBufferSize(int size) {
    throw new UnsupportedOperationException("Not Implemented");
  }

  public void setSendBufferSize(int size) {
    throw new UnsupportedOperationException("Not Implemented");
  }

  public void setSoTimeout(int timeout) {
    throw new UnsupportedOperationException("Not Implemented");
  }

  //non-DatagramSocket methods

  /**
   * @return true if close() has been called on this socket, false otherwise
   * @see #close
   */
  public boolean isClosed() {
    return this.isClosed;
  }

  /**
   * @return a vector containing all packets which have been "sent" on the
   * socket
   * @see #send
   */
  public Vector getSentPackets() {
    return sentPackets;
  }

  /**
   * Add a packet to the receive queue to be processed by {@link #receive}
   */
  public void addToReceiveQueue(DatagramPacket packet) {
    receiveQueue.put(packet);
  }
}
| |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package redex;
import static org.fest.assertions.api.Assertions.assertThat;
import org.junit.Test;
// Fixture interface: the PRECHECK/POSTCHECK lines appear to be directives
// consumed by the redex test harness and must not be altered.
interface IA {
  // PRECHECK: method: virtual redex.IA.do_something
  // POSTCHECK: method: virtual redex.IA.do_something
  public int do_something();
}
// Fixture interface expected to have its virtual method removed after the
// redex pass (POSTCHECK-NOT directive).
interface IE {
  // PRECHECK: method: virtual redex.IE.do_something
  // POSTCHECK-NOT: method: virtual redex.IE.do_something
  public int do_something();
}
// Abstract base of CC and D; return_self() is overridden covariantly in CC.
abstract class BB implements IA {
  public BB return_self() { return this; }
}
// Concrete IA implementor; body differs from D's, so the virtual is retained.
class CC extends BB {
  // PRECHECK: method: virtual redex.CC.do_something
  // POSTCHECK: method: virtual redex.CC.do_something
  public int do_something() { return 1; }

  @Override
  public BB return_self() {
    return this;
  }
}
// Second concrete IA implementor with a distinct do_something() body.
class D extends BB {
  // PRECHECK: method: virtual redex.D.do_something
  // POSTCHECK: method: virtual redex.D.do_something
  public int do_something() { return 2; }
}
// Abstract IE implementor; harness expects its abstract method to be removed.
abstract class F implements IE {
  // PRECHECK: method: virtual redex.F.do_something
  // POSTCHECK-NOT: method: virtual redex.F.do_something
  public abstract int do_something();
}
// Concrete F subclass; harness expects its virtual to be inlined away.
class G extends F {
  // PRECHECK: method: virtual redex.G.do_something
  // POSTCHECK-NOT: method: virtual redex.G.do_something
  public int do_something() { return 3; }
}
// Second concrete F subclass; harness expects its virtual to be inlined away.
class H extends F {
  // PRECHECK: method: virtual redex.H.do_something
  // POSTCHECK-NOT: method: virtual redex.H.do_something
  public int do_something() { return 4; }
}
// Marker-style interface used by the GetInt hierarchy below.
interface J {
  int getInt();
}
// Companion interface to J, implemented by GetInt1/2/3.
interface K {
  int getAnotherInt();
}
// Combined interface implemented (via GetInt) by GetInt1/2/3.
interface IGetInt {
  public int getInt();
  public int getAnotherInt();
  public J getJ();
  public int add();
}
// Abstract base funneling IGetInt through a common supertype for the tests below.
abstract class GetInt implements IGetInt {}
// getInt() bodies are identical across GetInt1/2/3 (POSTCHECK-NOT expects
// removal); getAnotherInt() bodies differ (CHECK expects retention).
class GetInt1 extends GetInt implements J, K {
  // CHECK: method: virtual redex.GetInt1.getAnotherInt
  public int getAnotherInt() { return 2; }
  // PRECHECK: method: virtual redex.GetInt1.getInt
  // POSTCHECK-NOT: method: virtual redex.GetInt1.getInt
  public int getInt() { return 1; }
  // PRECHECK: method: virtual redex.GetInt1.getJ
  // POSTCHECK-NOT: method: virtual redex.GetInt1.getJ
  public J getJ() { return this; }
  // PRECHECK: method: virtual redex.GetInt1.add
  // POSTCHECK: method: virtual redex.GetInt1.add
  public int add() { J j = this; K k = this; return j.getInt() + k.getAnotherInt(); }
}
// Same shape as GetInt1; only getAnotherInt()'s return value differs.
class GetInt2 extends GetInt implements J, K {
  // CHECK: method: virtual redex.GetInt2.getAnotherInt
  public int getAnotherInt() { return 3; }
  // PRECHECK: method: virtual redex.GetInt2.getInt
  // POSTCHECK-NOT: method: virtual redex.GetInt2.getInt
  public int getInt() { return 1; }
  // PRECHECK: method: virtual redex.GetInt2.getJ
  // POSTCHECK-NOT: method: virtual redex.GetInt2.getJ
  public J getJ() { return this; }
  // PRECHECK: method: virtual redex.GetInt2.add
  // POSTCHECK: method: virtual redex.GetInt2.add
  public int add() { J j = this; K k = this; return j.getInt() + k.getAnotherInt(); }
}
// Same shape as GetInt1/2; only getAnotherInt()'s return value differs.
class GetInt3 extends GetInt implements J, K {
  // CHECK: method: virtual redex.GetInt3.getAnotherInt
  public int getAnotherInt() { return 4; }
  // PRECHECK: method: virtual redex.GetInt3.getInt
  // POSTCHECK-NOT: method: virtual redex.GetInt3.getInt
  public int getInt() { return 1; }
  // PRECHECK: method: virtual redex.GetInt3.getJ
  // POSTCHECK-NOT: method: virtual redex.GetInt3.getJ
  public J getJ() { return this; }
  // PRECHECK: method: virtual redex.GetInt3.add
  // POSTCHECK: method: virtual redex.GetInt3.add
  public int add() { J j = this; K k = this; return j.getInt() + k.getAnotherInt(); }
}
// Base whose subclass overrides getInt() with a byte-identical body.
class SameImplementation {
  // PRECHECK: method: virtual redex.SameImplementation.getInt
  // POSTCHECK-NOT: method: virtual redex.SameImplementation.getInt
  public int getInt() { return 1; }
}
// Override with the same body as the superclass method.
class SameImplementation2 extends SameImplementation {
  // PRECHECK: method: virtual redex.SameImplementation2.getInt
  // POSTCHECK-NOT: method: virtual redex.SameImplementation2.getInt
  @Override
  public int getInt() {
    return 1;
  }
}
// Abstract base with a single concrete subclass (BBB).
abstract class AAA {
  public abstract int return_BBB_field();
  public abstract BBB return_BBB_self();
}
// Sole AAA implementor; field is read through the devirtualized accessor.
class BBB extends AAA {
  int field = 42;

  @Override
  public int return_BBB_field() {
    return field;
  }

  @Override
  public BBB return_BBB_self() {
    return this;
  }
}
public class TrueVirtualInlineTest {
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_do_something
@Test
public void test_do_something() {
CC c = new CC();
// PRECHECK: invoke-virtual {{.*}} redex.CC.do_something
// POSTCHECK-NOT: invoke-virtual {{.*}} redex.CC.do_something
assertThat(c.do_something()).isEqualTo(1);
H h = new H();
// PRECHECK: invoke-virtual {{.*}} redex.H.do_something
// POSTCHECK-NOT: invoke-virtual {{.*}} redex.H.do_something
assertThat(h.do_something()).isEqualTo(4);
BB b;
if (Math.random() > 1) {
b = new CC();
} else {
b = new D();
}
// PRECHECK: invoke-virtual {{.*}} redex.BB.do_something
// POSTCHECK: invoke-virtual {{.*}} redex.BB.do_something
assertThat(b.do_something()).isEqualTo(2);
IA a = new CC();
// PRECHECK: invoke-interface {{.*}} redex.IA.do_something
// POSTCHECK: invoke-interface {{.*}} redex.IA.do_something
assertThat(a.do_something()).isEqualTo(1);
// CHECK: return-void
}
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_return_self
@Test
public void test_return_self() {
CC c = new CC();
// PRECHECK: invoke-virtual {{.*}} redex.CC.return_self
// POSTCHECK-NOT: invoke-virtual {{.*}} redex.CC.return_self
assertThat(c.return_self() instanceof CC).isTrue();
// CHECK: return-void
}
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_return_BBB_field
@Test
public void test_return_BBB_field() {
AAA a = new BBB();
// PRECHECK: invoke-virtual {{.*}} redex.AAA.return_BBB_field
// POSTCHECK: check-cast {{.*}} redex.BBB
assertThat(a.return_BBB_field() == 42).isTrue();
// CHECK: return-void
}
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_return_BBB_self
@Test
public void test_return_BBB_self() {
AAA a = new BBB();
// PRECHECK: invoke-virtual {{.*}} redex.AAA.return_BBB_self
// POSTCHECK: check-cast {{.*}} redex.BBB
assertThat(a.return_BBB_self() instanceof BBB).isTrue();
// CHECK: return-void
}
// getInt() has the same implementation across GetInt1/2/3, so the virtual
// call can be inlined even though the receiver is polymorphic; getAnotherInt()
// differs per subclass, so its call site must remain virtual.
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_same_implementation
@Test
public void test_same_implementation() {
GetInt get_int;
// Opaque branches keep the receiver polymorphic from the optimizer's view;
// at runtime only the final else arm executes.
if (Math.random() > 1) {
get_int = new GetInt1();
} else if (Math.random() < 0) {
get_int = new GetInt3();
} else {
// get_int should be of type GetInt2
get_int = new GetInt2();
}
// PRECHECK: invoke-virtual {{.*}} redex.GetInt.getInt
// POSTCHECK-NOT: invoke-virtual {{.*}} redex.GetInt.getInt
assertThat(get_int.getInt()).isEqualTo(1);
// PRECHECK: invoke-virtual {{.*}} redex.GetInt.getAnotherInt
// POSTCHECK: invoke-virtual {{.*}} redex.GetInt.getAnotherInt
assertThat(get_int.getAnotherInt()).isEqualTo(3);
// CHECK: return-void
}
// Same-implementation inlining across a class and its subclass
// (SameImplementation / SameImplementation2).
// NOTE(review): the CHECK label below previously read
// "test_same_implementation" (no "2"), silently relying on substring
// matching; made explicit so it anchors on this method's own entry.
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_same_implementation2
@Test
public void test_same_implementation2() {
SameImplementation get_int;
if (Math.random() > 0.5) {
get_int = new SameImplementation();
} else {
get_int = new SameImplementation2();
}
// PRECHECK: invoke-virtual {{.*}} redex.SameImplementation.getInt
// POSTCHECK-NOT: invoke-virtual {{.*}} redex.SameImplementation.getInt
assertThat(get_int.getInt()).isEqualTo(1);
// CHECK: return-void
}
// Same-implementation inlining of getJ(): after inlining, the widened
// return value requires a check-cast to redex.J at the call site.
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_same_implementation3
@Test
public void test_same_implementation3() {
GetInt get_int;
// Opaque branches keep the receiver polymorphic; only the else arm runs.
if (Math.random() > 1) {
get_int = new GetInt1();
} else if (Math.random() < 0) {
get_int = new GetInt3();
} else {
get_int = new GetInt2();
}
// PRECHECK: invoke-virtual {{.*}} redex.GetInt.getJ
// POSTCHECK: check-cast {{.*}} redex.J
assertThat(get_int.getJ() instanceof GetInt).isTrue();
}
// add() is NOT shared across the GetInt subclasses, so despite the
// polymorphic receiver the call must stay virtual (POSTCHECK keeps it).
// CHECK: method: virtual redex.TrueVirtualInlineTest.test_same_implementation4
@Test
public void test_same_implementation4() {
GetInt get_int;
// Opaque branches keep the receiver polymorphic; only the else arm runs.
if (Math.random() > 1) {
get_int = new GetInt1();
} else if (Math.random() < 0) {
get_int = new GetInt3();
} else {
get_int = new GetInt2();
}
// PRECHECK: invoke-virtual {{.*}} redex.GetInt.add
// POSTCHECK: invoke-virtual {{.*}} redex.GetInt.add
assertThat(get_int.add() > 0).isTrue();
}
}
| |
package game.inventory;
import game.item.Item;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
* An {@code Inventory} is an arbitrary collection of {@link Item items},
* organized as an array of Slots. Each Slot references an Item via an itemId
* and stores the amount of that item there.
*
* @author Aaron Carson
* @version Jun 12, 2015
*/
public class SlotInventory implements Inventory<Item> {

    /** Backing storage; a {@code null} entry is an empty slot. */
    private ItemSlot[] slots;

    /**
     * Create a new SlotInventory of the given size.
     *
     * @param size The size of the SlotInventory.
     */
    public SlotInventory(int size) {
        slots = new ItemSlot[size];
    }

    /**
     * Create a new SlotInventory of default size of 10.
     */
    public SlotInventory() {
        slots = new ItemSlot[10];
    }

    @Override
    public int getCapacity() {
        return slots.length;
    }

    @Override
    public boolean contains(Item item) {
        return contains(item, 1);
    }

    /**
     * True if at least {@code amount} units of {@code item} are held,
     * summed across all slots. Short-circuits once enough are found.
     */
    @Override
    public boolean contains(Item item, int amount) {
        int count = 0;
        for (int i = 0; i < slots.length; i++) {
            // if a match is found, increment the count.
            if (!isEmptyAt(i) && slots[i].holds(item)) {
                count += slots[i].getQuantity();
                // check if enough were found.
                if (count >= amount) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Total number of units of {@code item} held across all slots. */
    @Override
    public int count(Item item) {
        int count = 0;
        for (int i = 0; i < slots.length; i++) {
            if (!isEmptyAt(i) && slots[i].holds(item)) {
                count += slots[i].getQuantity();
            }
        }
        return count;
    }

    /** Total number of units of every item held across all slots. */
    @Override
    public int countAll() {
        int count = 0;
        for (int i = 0; i < slots.length; i++) {
            if (!isEmptyAt(i)) count += slots[i].getQuantity();
        }
        return count;
    }

    /**
     * Remaining capacity for {@code item}, measured in item units (not
     * slot count, despite the name): each empty slot contributes a full
     * stack, and each partially-filled slot of the same item contributes
     * its remaining room.
     */
    @Override
    public int getEmptySlotsFor(Item item) {
        int emptySlots = 0;
        for (int i = 0; i < slots.length; i++) {
            // case 1: an empty slot contributes one full stack of this item.
            if (isEmptyAt(i)) {
                emptySlots += item.stack;
            }
            // case 2: a partially full slot of the item contributes its room.
            else if (slots[i].holds(item)) {
                emptySlots += slots[i].getRemainingCapacity();
            }
            // case 3: a slot of another item contributes nothing.
        }
        return emptySlots;
    }

    /** True when every slot is occupied and at its stack limit. */
    @Override
    public boolean isFull() {
        for (int i = 0; i < slots.length; i++) {
            // if any slot is empty, or not full ...
            if (slots[i] == null || !slots[i].isFull()) return false;
        }
        return true;
    }

    /**
     * True when no slot holds any item.
     * <p>
     * BUGFIX: this was previously implemented as {@code !isFull()}, which
     * reports "empty" for any inventory that merely has one non-full slot.
     */
    @Override
    public boolean isEmpty() {
        for (int i = 0; i < slots.length; i++) {
            if (!isEmptyAt(i)) return false;
        }
        return true;
    }

    @Override
    public boolean canHold(Item item) {
        return getEmptySlotsFor(item) > 0;
    }

    /**
     * Adds the given item, if there is room, to the first ItemSlot with room.
     *
     * @return true if the item was added, false if the inventory is full.
     */
    @Override
    public boolean add(Item item) {
        // 1. Prefer an existing, non-full stack of the same item.
        for (int i = 0; i < slots.length; i++) {
            if (slots[i] != null && slots[i].holds(item) && !slots[i].isFull()) {
                slots[i].add();
                return true;
            }
        }
        // 2. Otherwise start a new stack in the first empty slot.
        for (int i = 0; i < slots.length; i++) {
            if (isEmptyAt(i)) {
                slots[i] = new ItemSlot(item);
                slots[i].add();
                return true;
            }
        }
        // unable to add the single item, return false.
        return false;
    }

    /**
     * First looks ahead to verify there is space in the Inventory, then it adds
     * to matching item stacks, then adds to empty slots. Runtime is ~ O(3n).
     * All-or-nothing: if there is not enough room for {@code amount}, nothing
     * is added and false is returned.
     */
    @Override
    public boolean add(Item item, int amount) {
        // 1. verify space for all requested items up front.
        if (getEmptySlotsFor(item) < amount) {
            return false;
        }
        // 2. Top up existing stacks of the item; addUntilFull returns leftover.
        for (int i = 0; i < slots.length && amount > 0; i++) {
            if (slots[i] != null && slots[i].holds(item)) {
                amount = slots[i].addUntilFull(amount);
            }
        }
        // 3. Put any remaining items into empty slots.
        for (int i = 0; i < slots.length && amount > 0; i++) {
            if (isEmptyAt(i)) {
                slots[i] = new ItemSlot(item);
                amount = slots[i].addUntilFull(amount);
            }
        }
        // the look-ahead guarantees everything fit by this point.
        return true;
    }

    @Override
    public boolean addAll(Collection<Item> items) {
        boolean changed = false;
        for (Item item : items) {
            changed |= add(item);
        }
        return changed;
    }

    /** Copies the contents of another inventory into this one, slot by slot. */
    @Override
    public boolean addAll(Inventory<Item> inventory) {
        boolean changed = false;
        for (int i = 0; i < inventory.getCapacity(); i++) {
            if (!inventory.isEmptyAt(i)) {
                Item item = inventory.get(i);
                int amount = inventory.getQuantity(i);
                changed |= add(item, amount);
            }
        }
        return changed;
    }

    /** @return the item at {@code index}, or null if that slot is empty. */
    @Override
    public Item get(int index) {
        if (isEmptyAt(index)) return null;
        else return slots[index].getItem();
    }

    /** @return how many units are at {@code index}; 0 for an empty slot. */
    @Override
    public int getQuantity(int index) {
        if (isEmptyAt(index)) return 0;
        else return slots[index].getQuantity();
    }

    @Override
    public boolean isEmptyAt(int index) {
        return slots[index] == null || slots[index].isEmpty();
    }

    /** Exchanges the contents of two slots; always succeeds. */
    @Override
    public boolean swap(int index1, int index2) {
        ItemSlot slot1 = slots[index1];
        slots[index1] = slots[index2];
        slots[index2] = slot1;
        return true;
    }

    /** Removes one unit of {@code item} from the first slot that holds it. */
    @Override
    public boolean remove(Item item) {
        for (int i = 0; i < slots.length; i++) {
            // if a match is found, remove the item.
            if (!isEmptyAt(i) && slots[i].holds(item)) {
                boolean removed = slots[i].remove();
                if (removed) return true;
            }
        }
        return false;
    }

    /**
     * True if {@code index} is a valid slot index for this inventory.
     * <p>
     * BUGFIX: this was an auto-generated stub that always returned false.
     */
    @Override
    public boolean withinBounds(int index) {
        return index >= 0 && index < slots.length;
    }

    /**
     * Removes one unit from the slot at {@code index}.
     *
     * @return the item that was removed, or null if the slot was empty.
     */
    @Override
    public Item remove(int index) {
        if (isEmptyAt(index)) return null;
        slots[index].remove();
        return slots[index].getItem();
    }

    /**
     * Consolidates partial stacks of the same item (draining bottom slots
     * into top slots), then orders the slots with an ItemSlotComparator.
     */
    @Override
    public void sort() {
        // 1. Consolidate: for each slot from the top, pull matching items
        //    up from the bottom until this slot is full or the pointers meet.
        for (int i = 0; i < slots.length; i++) {
            // skip if the slot is empty.
            if (isEmptyAt(i)) continue;
            ItemSlot top = slots[i];
            Item item = top.getItem();
            for (int j = slots.length - 1; j > i; j--) {
                // skip empty bottom slots.
                if (isEmptyAt(j)) continue;
                ItemSlot bottom = slots[j];
                // add the items in the bottom slot to the top slot till full.
                if (bottom.holds(item)) {
                    int bottomQuantity = bottom.getQuantity();
                    int topCapacity = top.getRemainingCapacity();
                    // remove exactly what the top slot can absorb;
                    // addUntilFull caps itself at the top slot's capacity.
                    bottom.remove(Math.min(bottomQuantity, topCapacity));
                    top.addUntilFull(bottomQuantity);
                    // remove slot if empty.
                    if (bottom.isEmpty()) slots[j] = null;
                    // step to next slot if the top is filled.
                    if (top.isFull()) break;
                }
            }
        }
        // 2. Then, sort the array.
        Arrays.sort(slots, new ItemSlotComparator());
    }

    @Override
    public void clear() {
        slots = new ItemSlot[slots.length];
    }

    @Override
    public String toString() {
        // getContents() is presumably supplied by the Inventory interface
        // (not visible in this file) — TODO confirm.
        return getContents();
    }
}
| |
package info.nightscout.android.model.store;
import java.util.Date;
import io.realm.RealmObject;
import io.realm.annotations.Index;
/**
 * Realm-persisted application state and user settings for the uploader:
 * Nightscout connection/upload state, comms error counters, polling
 * configuration, history-sync options, urchin watch-face settings, and the
 * user-visible names of pump basal patterns and presets.
 * <p>
 * This is a Realm model class: fields are persisted and, for managed
 * instances, accessors are proxied by Realm. Keep custom logic in the
 * setters minimal.
 */
public class DataStore extends RealmObject {

    @Index
    private long timestamp;

    // --- Nightscout backfill limits ---
    // Do not send cgm/pump backfill data prior to this date; used to stop
    // overwriting older NS entries. User option to override (we clear old
    // data from NS to stop multiple entries and refresh using keys).
    private Date nightscoutLimitDate = null;
    private long nightscoutCgmCleanFrom;
    private long nightscoutPumpCleanFrom;

    // --- Nightscout connection/upload state ---
    private boolean nightscoutUpload = false;
    private String nightscoutURL = "";
    private String nightscoutSECRET = "";
    private long nightscoutReportTime = 0;
    private boolean nightscoutAvailable = false;
    private boolean nightscoutCareportal = false;

    // --- One-shot history request flags ---
    private boolean requestProfile = false;
    private boolean requestPumpHistory = false;
    private boolean requestCgmHistory = false;

    // --- Comms statistics / error counters (reset via clearAllCommsErrors) ---
    private int pumpCgmNA;
    private int commsSuccess;
    private int commsError;
    private int commsConnectError;
    private int commsSignalError;
    private int commsSgvSuccess;
    private int pumpLostSensorError;
    private int pumpClockError;
    private int pumpBatteryError;

    // --- Units and polling configuration ---
    private boolean mmolxl;
    private boolean mmolxlDecimals;
    private long pollInterval;
    private long lowBatPollInterval;
    private boolean doublePollOnPumpAway;

    // --- System (sys*) history / poll tuning options ---
    private boolean sysEnableCgmHistory;
    private int sysCgmHistoryDays;
    private boolean sysEnablePumpHistory;
    private int sysPumpHistoryDays;
    private int sysPumpHistoryFrequency;
    private boolean sysEnableClashProtect;
    private boolean sysEnablePollOverride;
    private long sysPollGracePeriod;
    private long sysPollRecoveryPeriod;
    private long sysPollWarmupPeriod;
    private long sysPollErrorRetry;
    private long sysPollOldSgvRetry;
    private boolean sysEnableWait500ms;

    // --- Debug options ---
    private boolean dbgEnableExtendedErrors;
    private boolean dbgEnableUploadErrors;

    // --- Nightscout (ns*) feature toggles ---
    private boolean nsEnableTreatments;
    private boolean nsEnableHistorySync;
    private boolean nsEnableFingerBG;
    private boolean nsEnableCalibrationInfo;
    private boolean nsEnableCalibrationInfoNow;
    private boolean nsEnableSensorChange;
    private boolean nsEnableReservoirChange;
    private boolean nsEnableBatteryChange;
    private boolean nsEnableLifetimes;
    private boolean nsEnableProfileUpload;
    private boolean nsEnableProfileSingle;
    private boolean nsEnableProfileOffset;
    private int nsProfileDefault;
    private float nsActiveInsulinTime;
    private boolean nsEnablePatternChange;
    private boolean nsEnableInsertBGasCGM;

    // --- Urchin watch-face settings ---
    private boolean urchinEnable;
    private int urchinBasalPeriod;
    private int urchinBasalScale;
    private boolean urchinBolusGraph;
    private boolean urchinBolusTags;
    private int urchinBolusPop;
    private int urchinTimeStyle;
    private int urchinDurationStyle;
    private int urchinUnitsStyle;
    private int urchinBatteyStyle;
    private int urchinConcatenateStyle;
    private String urchinCustomText1;
    private String urchinCustomText2;
    private byte[] urchinStatusLayout;

    // --- Pump-defined names for basal patterns and presets (1..8 each) ---
    private String nameBasalPattern1;
    private String nameBasalPattern2;
    private String nameBasalPattern3;
    private String nameBasalPattern4;
    private String nameBasalPattern5;
    private String nameBasalPattern6;
    private String nameBasalPattern7;
    private String nameBasalPattern8;
    private String nameTempBasalPreset1;
    private String nameTempBasalPreset2;
    private String nameTempBasalPreset3;
    private String nameTempBasalPreset4;
    private String nameTempBasalPreset5;
    private String nameTempBasalPreset6;
    private String nameTempBasalPreset7;
    private String nameTempBasalPreset8;
    private String nameBolusPreset1;
    private String nameBolusPreset2;
    private String nameBolusPreset3;
    private String nameBolusPreset4;
    private String nameBolusPreset5;
    private String nameBolusPreset6;
    private String nameBolusPreset7;
    private String nameBolusPreset8;
    // Set when any basal pattern name is replaced with a different value.
    private boolean nameBasalPatternChanged = false;

    public DataStore() {
        this.timestamp = new Date().getTime();
    }

    public Date getNightscoutLimitDate() { return nightscoutLimitDate; }
    public void setNightscoutLimitDate(Date nightscoutLimitDate) { this.nightscoutLimitDate = nightscoutLimitDate; }

    public long getNightscoutCgmCleanFrom() { return nightscoutCgmCleanFrom; }
    public void setNightscoutCgmCleanFrom(long nightscoutCgmCleanFrom) { this.nightscoutCgmCleanFrom = nightscoutCgmCleanFrom; }

    public long getNightscoutPumpCleanFrom() { return nightscoutPumpCleanFrom; }
    public void setNightscoutPumpCleanFrom(long nightscoutPumpCleanFrom) { this.nightscoutPumpCleanFrom = nightscoutPumpCleanFrom; }

    public boolean isNightscoutUpload() { return nightscoutUpload; }
    public void setNightscoutUpload(boolean nightscoutUpload) { this.nightscoutUpload = nightscoutUpload; }

    public String getNightscoutURL() { return nightscoutURL; }
    public void setNightscoutURL(String nightscoutURL) { this.nightscoutURL = nightscoutURL; }

    public String getNightscoutSECRET() { return nightscoutSECRET; }
    public void setNightscoutSECRET(String nightscoutSECRET) { this.nightscoutSECRET = nightscoutSECRET; }

    public long getNightscoutReportTime() { return nightscoutReportTime; }
    public void setNightscoutReportTime(long nightscoutReportTime) { this.nightscoutReportTime = nightscoutReportTime; }

    public boolean isNightscoutAvailable() { return nightscoutAvailable; }
    public void setNightscoutAvailable(boolean nightscoutAvailable) { this.nightscoutAvailable = nightscoutAvailable; }

    public boolean isNightscoutCareportal() { return nightscoutCareportal; }
    public void setNightscoutCareportal(boolean nightscoutCareportal) { this.nightscoutCareportal = nightscoutCareportal; }

    public boolean isRequestProfile() { return requestProfile; }
    public void setRequestProfile(boolean requestProfile) { this.requestProfile = requestProfile; }

    public boolean isRequestPumpHistory() { return requestPumpHistory; }
    public void setRequestPumpHistory(boolean requestPumpHistory) { this.requestPumpHistory = requestPumpHistory; }

    public boolean isRequestCgmHistory() { return requestCgmHistory; }
    public void setRequestCgmHistory(boolean requestCgmHistory) { this.requestCgmHistory = requestCgmHistory; }

    public int getPumpCgmNA() { return pumpCgmNA; }
    public void setPumpCgmNA(int pumpCgmNA) { this.pumpCgmNA = pumpCgmNA; }

    public int getCommsSuccess() { return commsSuccess; }
    public void setCommsSuccess(int commsSuccess) { this.commsSuccess = commsSuccess; }

    public int getCommsError() { return commsError; }
    public void setCommsError(int commsError) { this.commsError = commsError; }

    public int getCommsConnectError() { return commsConnectError; }
    public void setCommsConnectError(int commsConnectError) { this.commsConnectError = commsConnectError; }

    public int getCommsSignalError() { return commsSignalError; }
    public void setCommsSignalError(int commsSignalError) { this.commsSignalError = commsSignalError; }

    public int getCommsSgvSuccess() { return commsSgvSuccess; }
    public void setCommsSgvSuccess(int commsSgvSuccess) { this.commsSgvSuccess = commsSgvSuccess; }

    public int getPumpLostSensorError() { return pumpLostSensorError; }
    public void setPumpLostSensorError(int pumpLostSensorError) { this.pumpLostSensorError = pumpLostSensorError; }

    public int getPumpClockError() { return pumpClockError; }
    public void setPumpClockError(int pumpClockError) { this.pumpClockError = pumpClockError; }

    public int getPumpBatteryError() { return pumpBatteryError; }
    public void setPumpBatteryError(int pumpBatteryError) { this.pumpBatteryError = pumpBatteryError; }

    /** Resets every comms/pump statistic counter to zero. */
    public void clearAllCommsErrors() {
        this.pumpCgmNA = 0;
        this.commsSuccess = 0;
        this.commsError = 0;
        this.commsConnectError = 0;
        this.commsSignalError = 0;
        this.commsSgvSuccess = 0;
        this.pumpLostSensorError = 0;
        this.pumpClockError = 0;
        this.pumpBatteryError = 0;
    }

    public boolean isMmolxl() { return mmolxl; }
    public void setMmolxl(boolean mmolxl) { this.mmolxl = mmolxl; }

    public boolean isMmolxlDecimals() { return mmolxlDecimals; }
    public void setMmolxlDecimals(boolean mmolxlDecimals) { this.mmolxlDecimals = mmolxlDecimals; }

    public long getPollInterval() { return pollInterval; }
    public void setPollInterval(long pollInterval) { this.pollInterval = pollInterval; }

    public long getLowBatPollInterval() { return lowBatPollInterval; }
    public void setLowBatPollInterval(long lowBatPollInterval) { this.lowBatPollInterval = lowBatPollInterval; }

    public boolean isDoublePollOnPumpAway() { return doublePollOnPumpAway; }
    public void setDoublePollOnPumpAway(boolean doublePollOnPumpAway) { this.doublePollOnPumpAway = doublePollOnPumpAway; }

    public boolean isSysEnableCgmHistory() { return sysEnableCgmHistory; }
    public void setSysEnableCgmHistory(boolean sysEnableCgmHistory) { this.sysEnableCgmHistory = sysEnableCgmHistory; }

    public int getSysCgmHistoryDays() { return sysCgmHistoryDays; }
    public void setSysCgmHistoryDays(int sysCgmHistoryDays) { this.sysCgmHistoryDays = sysCgmHistoryDays; }

    public boolean isSysEnablePumpHistory() { return sysEnablePumpHistory; }
    public void setSysEnablePumpHistory(boolean sysEnablePumpHistory) { this.sysEnablePumpHistory = sysEnablePumpHistory; }

    public int getSysPumpHistoryDays() { return sysPumpHistoryDays; }
    public void setSysPumpHistoryDays(int sysPumpHistoryDays) { this.sysPumpHistoryDays = sysPumpHistoryDays; }

    public int getSysPumpHistoryFrequency() { return sysPumpHistoryFrequency; }
    public void setSysPumpHistoryFrequency(int sysPumpHistoryFrequency) { this.sysPumpHistoryFrequency = sysPumpHistoryFrequency; }

    public boolean isSysEnableClashProtect() { return sysEnableClashProtect; }
    public void setSysEnableClashProtect(boolean sysEnableClashProtect) { this.sysEnableClashProtect = sysEnableClashProtect; }

    public boolean isSysEnablePollOverride() { return sysEnablePollOverride; }
    public void setSysEnablePollOverride(boolean sysEnablePollOverride) { this.sysEnablePollOverride = sysEnablePollOverride; }

    public long getSysPollGracePeriod() { return sysPollGracePeriod; }
    public void setSysPollGracePeriod(long sysPollGracePeriod) { this.sysPollGracePeriod = sysPollGracePeriod; }

    public long getSysPollRecoveryPeriod() { return sysPollRecoveryPeriod; }
    public void setSysPollRecoveryPeriod(long sysPollRecoveryPeriod) { this.sysPollRecoveryPeriod = sysPollRecoveryPeriod; }

    public long getSysPollWarmupPeriod() { return sysPollWarmupPeriod; }
    public void setSysPollWarmupPeriod(long sysPollWarmupPeriod) { this.sysPollWarmupPeriod = sysPollWarmupPeriod; }

    public long getSysPollErrorRetry() { return sysPollErrorRetry; }
    public void setSysPollErrorRetry(long sysPollErrorRetry) { this.sysPollErrorRetry = sysPollErrorRetry; }

    public long getSysPollOldSgvRetry() { return sysPollOldSgvRetry; }
    public void setSysPollOldSgvRetry(long sysPollOldSgvRetry) { this.sysPollOldSgvRetry = sysPollOldSgvRetry; }

    public boolean isSysEnableWait500ms() { return sysEnableWait500ms; }
    public void setSysEnableWait500ms(boolean sysEnableWait500ms) { this.sysEnableWait500ms = sysEnableWait500ms; }

    public boolean isDbgEnableExtendedErrors() { return dbgEnableExtendedErrors; }
    public void setDbgEnableExtendedErrors(boolean dbgEnableExtendedErrors) { this.dbgEnableExtendedErrors = dbgEnableExtendedErrors; }

    public boolean isDbgEnableUploadErrors() { return dbgEnableUploadErrors; }
    public void setDbgEnableUploadErrors(boolean dbgEnableUploadErrors) { this.dbgEnableUploadErrors = dbgEnableUploadErrors; }

    public boolean isNsEnableTreatments() { return nsEnableTreatments; }
    public void setNsEnableTreatments(boolean nsEnableTreatments) { this.nsEnableTreatments = nsEnableTreatments; }

    public boolean isNsEnableHistorySync() { return nsEnableHistorySync; }
    public void setNsEnableHistorySync(boolean nsEnableHistorySync) { this.nsEnableHistorySync = nsEnableHistorySync; }

    public boolean isNsEnableFingerBG() { return nsEnableFingerBG; }
    public void setNsEnableFingerBG(boolean nsEnableFingerBG) { this.nsEnableFingerBG = nsEnableFingerBG; }

    public boolean isNsEnableCalibrationInfo() { return nsEnableCalibrationInfo; }
    public void setNsEnableCalibrationInfo(boolean nsEnableCalibrationInfo) { this.nsEnableCalibrationInfo = nsEnableCalibrationInfo; }

    public boolean isNsEnableCalibrationInfoNow() { return nsEnableCalibrationInfoNow; }
    public void setNsEnableCalibrationInfoNow(boolean nsEnableCalibrationInfoNow) { this.nsEnableCalibrationInfoNow = nsEnableCalibrationInfoNow; }

    public boolean isNsEnableSensorChange() { return nsEnableSensorChange; }
    public void setNsEnableSensorChange(boolean nsEnableSensorChange) { this.nsEnableSensorChange = nsEnableSensorChange; }

    public boolean isNsEnableReservoirChange() { return nsEnableReservoirChange; }
    public void setNsEnableReservoirChange(boolean nsEnableReservoirChange) { this.nsEnableReservoirChange = nsEnableReservoirChange; }

    public boolean isNsEnableBatteryChange() { return nsEnableBatteryChange; }
    public void setNsEnableBatteryChange(boolean nsEnableBatteryChange) { this.nsEnableBatteryChange = nsEnableBatteryChange; }

    public boolean isNsEnableLifetimes() { return nsEnableLifetimes; }
    public void setNsEnableLifetimes(boolean nsEnableLifetimes) { this.nsEnableLifetimes = nsEnableLifetimes; }

    public boolean isNsEnableProfileUpload() { return nsEnableProfileUpload; }
    public void setNsEnableProfileUpload(boolean nsEnableProfileUpload) { this.nsEnableProfileUpload = nsEnableProfileUpload; }

    public boolean isNsEnableProfileSingle() { return nsEnableProfileSingle; }
    public void setNsEnableProfileSingle(boolean nsEnableProfileSingle) { this.nsEnableProfileSingle = nsEnableProfileSingle; }

    public boolean isNsEnableProfileOffset() { return nsEnableProfileOffset; }
    // FIX: parameter was misnamed "nsEnableProfileGroups"; renamed to match
    // the field it sets (parameter names are not part of the binary API).
    public void setNsEnableProfileOffset(boolean nsEnableProfileOffset) { this.nsEnableProfileOffset = nsEnableProfileOffset; }

    public int getNsProfileDefault() { return nsProfileDefault; }
    public void setNsProfileDefault(int nsProfileDefault) { this.nsProfileDefault = nsProfileDefault; }

    public float getNsActiveInsulinTime() { return nsActiveInsulinTime; }
    public void setNsActiveInsulinTime(float nsActiveInsulinTime) { this.nsActiveInsulinTime = nsActiveInsulinTime; }

    public boolean isNsEnablePatternChange() { return nsEnablePatternChange; }
    public void setNsEnablePatternChange(boolean nsEnablePatternChange) { this.nsEnablePatternChange = nsEnablePatternChange; }

    public boolean isNsEnableInsertBGasCGM() { return nsEnableInsertBGasCGM; }
    public void setNsEnableInsertBGasCGM(boolean nsEnableInsertBGasCGM) { this.nsEnableInsertBGasCGM = nsEnableInsertBGasCGM; }

    public boolean isUrchinEnable() { return urchinEnable; }
    public void setUrchinEnable(boolean urchinEnable) { this.urchinEnable = urchinEnable; }

    public int getUrchinBasalPeriod() { return urchinBasalPeriod; }
    public void setUrchinBasalPeriod(int urchinBasalPeriod) { this.urchinBasalPeriod = urchinBasalPeriod; }

    public int getUrchinBasalScale() { return urchinBasalScale; }
    public void setUrchinBasalScale(int urchinBasalScale) { this.urchinBasalScale = urchinBasalScale; }

    public boolean isUrchinBolusGraph() { return urchinBolusGraph; }
    public void setUrchinBolusGraph(boolean urchinBolusGraph) { this.urchinBolusGraph = urchinBolusGraph; }

    public boolean isUrchinBolusTags() { return urchinBolusTags; }
    public void setUrchinBolusTags(boolean urchinBolusTags) { this.urchinBolusTags = urchinBolusTags; }

    public int getUrchinBolusPop() { return urchinBolusPop; }
    public void setUrchinBolusPop(int urchinBolusPop) { this.urchinBolusPop = urchinBolusPop; }

    public int getUrchinTimeStyle() { return urchinTimeStyle; }
    public void setUrchinTimeStyle(int urchinTimeStyle) { this.urchinTimeStyle = urchinTimeStyle; }

    public int getUrchinDurationStyle() { return urchinDurationStyle; }
    public void setUrchinDurationStyle(int urchinDurationStyle) { this.urchinDurationStyle = urchinDurationStyle; }

    public int getUrchinUnitsStyle() { return urchinUnitsStyle; }
    public void setUrchinUnitsStyle(int urchinUnitsStyle) { this.urchinUnitsStyle = urchinUnitsStyle; }

    // NOTE(review): "Battey" is a long-standing typo for "Battery"; kept to
    // preserve the public accessor names callers rely on.
    public int getUrchinBatteyStyle() { return urchinBatteyStyle; }
    public void setUrchinBatteyStyle(int urchinBatteyStyle) { this.urchinBatteyStyle = urchinBatteyStyle; }

    public int getUrchinConcatenateStyle() { return urchinConcatenateStyle; }
    public void setUrchinConcatenateStyle(int urchinConcatenateStyle) { this.urchinConcatenateStyle = urchinConcatenateStyle; }

    public String getUrchinCustomText1() { return urchinCustomText1; }
    public void setUrchinCustomText1(String urchinCustomText1) { this.urchinCustomText1 = urchinCustomText1; }

    public String getUrchinCustomText2() { return urchinCustomText2; }
    public void setUrchinCustomText2(String urchinCustomText2) { this.urchinCustomText2 = urchinCustomText2; }

    // Note: the byte[] is shared with callers (no defensive copy), matching
    // the original behavior expected by the rest of the app.
    public byte[] getUrchinStatusLayout() { return urchinStatusLayout; }
    public void setUrchinStatusLayout(byte[] urchinStatusLayout) { this.urchinStatusLayout = urchinStatusLayout; }

    /**
     * Records the new value and flags {@link #nameBasalPatternChanged} when a
     * previously-known basal pattern name is replaced with a different one.
     * A first assignment (current == null) does not count as a change.
     *
     * @return the value to store in the field
     */
    private String trackBasalPatternName(String current, String updated) {
        if (current != null && !current.equals(updated))
            nameBasalPatternChanged = true;
        return updated;
    }

    public String getNameBasalPattern1() { return nameBasalPattern1; }
    public void setNameBasalPattern1(String nameBasalPattern1) { this.nameBasalPattern1 = trackBasalPatternName(this.nameBasalPattern1, nameBasalPattern1); }

    public String getNameBasalPattern2() { return nameBasalPattern2; }
    public void setNameBasalPattern2(String nameBasalPattern2) { this.nameBasalPattern2 = trackBasalPatternName(this.nameBasalPattern2, nameBasalPattern2); }

    public String getNameBasalPattern3() { return nameBasalPattern3; }
    public void setNameBasalPattern3(String nameBasalPattern3) { this.nameBasalPattern3 = trackBasalPatternName(this.nameBasalPattern3, nameBasalPattern3); }

    public String getNameBasalPattern4() { return nameBasalPattern4; }
    public void setNameBasalPattern4(String nameBasalPattern4) { this.nameBasalPattern4 = trackBasalPatternName(this.nameBasalPattern4, nameBasalPattern4); }

    public String getNameBasalPattern5() { return nameBasalPattern5; }
    public void setNameBasalPattern5(String nameBasalPattern5) { this.nameBasalPattern5 = trackBasalPatternName(this.nameBasalPattern5, nameBasalPattern5); }

    public String getNameBasalPattern6() { return nameBasalPattern6; }
    public void setNameBasalPattern6(String nameBasalPattern6) { this.nameBasalPattern6 = trackBasalPatternName(this.nameBasalPattern6, nameBasalPattern6); }

    public String getNameBasalPattern7() { return nameBasalPattern7; }
    public void setNameBasalPattern7(String nameBasalPattern7) { this.nameBasalPattern7 = trackBasalPatternName(this.nameBasalPattern7, nameBasalPattern7); }

    public String getNameBasalPattern8() { return nameBasalPattern8; }
    public void setNameBasalPattern8(String nameBasalPattern8) { this.nameBasalPattern8 = trackBasalPatternName(this.nameBasalPattern8, nameBasalPattern8); }

    public String getNameTempBasalPreset1() { return nameTempBasalPreset1; }
    public void setNameTempBasalPreset1(String nameTempBasalPreset1) { this.nameTempBasalPreset1 = nameTempBasalPreset1; }

    public String getNameTempBasalPreset2() { return nameTempBasalPreset2; }
    public void setNameTempBasalPreset2(String nameTempBasalPreset2) { this.nameTempBasalPreset2 = nameTempBasalPreset2; }

    public String getNameTempBasalPreset3() { return nameTempBasalPreset3; }
    public void setNameTempBasalPreset3(String nameTempBasalPreset3) { this.nameTempBasalPreset3 = nameTempBasalPreset3; }

    public String getNameTempBasalPreset4() { return nameTempBasalPreset4; }
    public void setNameTempBasalPreset4(String nameTempBasalPreset4) { this.nameTempBasalPreset4 = nameTempBasalPreset4; }

    public String getNameTempBasalPreset5() { return nameTempBasalPreset5; }
    public void setNameTempBasalPreset5(String nameTempBasalPreset5) { this.nameTempBasalPreset5 = nameTempBasalPreset5; }

    public String getNameTempBasalPreset6() { return nameTempBasalPreset6; }
    public void setNameTempBasalPreset6(String nameTempBasalPreset6) { this.nameTempBasalPreset6 = nameTempBasalPreset6; }

    public String getNameTempBasalPreset7() { return nameTempBasalPreset7; }
    public void setNameTempBasalPreset7(String nameTempBasalPreset7) { this.nameTempBasalPreset7 = nameTempBasalPreset7; }

    public String getNameTempBasalPreset8() { return nameTempBasalPreset8; }
    public void setNameTempBasalPreset8(String nameTempBasalPreset8) { this.nameTempBasalPreset8 = nameTempBasalPreset8; }

    public String getNameBolusPreset1() { return nameBolusPreset1; }
    public void setNameBolusPreset1(String nameBolusPreset1) { this.nameBolusPreset1 = nameBolusPreset1; }

    public String getNameBolusPreset2() { return nameBolusPreset2; }
    public void setNameBolusPreset2(String nameBolusPreset2) { this.nameBolusPreset2 = nameBolusPreset2; }

    public String getNameBolusPreset3() { return nameBolusPreset3; }
    public void setNameBolusPreset3(String nameBolusPreset3) { this.nameBolusPreset3 = nameBolusPreset3; }

    public String getNameBolusPreset4() { return nameBolusPreset4; }
    public void setNameBolusPreset4(String nameBolusPreset4) { this.nameBolusPreset4 = nameBolusPreset4; }

    public String getNameBolusPreset5() { return nameBolusPreset5; }
    public void setNameBolusPreset5(String nameBolusPreset5) { this.nameBolusPreset5 = nameBolusPreset5; }

    public String getNameBolusPreset6() { return nameBolusPreset6; }
    public void setNameBolusPreset6(String nameBolusPreset6) { this.nameBolusPreset6 = nameBolusPreset6; }

    public String getNameBolusPreset7() { return nameBolusPreset7; }
    public void setNameBolusPreset7(String nameBolusPreset7) { this.nameBolusPreset7 = nameBolusPreset7; }

    public String getNameBolusPreset8() { return nameBolusPreset8; }
    public void setNameBolusPreset8(String nameBolusPreset8) { this.nameBolusPreset8 = nameBolusPreset8; }

    /**
     * @param value 1-based pattern number
     * @return the stored name for that basal pattern, or "" if out of range
     *         (may be null for an in-range pattern that was never named)
     */
    public String getNameBasalPattern(int value) {
        switch (value) {
            case 1: return nameBasalPattern1;
            case 2: return nameBasalPattern2;
            case 3: return nameBasalPattern3;
            case 4: return nameBasalPattern4;
            case 5: return nameBasalPattern5;
            case 6: return nameBasalPattern6;
            case 7: return nameBasalPattern7;
            case 8: return nameBasalPattern8;
        }
        return "";
    }

    /**
     * @param value 1-based preset number
     * @return the stored temp-basal preset name, or "" if out of range
     */
    public String getNameTempBasalPreset(int value) {
        switch (value) {
            case 1: return nameTempBasalPreset1;
            case 2: return nameTempBasalPreset2;
            case 3: return nameTempBasalPreset3;
            case 4: return nameTempBasalPreset4;
            case 5: return nameTempBasalPreset5;
            case 6: return nameTempBasalPreset6;
            case 7: return nameTempBasalPreset7;
            case 8: return nameTempBasalPreset8;
        }
        return "";
    }

    /**
     * @param value 1-based preset number
     * @return the stored bolus preset name, or "" if out of range
     */
    public String getNameBolusPreset(int value) {
        switch (value) {
            case 1: return nameBolusPreset1;
            case 2: return nameBolusPreset2;
            case 3: return nameBolusPreset3;
            case 4: return nameBolusPreset4;
            case 5: return nameBolusPreset5;
            case 6: return nameBolusPreset6;
            case 7: return nameBolusPreset7;
            case 8: return nameBolusPreset8;
        }
        return "";
    }

    public boolean isNameBasalPatternChanged() { return nameBasalPatternChanged; }
    public void setNameBasalPatternChanged(boolean nameBasalPatternChanged) { this.nameBasalPatternChanged = nameBasalPatternChanged; }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datastore.chunk.reader.measure.v3;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.carbondata.core.datastore.FileReader;
import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
import org.apache.carbondata.core.datastore.chunk.reader.measure.AbstractMeasureChunkReaderV2V3Format;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.datastore.page.encoding.ColumnPageDecoder;
import org.apache.carbondata.core.memory.MemoryException;
import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
import org.apache.carbondata.core.scan.executor.util.QueryUtil;
import org.apache.carbondata.core.util.CarbonMetadataUtil;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.format.DataChunk2;
import org.apache.carbondata.format.DataChunk3;
import org.apache.carbondata.format.Encoding;
import org.apache.commons.lang.ArrayUtils;
/**
* Measure column V3 Reader class which will be used to read and uncompress
* V3 format data
* data format
* Data Format
* <FileHeader>
* <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
* <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
* <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
* <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
* <File Footer>
*/
public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChunkReaderV2V3Format {
  /**
   * end position of the last measure column in the carbon data file; needed to
   * compute the byte length of the last column, which has no "next offset"
   */
  private long measureOffsets;
  public CompressedMeasureChunkFileBasedReaderV3(BlockletInfo blockletInfo, String filePath) {
    super(blockletInfo, filePath);
    measureOffsets = blockletInfo.getMeasureOffsets();
  }
  /**
   * Below method will be used to read the measure column data form carbon data file
   * 1. Get the length of the data to be read
   * 2. Allocate the direct buffer
   * 3. read the data from file
   * 4. Get the data chunk object from data read
   * 5. Create the raw chunk object and fill the details
   *
   * @param fileReader reader for reading the column from carbon data file
   * @param columnIndex column to be read
   * @return measure raw chunk
   */
  @Override public MeasureRawColumnChunk readRawMeasureChunk(FileReader fileReader,
      int columnIndex) throws IOException {
    int dataLength = 0;
    // to calculate the length of the data to be read
    // for a column other than the last column we can subtract the offset of the
    // current column from the next column's offset to get the total length.
    // for the last column we use measureOffsets, the end position of the last
    // measure, and subtract the current column's offset from it
    if (measureColumnChunkOffsets.size() - 1 == columnIndex) {
      dataLength = (int) (measureOffsets - measureColumnChunkOffsets.get(columnIndex));
    } else {
      dataLength =
          (int) (measureColumnChunkOffsets.get(columnIndex + 1) - measureColumnChunkOffsets
              .get(columnIndex));
    }
    ByteBuffer buffer = null;
    // read the data from carbon data file; the reader is shared, so serialize access
    synchronized (fileReader) {
      buffer = fileReader
          .readByteBuffer(filePath, measureColumnChunkOffsets.get(columnIndex), dataLength);
    }
    // get the data chunk which will have all the details about the data pages
    DataChunk3 dataChunk =
        CarbonUtil.readDataChunk3(buffer, 0, measureColumnChunkLength.get(columnIndex));
    return getMeasureRawColumnChunk(fileReader, columnIndex, 0, dataLength, buffer,
        dataChunk);
  }
  /**
   * Builds a {@link MeasureRawColumnChunk} from an already-read buffer and the
   * page-level metadata decoded from it: per-page min/max values, row counts
   * and page offsets.
   *
   * @param fileReader reader kept on the chunk for later lazy page decoding
   * @param columnIndex measure column index
   * @param offset offset of this column's data inside {@code buffer}
   * @param dataLength length in bytes of this column's data
   * @param buffer raw bytes read from the carbon data file
   * @param dataChunk page-level metadata for this column
   * @return raw column chunk filled with all page details
   */
  MeasureRawColumnChunk getMeasureRawColumnChunk(FileReader fileReader, int columnIndex,
      long offset, int dataLength, ByteBuffer buffer, DataChunk3 dataChunk) {
    // creating a raw chunks instance and filling all the details
    MeasureRawColumnChunk rawColumnChunk =
        new MeasureRawColumnChunk(columnIndex, buffer, offset, dataLength, this);
    int numberOfPages = dataChunk.getPage_length().size();
    byte[][] maxValueOfEachPage = new byte[numberOfPages][];
    byte[][] minValueOfEachPage = new byte[numberOfPages][];
    int[] eachPageLength = new int[numberOfPages];
    for (int i = 0; i < numberOfPages; i++) {
      // hoist the per-page metadata lookup instead of repeating it three times
      DataChunk2 pageMetadata = dataChunk.getData_chunk_list().get(i);
      maxValueOfEachPage[i] = pageMetadata.getMin_max().getMax_values().get(0).array();
      minValueOfEachPage[i] = pageMetadata.getMin_max().getMin_values().get(0).array();
      eachPageLength[i] = pageMetadata.getNumberOfRowsInpage();
    }
    rawColumnChunk.setDataChunkV3(dataChunk);
    rawColumnChunk.setFileReader(fileReader);
    // reuse the page count computed above rather than re-reading it from the chunk
    rawColumnChunk.setPagesCount(numberOfPages);
    rawColumnChunk.setMaxValues(maxValueOfEachPage);
    rawColumnChunk.setMinValues(minValueOfEachPage);
    rawColumnChunk.setRowCount(eachPageLength);
    rawColumnChunk.setOffsets(ArrayUtils
        .toPrimitive(dataChunk.page_offset.toArray(new Integer[dataChunk.page_offset.size()])));
    return rawColumnChunk;
  }
  /**
   * Below method will be used to read the multiple measure column data in group
   * and divide into measure raw chunk object
   * Steps for reading
   * 1. Get the length of the data to be read
   * 2. Allocate the direct buffer
   * 3. read the data from file
   * 4. Get the data chunk object from file for each column
   * 5. Create the raw chunk object and fill the details for each column
   * 6. increment the offset of the data
   *
   * @param fileReader
   *          reader which will be used to read the measure columns data from file
   * @param startColumnIndex
   *          column index of the first measure column
   * @param endColumnIndex
   *          column index of the last measure column
   * @return MeasureRawColumnChunk array
   */
  protected MeasureRawColumnChunk[] readRawMeasureChunksInGroup(FileReader fileReader,
      int startColumnIndex, int endColumnIndex) throws IOException {
    // to calculate the length of the data to be read
    // we can subtract the offset of the start column from the
    // end column + 1 offset and get the total length.
    long currentMeasureOffset = measureColumnChunkOffsets.get(startColumnIndex);
    ByteBuffer buffer = null;
    // read the data from carbon data file; the reader is shared, so serialize access
    synchronized (fileReader) {
      buffer = fileReader.readByteBuffer(filePath, currentMeasureOffset,
          (int) (measureColumnChunkOffsets.get(endColumnIndex + 1) - currentMeasureOffset));
    }
    // create raw chunk for each measure column; runningLength tracks each
    // column's starting offset inside the shared buffer
    MeasureRawColumnChunk[] measureDataChunk =
        new MeasureRawColumnChunk[endColumnIndex - startColumnIndex + 1];
    int runningLength = 0;
    int index = 0;
    for (int i = startColumnIndex; i <= endColumnIndex; i++) {
      int currentLength =
          (int) (measureColumnChunkOffsets.get(i + 1) - measureColumnChunkOffsets.get(i));
      DataChunk3 dataChunk =
          CarbonUtil.readDataChunk3(buffer, runningLength, measureColumnChunkLength.get(i));
      MeasureRawColumnChunk measureRawColumnChunk =
          getMeasureRawColumnChunk(fileReader, i, runningLength, currentLength, buffer, dataChunk);
      measureDataChunk[index] = measureRawColumnChunk;
      runningLength += currentLength;
      index++;
    }
    return measureDataChunk;
  }
  /**
   * Below method will be used to convert the compressed measure chunk raw data to actual data
   *
   * @param rawColumnChunk measure raw chunk
   * @param pageNumber page number inside the raw chunk
   * @return decoded ColumnPage with its null bitset applied
   */
  @Override
  public ColumnPage decodeColumnPage(
      MeasureRawColumnChunk rawColumnChunk, int pageNumber)
      throws IOException, MemoryException {
    // data chunk of blocklet column
    DataChunk3 dataChunk3 = rawColumnChunk.getDataChunkV3();
    // data chunk of page
    DataChunk2 pageMetadata = dataChunk3.getData_chunk_list().get(pageNumber);
    String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
        pageMetadata.getChunk_meta());
    this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
    // calculating the start point of data
    // as buffer can contain multiple column data, start point will be datachunkoffset +
    // data chunk length + page offset
    int offset = (int) rawColumnChunk.getOffSet() +
        measureColumnChunkLength.get(rawColumnChunk.getColumnIndex()) +
        dataChunk3.getPage_offset().get(pageNumber);
    ColumnPage decodedPage = decodeMeasure(pageMetadata, rawColumnChunk.getRawData(), offset);
    decodedPage.setNullBits(QueryUtil.getNullBitSet(pageMetadata.presence, this.compressor));
    return decodedPage;
  }
  /**
   * Decode measure column page with page header and raw data starting from offset
   */
  protected ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
      throws MemoryException, IOException {
    List<Encoding> encodings = pageMetadata.getEncoders();
    List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
    String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
        pageMetadata.getChunk_meta());
    ColumnPageDecoder codec = encodingFactory.createDecoder(encodings, encoderMetas,
        compressorName);
    return codec.decode(pageData.array(), offset, pageMetadata.data_page_length);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.extension;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.extension.activate.ActivateExt1;
import org.apache.dubbo.common.extension.activate.impl.ActivateExt1Impl1;
import org.apache.dubbo.common.extension.activate.impl.GroupActivateExtImpl;
import org.apache.dubbo.common.extension.activate.impl.OldActivateExt1Impl2;
import org.apache.dubbo.common.extension.activate.impl.OldActivateExt1Impl3;
import org.apache.dubbo.common.extension.activate.impl.OrderActivateExtImpl1;
import org.apache.dubbo.common.extension.activate.impl.OrderActivateExtImpl2;
import org.apache.dubbo.common.extension.activate.impl.ValueActivateExtImpl;
import org.apache.dubbo.common.extension.ext1.SimpleExt;
import org.apache.dubbo.common.extension.ext1.impl.SimpleExtImpl1;
import org.apache.dubbo.common.extension.ext1.impl.SimpleExtImpl2;
import org.apache.dubbo.common.extension.ext10_multi_names.Ext10MultiNames;
import org.apache.dubbo.common.extension.ext2.Ext2;
import org.apache.dubbo.common.extension.ext6_wrap.WrappedExt;
import org.apache.dubbo.common.extension.ext6_wrap.impl.Ext5Wrapper1;
import org.apache.dubbo.common.extension.ext6_wrap.impl.Ext5Wrapper2;
import org.apache.dubbo.common.extension.ext7.InitErrorExt;
import org.apache.dubbo.common.extension.ext8_add.AddExt1;
import org.apache.dubbo.common.extension.ext8_add.AddExt2;
import org.apache.dubbo.common.extension.ext8_add.AddExt3;
import org.apache.dubbo.common.extension.ext8_add.AddExt4;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt1Impl1;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt1_ManualAdaptive;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt1_ManualAdd1;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt1_ManualAdd2;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt2_ManualAdaptive;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt3_ManualAdaptive;
import org.apache.dubbo.common.extension.ext8_add.impl.AddExt4_ManualAdaptive;
import org.apache.dubbo.common.extension.ext9_empty.Ext9Empty;
import org.apache.dubbo.common.extension.ext9_empty.impl.Ext9EmptyImpl;
import org.apache.dubbo.common.extension.injection.InjectExt;
import org.apache.dubbo.common.extension.injection.impl.InjectExtImpl;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.apache.dubbo.common.constants.CommonConstants.GROUP_KEY;
import static org.apache.dubbo.common.extension.ExtensionLoader.getExtensionLoader;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for {@code ExtensionLoader}: argument validation, default and named
 * extension lookup, wrapper handling, manual add/replace of extensions,
 * activate-extension selection, and extension dependency injection.
 *
 * NOTE(review): ExtensionLoader instances and their class caches appear to be
 * static and shared across tests, so several tests rely on loader state
 * created earlier in the same JVM (e.g. the adaptive add/replace tests call
 * getAdaptiveExtension() before mutating) — confirm before reordering or
 * parallelizing these tests.
 */
public class ExtensionLoaderTest {
    // a null extension type must be rejected up front
    @Test
    public void test_getExtensionLoader_Null() throws Exception {
        try {
            getExtensionLoader(null);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(),
                    containsString("Extension type == null"));
        }
    }
    // only interfaces can be extension points
    @Test
    public void test_getExtensionLoader_NotInterface() throws Exception {
        try {
            getExtensionLoader(ExtensionLoaderTest.class);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(),
                    containsString("Extension type (class org.apache.dubbo.common.extension.ExtensionLoaderTest) is not an interface"));
        }
    }
    // the extension interface must be annotated with @SPI
    @Test
    public void test_getExtensionLoader_NotSpiAnnotation() throws Exception {
        try {
            getExtensionLoader(NoSpiExt.class);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(),
                    allOf(containsString("org.apache.dubbo.common.extension.NoSpiExt"),
                            containsString("is not an extension"),
                            containsString("NOT annotated with @SPI")));
        }
    }
    // default extension comes from the @SPI annotation value ("impl1")
    @Test
    public void test_getDefaultExtension() throws Exception {
        SimpleExt ext = getExtensionLoader(SimpleExt.class).getDefaultExtension();
        assertThat(ext, instanceOf(SimpleExtImpl1.class));
        String name = getExtensionLoader(SimpleExt.class).getDefaultExtensionName();
        assertEquals("impl1", name);
    }
    // an @SPI with no default value yields null default extension and name
    @Test
    public void test_getDefaultExtension_NULL() throws Exception {
        Ext2 ext = getExtensionLoader(Ext2.class).getDefaultExtension();
        assertNull(ext);
        String name = getExtensionLoader(Ext2.class).getDefaultExtensionName();
        assertNull(name);
    }
    @Test
    public void test_getExtension() throws Exception {
        assertTrue(getExtensionLoader(SimpleExt.class).getExtension("impl1") instanceof SimpleExtImpl1);
        assertTrue(getExtensionLoader(SimpleExt.class).getExtension("impl2") instanceof SimpleExtImpl2);
    }
    // returned instances are wrapped; every wrapper in the chain sees the call
    @Test
    public void test_getExtension_WithWrapper() throws Exception {
        WrappedExt impl1 = getExtensionLoader(WrappedExt.class).getExtension("impl1");
        assertThat(impl1, anyOf(instanceOf(Ext5Wrapper1.class), instanceOf(Ext5Wrapper2.class)));
        WrappedExt impl2 = getExtensionLoader(WrappedExt.class).getExtension("impl2");
        assertThat(impl2, anyOf(instanceOf(Ext5Wrapper1.class), instanceOf(Ext5Wrapper2.class)));
        URL url = new URL("p1", "1.2.3.4", 1010, "path1");
        int echoCount1 = Ext5Wrapper1.echoCount.get();
        int echoCount2 = Ext5Wrapper2.echoCount.get();
        assertEquals("Ext5Impl1-echo", impl1.echo(url, "ha"));
        // one echo() call passes through both wrappers exactly once each
        assertEquals(echoCount1 + 1, Ext5Wrapper1.echoCount.get());
        assertEquals(echoCount2 + 1, Ext5Wrapper2.echoCount.get());
    }
    @Test
    public void test_getExtension_ExceptionNoExtension() throws Exception {
        try {
            getExtensionLoader(SimpleExt.class).getExtension("XXX");
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("No such extension org.apache.dubbo.common.extension.ext1.SimpleExt by name XXX"));
        }
    }
    // wrapper class names must not be usable as extension names
    @Test
    public void test_getExtension_ExceptionNoExtension_WrapperNotAffactName() throws Exception {
        try {
            getExtensionLoader(WrappedExt.class).getExtension("XXX");
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("No such extension org.apache.dubbo.common.extension.ext6_wrap.WrappedExt by name XXX"));
        }
    }
    @Test
    public void test_getExtension_ExceptionNullArg() throws Exception {
        try {
            getExtensionLoader(SimpleExt.class).getExtension(null);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(), containsString("Extension name == null"));
        }
    }
    @Test
    public void test_hasExtension() throws Exception {
        assertTrue(getExtensionLoader(SimpleExt.class).hasExtension("impl1"));
        // comma-separated lists are not a single extension name
        assertFalse(getExtensionLoader(SimpleExt.class).hasExtension("impl1,impl2"));
        assertFalse(getExtensionLoader(SimpleExt.class).hasExtension("xxx"));
        try {
            getExtensionLoader(SimpleExt.class).hasExtension(null);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(), containsString("Extension name == null"));
        }
    }
    @Test
    public void test_hasExtension_wrapperIsNotExt() throws Exception {
        assertTrue(getExtensionLoader(WrappedExt.class).hasExtension("impl1"));
        assertFalse(getExtensionLoader(WrappedExt.class).hasExtension("impl1,impl2"));
        assertFalse(getExtensionLoader(WrappedExt.class).hasExtension("xxx"));
        // wrappers are not themselves extensions
        assertFalse(getExtensionLoader(WrappedExt.class).hasExtension("wrapper1"));
        try {
            getExtensionLoader(WrappedExt.class).hasExtension(null);
            fail();
        } catch (IllegalArgumentException expected) {
            assertThat(expected.getMessage(), containsString("Extension name == null"));
        }
    }
    @Test
    public void test_getSupportedExtensions() throws Exception {
        Set<String> exts = getExtensionLoader(SimpleExt.class).getSupportedExtensions();
        Set<String> expected = new HashSet<String>();
        expected.add("impl1");
        expected.add("impl2");
        expected.add("impl3");
        assertEquals(expected, exts);
    }
    // wrapper classes must not appear in the supported-extension set
    @Test
    public void test_getSupportedExtensions_wrapperIsNotExt() throws Exception {
        Set<String> exts = getExtensionLoader(WrappedExt.class).getSupportedExtensions();
        Set<String> expected = new HashSet<String>();
        expected.add("impl1");
        expected.add("impl2");
        assertEquals(expected, exts);
    }
    // programmatic registration: unknown before addExtension, resolvable after
    @Test
    public void test_AddExtension() throws Exception {
        try {
            getExtensionLoader(AddExt1.class).getExtension("Manual1");
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("No such extension org.apache.dubbo.common.extension.ext8_add.AddExt1 by name Manual"));
        }
        getExtensionLoader(AddExt1.class).addExtension("Manual1", AddExt1_ManualAdd1.class);
        AddExt1 ext = getExtensionLoader(AddExt1.class).getExtension("Manual1");
        assertThat(ext, instanceOf(AddExt1_ManualAdd1.class));
        assertEquals("Manual1", getExtensionLoader(AddExt1.class).getExtensionName(AddExt1_ManualAdd1.class));
    }
    // addExtension works even when the SPI has no file-declared implementations
    @Test
    public void test_AddExtension_NoExtend() throws Exception {
//        ExtensionLoader.getExtensionLoader(Ext9Empty.class).getSupportedExtensions();
        getExtensionLoader(Ext9Empty.class).addExtension("ext9", Ext9EmptyImpl.class);
        Ext9Empty ext = getExtensionLoader(Ext9Empty.class).getExtension("ext9");
        assertThat(ext, instanceOf(Ext9Empty.class));
        assertEquals("ext9", getExtensionLoader(Ext9Empty.class).getExtensionName(Ext9EmptyImpl.class));
    }
    @Test
    public void test_AddExtension_ExceptionWhenExistedExtension() throws Exception {
        // NOTE(review): this lookup targets SimpleExt, not AddExt1; presumably
        // it only warms up the loader machinery — confirm it is intentional
        SimpleExt ext = getExtensionLoader(SimpleExt.class).getExtension("impl1");
        try {
            getExtensionLoader(AddExt1.class).addExtension("impl1", AddExt1_ManualAdd1.class);
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("Extension name impl1 already exists (Extension interface org.apache.dubbo.common.extension.ext8_add.AddExt1)!"));
        }
    }
    // a null name registers the class as the adaptive extension
    @Test
    public void test_AddExtension_Adaptive() throws Exception {
        ExtensionLoader<AddExt2> loader = getExtensionLoader(AddExt2.class);
        loader.addExtension(null, AddExt2_ManualAdaptive.class);
        AddExt2 adaptive = loader.getAdaptiveExtension();
        assertTrue(adaptive instanceof AddExt2_ManualAdaptive);
    }
    @Test
    public void test_AddExtension_Adaptive_ExceptionWhenExistedAdaptive() throws Exception {
        ExtensionLoader<AddExt1> loader = getExtensionLoader(AddExt1.class);
        // create the (generated) adaptive extension first so the manual add conflicts
        loader.getAdaptiveExtension();
        try {
            loader.addExtension(null, AddExt1_ManualAdaptive.class);
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("Adaptive Extension already exists (Extension interface org.apache.dubbo.common.extension.ext8_add.AddExt1)!"));
        }
    }
    // replaceExtension swaps the implementation bound to an existing name
    @Test
    public void test_replaceExtension() throws Exception {
        try {
            getExtensionLoader(AddExt1.class).getExtension("Manual2");
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("No such extension org.apache.dubbo.common.extension.ext8_add.AddExt1 by name Manual"));
        }
        {
            AddExt1 ext = getExtensionLoader(AddExt1.class).getExtension("impl1");
            assertThat(ext, instanceOf(AddExt1Impl1.class));
            assertEquals("impl1", getExtensionLoader(AddExt1.class).getExtensionName(AddExt1Impl1.class));
        }
        {
            getExtensionLoader(AddExt1.class).replaceExtension("impl1", AddExt1_ManualAdd2.class);
            AddExt1 ext = getExtensionLoader(AddExt1.class).getExtension("impl1");
            assertThat(ext, instanceOf(AddExt1_ManualAdd2.class));
            assertEquals("impl1", getExtensionLoader(AddExt1.class).getExtensionName(AddExt1_ManualAdd2.class));
        }
    }
    // replacing with a null name swaps the adaptive extension
    @Test
    public void test_replaceExtension_Adaptive() throws Exception {
        ExtensionLoader<AddExt3> loader = getExtensionLoader(AddExt3.class);
        AddExt3 adaptive = loader.getAdaptiveExtension();
        assertFalse(adaptive instanceof AddExt3_ManualAdaptive);
        loader.replaceExtension(null, AddExt3_ManualAdaptive.class);
        adaptive = loader.getAdaptiveExtension();
        assertTrue(adaptive instanceof AddExt3_ManualAdaptive);
    }
    @Test
    public void test_replaceExtension_ExceptionWhenNotExistedExtension() throws Exception {
        // force AddExt1 extension classes to be loaded before the replace attempt
        AddExt1 ext = getExtensionLoader(AddExt1.class).getExtension("impl1");
        try {
            getExtensionLoader(AddExt1.class).replaceExtension("NotExistedExtension", AddExt1_ManualAdd1.class);
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("Extension name NotExistedExtension doesn't exist (Extension interface org.apache.dubbo.common.extension.ext8_add.AddExt1)"));
        }
    }
    @Test
    public void test_replaceExtension_Adaptive_ExceptionWhenNotExistedExtension() throws Exception {
        ExtensionLoader<AddExt4> loader = getExtensionLoader(AddExt4.class);
        try {
            loader.replaceExtension(null, AddExt4_ManualAdaptive.class);
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("Adaptive Extension doesn't exist (Extension interface org.apache.dubbo.common.extension.ext8_add.AddExt4)"));
        }
    }
    // a static-initializer failure surfaces as IllegalStateException with cause
    @Test
    public void test_InitError() throws Exception {
        ExtensionLoader<InitErrorExt> loader = getExtensionLoader(InitErrorExt.class);
        loader.getExtension("ok");
        try {
            loader.getExtension("error");
            fail();
        } catch (IllegalStateException expected) {
            assertThat(expected.getMessage(), containsString("Failed to load extension class (interface: interface org.apache.dubbo.common.extension.ext7.InitErrorExt"));
            assertThat(expected.getCause(), instanceOf(ExceptionInInitializerError.class));
        }
    }
    // @Activate selection by default group, group match, legacy group, value and order
    @Test
    public void testLoadActivateExtension() throws Exception {
        // test default
        URL url = URL.valueOf("test://localhost/test");
        List<ActivateExt1> list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, new String[]{}, "default_group");
        Assertions.assertEquals(1, list.size());
        Assertions.assertSame(list.get(0).getClass(), ActivateExt1Impl1.class);
        // test group
        url = url.addParameter(GROUP_KEY, "group1");
        list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, new String[]{}, "group1");
        Assertions.assertEquals(1, list.size());
        Assertions.assertSame(list.get(0).getClass(), GroupActivateExtImpl.class);
        // test old @Activate group
        url = url.addParameter(GROUP_KEY, "old_group");
        list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, new String[]{}, "old_group");
        Assertions.assertEquals(2, list.size());
        Assertions.assertTrue(list.get(0).getClass() == OldActivateExt1Impl2.class
                || list.get(0).getClass() == OldActivateExt1Impl3.class);
        // test value
        url = url.removeParameter(GROUP_KEY);
        url = url.addParameter(GROUP_KEY, "value");
        url = url.addParameter("value", "value");
        list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, new String[]{}, "value");
        Assertions.assertEquals(1, list.size());
        Assertions.assertSame(list.get(0).getClass(), ValueActivateExtImpl.class);
        // test order
        url = URL.valueOf("test://localhost/test");
        url = url.addParameter(GROUP_KEY, "order");
        list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, new String[]{}, "order");
        Assertions.assertEquals(2, list.size());
        Assertions.assertSame(list.get(0).getClass(), OrderActivateExtImpl1.class);
        Assertions.assertSame(list.get(1).getClass(), OrderActivateExtImpl2.class);
    }
    // position of "default" in the name list controls where defaults are inserted
    @Test
    public void testLoadDefaultActivateExtension() throws Exception {
        // test default
        URL url = URL.valueOf("test://localhost/test?ext=order1,default");
        List<ActivateExt1> list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, "ext", "default_group");
        Assertions.assertEquals(2, list.size());
        Assertions.assertSame(list.get(0).getClass(), OrderActivateExtImpl1.class);
        Assertions.assertSame(list.get(1).getClass(), ActivateExt1Impl1.class);
        url = URL.valueOf("test://localhost/test?ext=default,order1");
        list = getExtensionLoader(ActivateExt1.class)
                .getActivateExtension(url, "ext", "default_group");
        Assertions.assertEquals(2, list.size());
        Assertions.assertSame(list.get(0).getClass(), ActivateExt1Impl1.class);
        Assertions.assertSame(list.get(1).getClass(), OrderActivateExtImpl1.class);
    }
    // only adaptive-injectable setters are populated; others stay null
    @Test
    public void testInjectExtension() {
        // test default
        InjectExt injectExt = getExtensionLoader(InjectExt.class).getExtension("injection");
        InjectExtImpl injectExtImpl = (InjectExtImpl) injectExt;
        Assertions.assertNotNull(injectExtImpl.getSimpleExt());
        Assertions.assertNull(injectExtImpl.getSimpleExt1());
        Assertions.assertNull(injectExtImpl.getGenericType());
    }
    // one implementation may be registered under several names, but a
    // comma-joined string is not a valid single name
    @Test
    void testMultiNames() {
        Ext10MultiNames ext10MultiNames = getExtensionLoader(Ext10MultiNames.class).getExtension("impl");
        Assertions.assertNotNull(ext10MultiNames);
        ext10MultiNames = getExtensionLoader(Ext10MultiNames.class).getExtension("implMultiName");
        Assertions.assertNotNull(ext10MultiNames);
        Assertions.assertThrows(
                IllegalStateException.class,
                () -> getExtensionLoader(Ext10MultiNames.class).getExtension("impl,implMultiName")
        );
    }
    // unknown names fall back to the default extension
    @Test
    public void testGetOrDefaultExtension() {
        ExtensionLoader<InjectExt> loader = getExtensionLoader(InjectExt.class);
        InjectExt injectExt = loader.getOrDefaultExtension("non-exists");
        assertEquals(InjectExtImpl.class, injectExt.getClass());
        assertEquals(InjectExtImpl.class, loader.getOrDefaultExtension("injection").getClass());
    }
    @Test
    public void testGetSupported() {
        ExtensionLoader<InjectExt> loader = getExtensionLoader(InjectExt.class);
        assertEquals(1, loader.getSupportedExtensions().size());
        assertEquals(Collections.singleton("injection"), loader.getSupportedExtensions());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @author Anton V. Karnachuk
* @version $Revision: 1.2 $
*/
/**
* Created on 16.03.2005
*/
package org.apache.harmony.share.framework.jpda.jdwp;
import java.util.List;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import org.apache.harmony.share.framework.jpda.LogWriter;
import org.apache.harmony.share.framework.jpda.TestOptions;
import org.apache.harmony.share.framework.jpda.jdwp.exceptions.TimeoutException;
/**
* This class provides asynchronous sending JDWP commands and receiving JDWP
* events through established JDWP connection and supports timeout for these
* operations.
*/
public class PacketDispatcher extends Thread {
/**
* Variables below are intended only to help with tests failures
* investigation. They turn on/off some kinds of trace during
* tests execution which can clear up details of test failure.
*
* commandsNumberForTrace and begCommandIdForTrace define trace
* of sent JDWP commands and recceived replies for these commands:
* - begCommandIdForTrace defines starting command ID for trace
* (the first command has ID=1, the second - ID=2 and so on).
* if <= 0 then the same as = 1.
* - commandsNumberForTrace defines number of command for trace.
* if <= 0 then commands' trace is off.
*
* - eventRequestIDForTrace defines trace of received events
* according to request ID value:
* if < 0 then this trace is off;
* if = 0 then trace is for all received events;
* if > 0 then trace is for received events, which are triggered
* by this specified request ID value;
*
* - eventKindForTrace defines trace of received events
* according to this specified kind of event.
* if = 0 then this trace is off;
* See JDWPConstants.EventKind class for values of
* event kinds.
*/
    // first command ID to start command/reply tracing from (IDs begin at 1);
    // values <= 0 behave the same as 1
    int begCommandIdForTrace = 1;
    // number of commands to trace; <= 0 disables command/reply tracing
    int commandsNumberForTrace = 0;
    // request-ID filter for event tracing: < 0 off, 0 trace all events,
    // > 0 trace only events triggered by that request ID
    int eventRequestIDForTrace = -1;
    // event-kind filter for event tracing; 0 disables it
    // (see JDWPConstants.EventKind for values)
    byte eventKindForTrace = 0;
    /**
     * Internal class to synchronize JDWP events: each received event is queued
     * in eventQueue, and any thread waiting for an event is notified.
     */
    private class EventsSynchronyzer {
        // FIFO list of received, not-yet-consumed events
        private List eventQueue;
        // default constructor
        EventsSynchronyzer() {
            // initialize eventQueue
            eventQueue = new ArrayList();
        }
        // Queues a newly received event and wakes one thread waiting in
        // waitForNextEvent().
        public void notifyThread(EventPacket eventPacket)
                throws InterruptedException {
            // use this object as lock
            synchronized (this) {
                // add the event to eventQueue
                eventQueue.add(eventPacket);
                // notify next waiting thread
                this.notify();
            }
        }
        // Waits up to 'timeout' ms for the next event. Rethrows the stored
        // connection exception if the connection died; otherwise throws
        // TimeoutException when no event arrived.
        // NOTE(review): the queue is only re-checked once after wait()
        // returns, so a spurious wakeup would be reported as a timeout —
        // confirm this is acceptable for callers.
        public EventPacket waitForNextEvent(long timeout)
                throws InterruptedException, IOException {
            // use this object as lock
            synchronized (this) {
                // if there is already a received event in eventQueue, return
                // it immediately
                synchronized (eventQueue) {
                    if (!eventQueue.isEmpty()) {
                        return (EventPacket) eventQueue.remove(0);
                    }
                    // if eventQueue is empty and the connection is already
                    // closed, rethrow the stored exception
                    if (connectionException != null)
                        throw connectionException;
                }
                // wait for the next event
                this.wait(timeout);
                // Possible states now: a new event arrived, the receive loop
                // recorded a connection failure, or the wait timed out.
                synchronized (eventQueue) {
                    if (!eventQueue.isEmpty()) {
                        // event received
                        EventPacket event = (EventPacket) eventQueue.remove(0);
                        return event;
                    }
                    if (connectionException != null) {
                        // eventQueue is empty and the connection is already
                        // closed:
                        // rethrow the stored exception
                        throw connectionException;
                    }
                }
            }
            // no events occurred during the timeout
            throw new TimeoutException(false);
        }
        // Called when the connection is closed: wakes all waiting threads so
        // they can observe connectionException or time out.
        public void terminate() {
            synchronized (this) {
                this.notifyAll();
            }
        }
    }
/**
* Internal class to synchronize jdwp commands. It sends command packets
* through connection and returns replies.
*/
class CommandsSynchronyzer {
        // next command id to hand out; initialized to 1 in the constructor
        // and incremented per sent command (see getNextId())
        private int commandId;
        // pending commands keyed by boxed command id, awaiting their replies
        private Hashtable commands;
        // received replies keyed by boxed command id, awaiting pickup by the
        // thread that sent the corresponding command
        private Hashtable replies;
        // default constructor: creates empty command/reply tables and starts
        // command ids at 1
        CommandsSynchronyzer() {
            commands = new Hashtable();
            replies = new Hashtable();
            // set first command id to 1
            commandId = 1;
        }
        // returns the current commandId and then increments it by 1;
        // synchronized so concurrent senders never receive the same id
        private synchronized int getNextId() {
            return commandId++;
        }
// notify thread that reply packet was received
public void notifyThread(ReplyPacket replyPacket) throws IOException,
InterruptedException {
synchronized (commands) {
// obtain the current command id
Integer Id = new Integer(replyPacket.getId());
// obtain the current command packet by command id
CommandPacket command = (CommandPacket) commands.remove(Id);
if (command == null) {
// we received reply's id that does not correspond to any
// command
throw new IOException(
"Reply id is corresponded to no command. Id = "
+ Id);
}
synchronized (command) {
// put the reply in replies queue
synchronized (replies) {
replies.put(new Integer(replyPacket.getId()), replyPacket);
}
// notify waiting thread
command.notifyAll();
}
}
}
// send command and wait for the reply during timeout.
// throws TimeoutException if no reply was received
public ReplyPacket waitForReply(CommandPacket command, long timeout)
throws InterruptedException, IOException {
synchronized (command) {
// if connection is already closed reraise the exception
if (connectionException != null)
throw connectionException;
// obtain new command id
Integer Id = new Integer(getNextId());
command.setId(Id.intValue());
// add command into commands hashtable
synchronized (commands) {
commands.put(Id, command);
// below is trace for sent coomasnds
if ( commandsNumberForTrace > 0 ) {
int begCommandId = begCommandIdForTrace > 1 ? begCommandIdForTrace : 1;
if ( Id.intValue() >= begCommandId ) {
if ( (Id.intValue() - begCommandId) < commandsNumberForTrace ) {
logWriter.println
(">>>>>>>>>> PacketDispatcher: PERFORM command: ID = " + Id.intValue()+
"; CommandSet = " + command.getCommandSet()+
"; Command = " + command.getCommand()+"...");
}
}
}
// write this package to connection
connection.writePacket(command.toBytesArray());
}
// if connection is already closed reraise the exception
if (connectionException != null)
throw connectionException;
// wait for reply
command.wait(timeout);
// receive the reply
ReplyPacket currentReply = null;
synchronized (replies) {
currentReply = (ReplyPacket) replies.remove(Id);
}
// if reply is ok, return it
if (currentReply != null) {
return currentReply;
}
// if connection is already closed reraise the exception
if (connectionException != null)
throw connectionException;
}
// no event was occured during timeout
throw new TimeoutException(false);
}
// sends command without waiting for the reply
// and returns command ID
public int sendCommand(CommandPacket command)
throws IOException {
// if connection is already closed reraise the exception
if (connectionException != null)
throw connectionException;
// obtain new command id
Integer Id = new Integer(getNextId());
command.setId(Id.intValue());
// add command into commands hashtable
synchronized (commands) {
commands.put(Id, command);
// below is trace for sent coomasnds
if ( commandsNumberForTrace > 0 ) {
int begCommandId = begCommandIdForTrace > 1 ? begCommandIdForTrace : 1;
if ( Id.intValue() >= begCommandId ) {
if ( (Id.intValue() - begCommandId) < commandsNumberForTrace ) {
logWriter.println
(">>>>>>>>>> PacketDispatcher: PERFORM command: ID = " + Id.intValue()+
"; CommandSet = " + command.getCommandSet()+
"; Command = " + command.getCommand()+"...");
}
}
}
// write this package to connection
connection.writePacket(command.toBytesArray());
}
// if connection is already closed reraise the exception
if (connectionException != null) {
throw connectionException;
}
return Id.intValue();
}
// receive the reply during timeout for command with specified command ID.
// throws TimeoutException if no reply was received
public ReplyPacket receiveReply(int commandId, long timeout)
throws InterruptedException, IOException {
// if connection is already closed reraise the exception
if (connectionException != null)
throw connectionException;
// receive the reply
ReplyPacket currentReply = null;
long endTimeMlsecForWait = System.currentTimeMillis() + timeout;
synchronized (replies) {
while ( true ) {
currentReply = (ReplyPacket) replies.remove(new Integer(commandId));
// if reply is ok, return it
if (currentReply != null) {
return currentReply;
}
// if connection is already closed reraise the exception
if (connectionException != null) {
throw connectionException;
}
if ( System.currentTimeMillis() >= endTimeMlsecForWait ) {
break;
}
replies.wait(100);
}
}
// no expected reply was found during timeout
throw new TimeoutException(false);
}
// This method is called when connection is closed.
// It notifies all the waiting threads
public void terminate() {
synchronized (commands) {
// enumerate all waiting commands
for (Enumeration en = commands.keys(); en.hasMoreElements();) {
CommandPacket command = (CommandPacket) commands.get(en
.nextElement());
synchronized (command) {
// notify the waiting object
command.notifyAll();
}
}
}
}
}
    /** Transport which is used to send and receive packets. */
    private TransportWrapper connection;
    /** Current test run configuration. */
    TestOptions config;
    /** Matches outgoing command packets with their reply packets. */
    private CommandsSynchronyzer commandsSynchronyzer;
    /** Queues incoming event packets for consuming threads. */
    private EventsSynchronyzer eventsSynchronyzer;
    /** Sink for trace and warning output. */
    private LogWriter logWriter;
    /**
     * Set exactly once by run() when the reader loop ends (timeout, I/O error
     * or interrupt); re-raised to every thread still waiting for a packet.
     */
    private IOException connectionException;
    /**
     * Creates new PacketDispatcher instance and immediately starts its
     * daemon reader thread.
     *
     * NOTE(review): starting the thread from the constructor publishes a
     * not-fully-constructed object to that thread; confirm no subclass relies
     * on later initialization.
     *
     * @param connection
     *            open connection for reading and writing packets
     * @param config
     *            test run options
     * @param logWriter
     *            LogWriter object
     */
    public PacketDispatcher(TransportWrapper connection, TestOptions config,
            LogWriter logWriter) {
        this.connection = connection;
        this.config = config;
        this.logWriter = logWriter;
        commandsSynchronyzer = new CommandsSynchronyzer();
        eventsSynchronyzer = new EventsSynchronyzer();
        // make thread daemon so it does not keep the VM alive
        setDaemon(true);
        // start the reader thread
        start();
    }
    /**
     * Reader loop: reads packets from the connection and dispatches each one
     * either to CommandsSynchronyzer (replies) or EventsSynchronyzer (events)
     * until an empty packet, an I/O error, or an interrupt ends the loop.
     * Whatever ends the loop is recorded in connectionException and all
     * waiting threads are woken so they can observe it.
     */
    public void run() {
        connectionException = null;
        try {
            // start listening for replies
            while (!isInterrupted()) {
                // read packet from transport
                byte[] packet = connection.readPacket();
                // break cycle if empty packet
                if (packet == null || packet.length == 0)
                    break;
                // check flags
                // NOTE(review): guard looks off by one — reading
                // packet[Packet.FLAGS_INDEX] below requires
                // packet.length > Packet.FLAGS_INDEX; confirm '<' vs '<='.
                if (packet.length < Packet.FLAGS_INDEX) {
                    logWriter.println
                    (">>>>>>>>>> PacketDispatcher WARNING: WRONG received packet size = " + packet.length);
                } else {
                    int flag = packet[Packet.FLAGS_INDEX] & 0xFF;
                    if ( flag != 0 ) {
                        if ( flag != Packet.REPLY_PACKET_FLAG) {
                            logWriter.println
                            (">>>>>>>>>> PacketDispatcher WARNING: WRONG received packet flags = " +
                                    Integer.toHexString(flag));
                        }
                    }
                }
                // check the reply flag
                if (Packet.isReply(packet)) {
                    // new reply
                    ReplyPacket replyPacket = new ReplyPacket(packet);
                    // check for received reply packet length
                    int packetLength = replyPacket.getLength();
                    if ( packetLength < Packet.HEADER_SIZE) {
                        logWriter.println
                        (">>>>>>>>>> PacketDispatcher WARNING: WRONG received packet length = " +
                                packetLength);
                    }
                    // below is trace for received replies
                    if ( commandsNumberForTrace > 0 ) {
                        int replyID = replyPacket.getId();
                        int begCommandId = begCommandIdForTrace > 1 ? begCommandIdForTrace : 1;
                        if ( replyID >= begCommandId ) {
                            if ( (replyID - begCommandId) < commandsNumberForTrace ) {
                                logWriter.println
                                (">>>>>>>>>> PacketDispatcher: Received REPLY ID = " + replyID);
                            }
                        }
                    }
                    commandsSynchronyzer.notifyThread(replyPacket);
                } else {
                    // new event
                    EventPacket eventPacket = new EventPacket(packet);
                    // parse the packet to check received events for
                    // correctness and to trace them if requested
                    ParsedEvent[] parsedEvents = ParsedEvent.parseEventPacket(eventPacket);
                    if ( (eventRequestIDForTrace >= 0) || (eventKindForTrace > 0 ) ) {
                        for ( int i=0; i < parsedEvents.length; i++ ) {
                            boolean trace = false;
                            int eventRequestID = parsedEvents[i].getRequestID();
                            if ( eventRequestIDForTrace == 0 ) {
                                // 0 means trace all request ids
                                trace = true;
                            } else {
                                if ( eventRequestID == eventRequestIDForTrace ) {
                                    trace = true;
                                }
                            }
                            byte eventKind = parsedEvents[i].getEventKind();
                            if ( eventKind == eventKindForTrace ) {
                                trace = true;
                            }
                            if ( trace ) {
                                logWriter.println
                                (">>>>>>>>>> PacketDispatcher: Received_EVENT[" + i + "]: eventRequestID= " +
                                        eventRequestID + "; eventKind = " + eventKind + "(" +
                                        JDWPConstants.EventKind.getName(eventKind) + ")");
                            }
                        }
                    }
                    eventsSynchronyzer.notifyThread(eventPacket);
                }
            }
            // normal loop end: record a timeout to be re-raised to all
            // waiting threads
            connectionException = new TimeoutException(true);
        } catch (IOException e) {
            // connection exception is re-raised to all waiting threads
            connectionException = e;
            // print stack trace
            e.printStackTrace();
        } catch (InterruptedException e) {
            // connection exception is re-raised to all waiting threads
            connectionException = new InterruptedIOException(e.getMessage());
            connectionException.initCause(e);
            // print stack trace
            e.printStackTrace();
        }
        // notify all the waiting threads
        eventsSynchronyzer.terminate();
        commandsSynchronyzer.terminate();
    }
    /**
     * Receives event from event queue if there are any events or waits during
     * timeout for any event occurrence. This method should not be used
     * simultaneously from different threads. If there was no event during the
     * timeout, TimeoutException is thrown.
     *
     * @param timeout
     *            timeout in milliseconds
     * @return received event packet
     * @throws IOException
     *             if any connection error occurred
     * @throws InterruptedException
     *             if reading packet was interrupted
     * @throws TimeoutException
     *             if timeout exceeded
     */
    public EventPacket receiveEvent(long timeout) throws IOException,
            InterruptedException, TimeoutException {
        return eventsSynchronyzer.waitForNextEvent(timeout);
    }
    /**
     * Sends JDWP command packet and waits for reply packet during default
     * timeout taken from the test configuration. If there was no reply packet
     * during the timeout, TimeoutException is thrown.
     *
     * @param command
     *            command packet to send
     * @return received reply packet
     * @throws InterruptedException
     *             if reading packet was interrupted
     * @throws IOException
     *             if any connection error occurred
     * @throws TimeoutException
     *             if timeout exceeded
     */
    public ReplyPacket performCommand(CommandPacket command)
            throws InterruptedException, IOException, TimeoutException {
        return performCommand(command, config.getTimeout());
    }
    /**
     * Sends JDWP command packet and waits for reply packet during certain
     * timeout. If there was no reply packet during the timeout,
     * TimeoutException is thrown.
     *
     * @param command
     *            command packet to send
     * @param timeout
     *            timeout in milliseconds
     * @return received reply packet
     * @throws InterruptedException
     *             if packet reading was interrupted
     * @throws IOException
     *             if any connection error occurred
     * @throws TimeoutException
     *             if timeout exceeded
     */
    public ReplyPacket performCommand(CommandPacket command, long timeout)
            throws InterruptedException, IOException, TimeoutException {
        return commandsSynchronyzer.waitForReply(command, timeout);
    }
    /**
     * Sends CommandPacket to debuggee VM without waiting for the reply.
     * This method is intended for special cases when there is need to
     * divide command's performing into two actions: command's sending
     * and receiving reply (e.g. for asynchronous JDWP commands' testing).
     * After this method the 'receiveReply()' method must be used later for
     * receiving reply for sent command.
     * It is NOT recommended to use this method for usual cases -
     * 'performCommand()' method must be used.
     *
     * @param command
     *            Command packet to be sent
     * @return command ID of sent command
     * @throws IOException
     *             if any connection error occurred
     */
    public int sendCommand(CommandPacket command)
            throws IOException {
        return commandsSynchronyzer.sendCommand(command);
    }
    /**
     * Waits for reply for command which was sent before by 'sendCommand()'
     * method. Specified timeout is used as time limit for waiting.
     * This method (jointly with 'sendCommand()') is intended for special
     * cases when there is need to divide command's performing into two
     * actions: command's sending and receiving reply (e.g. for asynchronous
     * JDWP commands' testing).
     * It is NOT recommended to use 'sendCommand()- receiveReply()' pair
     * for usual cases - 'performCommand()' method must be used.
     *
     * @param commandId
     *            Command ID of command sent before, reply to which
     *            is expected to be received
     * @param timeout
     *            Specified timeout in milliseconds to wait for reply
     * @return received ReplyPacket
     * @throws IOException
     *             if any connection error occurred
     * @throws InterruptedException
     *             if reply packet's waiting was interrupted
     * @throws TimeoutException
     *             if timeout exceeded
     */
    public ReplyPacket receiveReply(int commandId, long timeout)
            throws InterruptedException, IOException, TimeoutException {
        return commandsSynchronyzer.receiveReply(commandId, timeout);
    }
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.typeconverter.impl;
import jodd.typeconverter.TypeConverter;
import jodd.typeconverter.TypeConverterManagerBean;
import jodd.util.StringUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Converts given object to <code>float[]</code>.
 * <p>
 * Handles <code>null</code> (returned as-is), non-array sources
 * (lists, collections, iterables, delimited strings, single values)
 * and array sources (both primitive and object component types).
 */
public class FloatArrayConverter implements TypeConverter<float[]> {

	protected final TypeConverterManagerBean typeConverterManagerBean;

	public FloatArrayConverter(TypeConverterManagerBean typeConverterManagerBean) {
		this.typeConverterManagerBean = typeConverterManagerBean;
	}

	/**
	 * Converts the value to <code>float[]</code>.
	 * Returns <code>null</code> for <code>null</code> input.
	 */
	public float[] convert(Object value) {
		if (value == null) {
			return null;
		}

		Class valueClass = value.getClass();

		if (!valueClass.isArray()) {
			// source is not an array
			return convertValueToArray(value);
		}

		// source is an array
		return convertArrayToArray(value);
	}

	/**
	 * Converts a single element to <code>float</code> using the type
	 * converter manager.
	 */
	protected float convertType(Object value) {
		return typeConverterManagerBean.convertType(value, float.class).floatValue();
	}

	/**
	 * Creates an array with a single converted element.
	 */
	protected float[] convertToSingleElementArray(Object value) {
		return new float[] {convertType(value)};
	}

	/**
	 * Converts non-array value to array. Detects various
	 * collection types and iterates them to make conversion
	 * and to create target array.
	 */
	protected float[] convertValueToArray(Object value) {
		if (value instanceof List) {
			// indexed access avoids creating an iterator
			List list = (List) value;
			float[] target = new float[list.size()];

			for (int i = 0; i < list.size(); i++) {
				Object element = list.get(i);
				target[i] = convertType(element);
			}

			return target;
		}

		if (value instanceof Collection) {
			Collection collection = (Collection) value;
			float[] target = new float[collection.size()];

			int i = 0;
			for (Object element : collection) {
				target[i] = convertType(element);
				i++;
			}

			return target;
		}

		if (value instanceof Iterable) {
			// size is unknown up front, so buffer in a list first
			Iterable iterable = (Iterable) value;
			ArrayList<Float> floatArrayList = new ArrayList<>();

			for (Object element : iterable) {
				float convertedValue = convertType(element);
				floatArrayList.add(Float.valueOf(convertedValue));
			}

			float[] array = new float[floatArrayList.size()];

			for (int i = 0; i < floatArrayList.size(); i++) {
				Float f = floatArrayList.get(i);
				array[i] = f.floatValue();
			}

			return array;
		}

		if (value instanceof CharSequence) {
			// split a delimited string ("1,2,3") into elements
			String[] strings = StringUtil.splitc(value.toString(), ArrayConverter.NUMBER_DELIMITERS);
			return convertArrayToArray(strings);
		}

		// everything else:
		return convertToSingleElementArray(value);
	}

	/**
	 * Converts array value to array.
	 */
	protected float[] convertArrayToArray(Object value) {
		Class valueComponentType = value.getClass().getComponentType();

		if (valueComponentType == float.class) {
			// equal types, no conversion needed
			return (float[]) value;
		}

		float[] result;

		if (valueComponentType.isPrimitive()) {
			// convert primitive array to target array
			result = convertPrimitiveArrayToArray(value, valueComponentType);
		} else {
			// convert object array to target array
			Object[] array = (Object[]) value;
			result = new float[array.length];

			for (int i = 0; i < array.length; i++) {
				result[i] = convertType(array[i]);
			}
		}

		return result;
	}

	/**
	 * Converts primitive array to target array.
	 *
	 * @param value source array; its component type must be primitive
	 * @param primitiveComponentType component type of the source array
	 * @return converted array, or <code>null</code> for an unrecognized
	 *         primitive component type (unreachable for valid input)
	 */
	protected float[] convertPrimitiveArrayToArray(Object value, Class primitiveComponentType) {
		float[] result = null;

		// fixed: previous guard compared against float[].class, which can
		// never equal a primitive component type; float.class is correct
		// (also covers direct calls that bypass convertArrayToArray)
		if (primitiveComponentType == float.class) {
			return (float[]) value;
		}

		if (primitiveComponentType == int.class) {
			int[] array = (int[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i];
			}
		}
		else if (primitiveComponentType == long.class) {
			long[] array = (long[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i];
			}
		}
		else if (primitiveComponentType == double.class) {
			// narrowing conversion, may lose precision
			double[] array = (double[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = (float) array[i];
			}
		}
		else if (primitiveComponentType == short.class) {
			short[] array = (short[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i];
			}
		}
		else if (primitiveComponentType == byte.class) {
			byte[] array = (byte[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i];
			}
		}
		else if (primitiveComponentType == char.class) {
			char[] array = (char[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i];
			}
		}
		else if (primitiveComponentType == boolean.class) {
			// true -> 1, false -> 0
			boolean[] array = (boolean[]) value;
			result = new float[array.length];
			for (int i = 0; i < array.length; i++) {
				result[i] = array[i] ? 1 : 0;
			}
		}
		return result;
	}

}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins.cl;
import com.intellij.diagnostic.PluginException;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.lang.UrlClassLoader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.*;
/**
 * Class loader for a single IDE plugin. Loads classes first from the plugin's
 * own URLs (unless the class must come from the platform), then from the
 * parent class loaders, in order; resources are looked up the same way.
 *
 * @author Eugene Zhuravlev
 * @since 6.03.2003
 */
public class PluginClassLoader extends UrlClassLoader {
  // parent loaders consulted (in order) after this loader's own URLs
  private final ClassLoader[] myParents;
  private final PluginId myPluginId;
  private final String myPluginVersion;
  // native-library search directories, searched back-to-front in findLibrary
  private final List<String> myLibDirectories;

  public PluginClassLoader(@NotNull List<URL> urls,
                           @NotNull ClassLoader[] parents,
                           PluginId pluginId,
                           String version,
                           File pluginRoot) {
    super(build().urls(urls).allowLock().useCache());
    myParents = parents;
    myPluginId = pluginId;
    myPluginVersion = version;

    myLibDirectories = ContainerUtil.newSmartList();
    // register the plugin's standard "lib" directory if it exists
    File libDir = new File(pluginRoot, "lib");
    if (libDir.exists()) {
      myLibDirectories.add(libDir.getAbsolutePath());
    }
  }

  @Override
  public Class loadClass(@NotNull String name, boolean resolve) throws ClassNotFoundException {
    Class c = tryLoadingClass(name, resolve, null);
    if (c == null) {
      throw new ClassNotFoundException(name + " " + this);
    }
    return c;
  }

  // Changed sequence in which classes are searched, this is essential if plugin uses library,
  // a different version of which is used in IDEA.
  @Nullable
  private Class tryLoadingClass(@NotNull String name, boolean resolve, @Nullable Set<ClassLoader> visited) {
    Class c = null;
    // plugin-first lookup, except for classes that must be platform-loaded
    if (!mustBeLoadedByPlatform(name)) {
      c = loadClassInsideSelf(name);
    }

    if (c == null) {
      c = loadClassFromParents(name, visited);
    }

    if (c != null) {
      if (resolve) {
        resolveClass(c);
      }
      return c;
    }

    return null;
  }

  // Kotlin stdlib classes that appear in platform API signatures and therefore
  // must be represented by the platform's copy, not a plugin-bundled one
  private static final Set<String> KOTLIN_STDLIB_CLASSES_USED_IN_SIGNATURES = ContainerUtil.set(
    "kotlin.sequences.Sequence",
    "kotlin.Unit",
    "kotlin.Pair",
    "kotlin.Triple",
    "kotlin.jvm.internal.DefaultConstructorMarker",
    "kotlin.properties.ReadWriteProperty",
    "kotlin.properties.ReadOnlyProperty"
  );

  private static boolean mustBeLoadedByPlatform(String className) {
    //some commonly used classes from kotlin-runtime must be loaded by the platform classloader. Otherwise if a plugin bundles its own version
    // of kotlin-runtime.jar it won't be possible to call platform's methods with these types in signatures from such a plugin.
    //We assume that these classes don't change between Kotlin versions so it's safe to always load them from platform's kotlin-runtime.
    return className.startsWith("kotlin.") && (className.startsWith("kotlin.jvm.functions.") ||
                                               className.startsWith("kotlin.reflect.") ||
                                               className.startsWith("kotlin.jvm.internal.") ||
                                               KOTLIN_STDLIB_CLASSES_USED_IN_SIGNATURES.contains(className));
  }

  // Walks parents in order; 'visited' guards against cycles among
  // PluginClassLoaders and is lazily created on first use.
  @Nullable
  private Class loadClassFromParents(String name, Set<ClassLoader> visited) {
    for (ClassLoader parent : myParents) {
      if (visited == null) visited = ContainerUtilRt.newHashSet(this);
      if (!visited.add(parent)) {
        // already tried this parent on the current lookup path
        continue;
      }

      if (parent instanceof PluginClassLoader) {
        // recurse without resolving; resolution happens at the top level
        Class c = ((PluginClassLoader)parent).tryLoadingClass(name, false, visited);
        if (c != null) {
          return c;
        }
        continue;
      }

      try {
        return parent.loadClass(name);
      }
      catch (ClassNotFoundException ignoreAndContinue) {
        // Ignore and continue
      }
    }

    return null;
  }

  // Loads from this loader's own URLs only; synchronized to avoid defining
  // the same class twice.
  @Nullable
  private synchronized Class loadClassInsideSelf(@NotNull String name) {
    Class c = findLoadedClass(name);
    if (c != null) {
      return c;
    }
    try {
      c = _findClass(name);
    }
    catch (IncompatibleClassChangeError | UnsupportedClassVersionError e) {
      // wrap linkage problems so the failing plugin is identified
      throw new PluginException("While loading class " + name + ": " + e.getMessage(), e, myPluginId);
    }
    if (c != null) {
      // statistics: count classes loaded per plugin
      PluginManagerCore.addPluginClass(myPluginId);
    }

    return c;
  }

  @Override
  public URL findResource(String name) {
    // own URLs first, then parents in order
    URL resource = super.findResource(name);
    if (resource != null) return resource;

    for (ClassLoader parent : myParents) {
      URL parentResource = parent.getResource(name);
      if (parentResource != null) return parentResource;
    }

    return null;
  }

  @Override
  public InputStream getResourceAsStream(String name) {
    // own URLs first, then parents in order
    InputStream stream = super.getResourceAsStream(name);
    if (stream != null) return stream;

    for (ClassLoader parent : myParents) {
      InputStream inputStream = parent.getResourceAsStream(name);
      if (inputStream != null) return inputStream;
    }

    return null;
  }

  @Override
  public Enumeration<URL> findResources(String name) throws IOException {
    // concatenate own results with every parent's results
    @SuppressWarnings("unchecked") Enumeration<URL>[] resources = new Enumeration[myParents.length + 1];
    resources[0] = super.findResources(name);
    for (int idx = 0; idx < myParents.length; idx++) {
      resources[idx + 1] = myParents[idx].getResources(name);
    }
    return new DeepEnumeration(resources);
  }

  @SuppressWarnings("UnusedDeclaration")
  public void addLibDirectories(@NotNull Collection<String> libDirectories) {
    myLibDirectories.addAll(libDirectories);
  }

  @Override
  protected String findLibrary(String libName) {
    if (!myLibDirectories.isEmpty()) {
      String libFileName = System.mapLibraryName(libName);
      // iterate back-to-front so later-added directories win
      ListIterator<String> i = myLibDirectories.listIterator(myLibDirectories.size());
      while (i.hasPrevious()) {
        File libFile = new File(i.previous(), libFileName);
        if (libFile.exists()) {
          return libFile.getAbsolutePath();
        }
      }
    }
    return null;
  }

  public PluginId getPluginId() {
    return myPluginId;
  }

  @Override
  public String toString() {
    return "PluginClassLoader[" + myPluginId + ", " + myPluginVersion + "] " + super.toString();
  }

  /** Flattens several enumerations into one, skipping exhausted/null entries. */
  private static class DeepEnumeration implements Enumeration<URL> {
    private final Enumeration<URL>[] myEnumerations;
    // index of the enumeration currently being drained
    private int myIndex;

    DeepEnumeration(Enumeration<URL>[] enumerations) {
      myEnumerations = enumerations;
    }

    @Override
    public boolean hasMoreElements() {
      while (myIndex < myEnumerations.length) {
        Enumeration<URL> e = myEnumerations[myIndex];
        if (e != null && e.hasMoreElements()) return true;
        myIndex++;
      }
      return false;
    }

    @Override
    public URL nextElement() {
      if (!hasMoreElements()) {
        throw new NoSuchElementException();
      }
      return myEnumerations[myIndex].nextElement();
    }
  }
}
| |
package org.knowm.xchange.yobit;
import static org.apache.commons.lang3.StringUtils.join;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.utils.DateUtils;
import org.knowm.xchange.yobit.dto.marketdata.YoBitAsksBidsData;
import org.knowm.xchange.yobit.dto.marketdata.YoBitInfo;
import org.knowm.xchange.yobit.dto.marketdata.YoBitOrderBook;
import org.knowm.xchange.yobit.dto.marketdata.YoBitPair;
import org.knowm.xchange.yobit.dto.marketdata.YoBitPairs;
import org.knowm.xchange.yobit.dto.marketdata.YoBitTicker;
import org.knowm.xchange.yobit.dto.marketdata.YoBitTrade;
/**
 * Static adapters converting raw YoBit API structures into XChange DTOs.
 */
public class YoBitAdapters {

  /**
   * Parses a YoBit pair string such as "btc_usd" into a {@link CurrencyPair}.
   */
  public static CurrencyPair adaptCurrencyPair(String pair) {
    String[] currencies = pair.toUpperCase().split("_");
    return new CurrencyPair(adaptCurrency(currencies[0]), adaptCurrency(currencies[1]));
  }

  public static Currency adaptCurrency(String ccy) {
    return Currency.getInstance(ccy.toUpperCase());
  }

  /**
   * Builds an {@link OrderBook} (no timestamp) from YoBit ask/bid levels.
   */
  public static OrderBook adaptOrderBook(YoBitOrderBook book, CurrencyPair currencyPair) {
    List<LimitOrder> asks = toLimitOrderList(book.getAsks(), OrderType.ASK, currencyPair);
    List<LimitOrder> bids = toLimitOrderList(book.getBids(), OrderType.BID, currencyPair);

    return new OrderBook(null, asks, bids);
  }

  /**
   * Merges YoBit pair metadata into the exchange metadata in place and
   * returns the same {@code exchangeMetaData} instance.
   */
  public static ExchangeMetaData adaptToExchangeMetaData(ExchangeMetaData exchangeMetaData, YoBitInfo products) {
    Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = exchangeMetaData.getCurrencyPairs();
    Map<Currency, CurrencyMetaData> currencies = exchangeMetaData.getCurrencies();

    YoBitPairs pairs = products.getPairs();
    Map<CurrencyPair, YoBitPair> price = pairs.getPrice();

    for (Entry<CurrencyPair, YoBitPair> entry : price.entrySet()) {
      CurrencyPair pair = entry.getKey();
      YoBitPair value = entry.getValue();

      BigDecimal minSize = value.getMin_amount();
      Integer priceScale = value.getDecimal_places();

      currencyPairs.put(pair, new CurrencyPairMetaData(value.getFee(), minSize, null, priceScale));

      // YoBit does not report per-currency scale; default to 8 decimals
      if (!currencies.containsKey(pair.base))
        currencies.put(pair.base, new CurrencyMetaData(8));
      if (!currencies.containsKey(pair.counter))
        currencies.put(pair.counter, new CurrencyMetaData(8));
    }

    return exchangeMetaData;
  }

  private static List<LimitOrder> toLimitOrderList(List<YoBitAsksBidsData> levels, OrderType orderType, CurrencyPair currencyPair) {
    List<LimitOrder> allLevels = new ArrayList<>(levels.size());

    for (int i = 0; i < levels.size(); i++) {
      YoBitAsksBidsData ask = levels.get(i);
      // null levels are silently skipped
      if (ask != null) {
        allLevels.add(new LimitOrder(orderType, ask.getQuantity(), currencyPair, "0", null, ask.getRate()));
      }
    }

    return allLevels;
  }

  /**
   * Converts a list of raw trades into a {@link Trades} object sorted by id.
   *
   * @param ctrades raw trades as returned by the API; may be empty
   * @param currencyPair pair the trades belong to
   */
  public static Trades adaptTrades(List<YoBitTrade> ctrades, CurrencyPair currencyPair) {
    List<Trade> trades = new ArrayList<>(ctrades.size());

    for (YoBitTrade trade : ctrades) {
      OrderType type = trade.getType().equals("bid") ? OrderType.BID : OrderType.ASK;
      Trade t = new Trade(type, trade.getAmount(), currencyPair, trade.getPrice(), parseDate(trade.getTimestamp()), String.valueOf(trade.getTid()));
      trades.add(t);
    }

    // fixed: previously ctrades.get(0) was dereferenced unconditionally and
    // threw IndexOutOfBoundsException for an empty trade list; the last
    // element's tid is used as lastID, or 0 when there are no trades
    long lastTradeId = ctrades.isEmpty() ? 0L : ctrades.get(ctrades.size() - 1).getTid();
    return new Trades(trades, lastTradeId, TradeSortType.SortByID);
  }

  // YoBit timestamps are in epoch seconds; java.util.Date wants milliseconds
  private static Date parseDate(Long rawDateLong) {
    return new Date(rawDateLong * 1000);
  }

  public static Ticker adaptTicker(YoBitTicker ticker, CurrencyPair currencyPair) {
    Ticker.Builder builder = new Ticker.Builder();

    builder.currencyPair(currencyPair);
    builder.last(ticker.getLast());
    builder.bid(ticker.getBuy());
    builder.ask(ticker.getSell());
    builder.high(ticker.getHigh());
    builder.low(ticker.getLow());
    builder.volume(ticker.getVolCur());
    builder.timestamp(new Date(ticker.getUpdated() * 1000L));

    return builder.build();
  }

  /**
   * Joins pairs into YoBit's URL format, e.g. "btc_usd-ltc_usd".
   */
  public static String adaptCcyPairsToUrlFormat(Iterable<CurrencyPair> currencyPairs) {
    List<String> pairs = new ArrayList<>();
    for (CurrencyPair currencyPair : currencyPairs) {
      pairs.add(adaptCcyPairToUrlFormat(currencyPair));
    }
    return join(pairs, "-");
  }

  public static String adaptCcyPairToUrlFormat(CurrencyPair currencyPair) {
    return currencyPair.base.getCurrencyCode().toLowerCase() + "_" + currencyPair.counter.getCurrencyCode().toLowerCase();
  }

  public static OrderType adaptType(String type) {
    return type.equalsIgnoreCase("sell") ? OrderType.ASK : OrderType.BID;
  }

  /**
   * Maps YoBit status codes: 0 - active, 1 - fulfilled and closed,
   * 2 - cancelled, 3 - cancelled after partial fill. Any other code is
   * reported as PARTIALLY_FILLED (the preset default).
   */
  public static Order.OrderStatus adaptOrderStatus(String status) {
    Order.OrderStatus orderStatus = Order.OrderStatus.PARTIALLY_FILLED;

    switch (status) {
      case "0": {
        orderStatus = Order.OrderStatus.NEW;
        break;
      }
      case "1": {
        orderStatus = Order.OrderStatus.FILLED;
        break;
      }
      case "2": {
        orderStatus = Order.OrderStatus.CANCELED;
        break;
      }
      case "3": {
        // last case: no break needed
        orderStatus = Order.OrderStatus.STOPPED;
      }
    }

    return orderStatus;
  }

  /**
   * Builds a {@link LimitOrder} from the raw map returned by OrderInfo.
   */
  public static LimitOrder adaptOrder(String orderId, Map map) {
    String pair = map.get("pair").toString();
    String type = map.get("type").toString();
    // String initialAmount = map.get("start_amount").toString();
    String amountRemaining = map.get("amount").toString();
    String rate = map.get("rate").toString();
    String timestamp = map.get("timestamp_created").toString();
    String status = map.get("status").toString();//status: 0 - active, 1 - fulfilled and closed, 2 - cancelled, 3 - cancelled after partially fulfilled.

    Date time = DateUtils.fromUnixTime(Long.valueOf(timestamp));
    Order.OrderStatus orderStatus = adaptOrderStatus(status);

    return new LimitOrder(
        adaptType(type),
        new BigDecimal(amountRemaining),
        adaptCurrencyPair(pair),
        orderId,
        time,
        new BigDecimal(rate),
        null,
        null,
        orderStatus
    );
  }

  /**
   * Builds a {@link UserTrade} from the raw map returned by TradeHistory;
   * {@code key} is the trade id.
   */
  public static UserTrade adaptUserTrade(Object key, Map tradeData) {
    String id = key.toString();
    String type = tradeData.get("type").toString();
    String amount = tradeData.get("amount").toString();
    String rate = tradeData.get("rate").toString();
    String orderId = tradeData.get("order_id").toString();
    String pair = tradeData.get("pair").toString();
    String timestamp = tradeData.get("timestamp").toString();

    Date time = DateUtils.fromUnixTime(Long.valueOf(timestamp));

    return new UserTrade(
        adaptType(type),
        new BigDecimal(amount),
        adaptCurrencyPair(pair),
        new BigDecimal(rate),
        time,
        id,
        orderId,
        null,
        null
    );
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2019_03_01;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.arm.resources.models.Resource;
import com.microsoft.azure.arm.resources.models.GroupableResourceCore;
import com.microsoft.azure.arm.resources.models.HasResourceGroup;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.compute.v2019_03_01.implementation.ComputeManager;
import java.util.List;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.compute.v2019_03_01.implementation.AvailabilitySetInner;
/**
 * Type representing AvailabilitySet.
 */
public interface AvailabilitySet extends HasInner<AvailabilitySetInner>, Resource, GroupableResourceCore<ComputeManager, AvailabilitySetInner>, HasResourceGroup, Refreshable<AvailabilitySet>, Updatable<AvailabilitySet.Update>, HasManager<ComputeManager> {
    /**
     * @return the platformFaultDomainCount value.
     */
    Integer platformFaultDomainCount();

    /**
     * @return the platformUpdateDomainCount value.
     */
    Integer platformUpdateDomainCount();

    /**
     * @return the proximityPlacementGroup value.
     */
    SubResource proximityPlacementGroup();

    /**
     * @return the sku value.
     */
    Sku sku();

    /**
     * @return the statuses value.
     */
    List<InstanceViewStatus> statuses();

    /**
     * @return the virtualMachines value.
     */
    List<SubResource> virtualMachines();

    /**
     * The entirety of the AvailabilitySet definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithGroup, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of AvailabilitySet definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of an AvailabilitySet definition.
         */
        interface Blank extends GroupableResourceCore.DefinitionWithRegion<WithGroup> {
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify the resource group.
         */
        interface WithGroup extends GroupableResourceCore.DefinitionStages.WithGroup<WithCreate> {
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify PlatformFaultDomainCount.
         */
        interface WithPlatformFaultDomainCount {
            /**
             * Specifies platformFaultDomainCount.
             * @param platformFaultDomainCount Fault Domain count
             * @return the next definition stage
             */
            WithCreate withPlatformFaultDomainCount(Integer platformFaultDomainCount);
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify PlatformUpdateDomainCount.
         */
        interface WithPlatformUpdateDomainCount {
            /**
             * Specifies platformUpdateDomainCount.
             * @param platformUpdateDomainCount Update Domain count
             * @return the next definition stage
             */
            WithCreate withPlatformUpdateDomainCount(Integer platformUpdateDomainCount);
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify ProximityPlacementGroup.
         */
        interface WithProximityPlacementGroup {
            /**
             * Specifies proximityPlacementGroup.
             * @param proximityPlacementGroup Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01
             * @return the next definition stage
             */
            WithCreate withProximityPlacementGroup(SubResource proximityPlacementGroup);
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify Sku.
         */
        interface WithSku {
            /**
             * Specifies sku.
             * @param sku Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'
             * @return the next definition stage
             */
            WithCreate withSku(Sku sku);
        }

        /**
         * The stage of the AvailabilitySet definition allowing to specify VirtualMachines.
         */
        interface WithVirtualMachines {
            /**
             * Specifies virtualMachines.
             * @param virtualMachines A list of references to all virtual machines in the availability set
             * @return the next definition stage
             */
            WithCreate withVirtualMachines(List<SubResource> virtualMachines);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<AvailabilitySet>, Resource.DefinitionWithTags<WithCreate>, DefinitionStages.WithPlatformFaultDomainCount, DefinitionStages.WithPlatformUpdateDomainCount, DefinitionStages.WithProximityPlacementGroup, DefinitionStages.WithSku, DefinitionStages.WithVirtualMachines {
        }
    }

    /**
     * The template for an AvailabilitySet update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<AvailabilitySet>, Resource.UpdateWithTags<Update>, UpdateStages.WithPlatformFaultDomainCount, UpdateStages.WithPlatformUpdateDomainCount, UpdateStages.WithProximityPlacementGroup, UpdateStages.WithSku, UpdateStages.WithVirtualMachines {
    }

    /**
     * Grouping of AvailabilitySet update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the AvailabilitySet update allowing to specify PlatformFaultDomainCount.
         */
        interface WithPlatformFaultDomainCount {
            /**
             * Specifies platformFaultDomainCount.
             * @param platformFaultDomainCount Fault Domain count
             * @return the next update stage
             */
            Update withPlatformFaultDomainCount(Integer platformFaultDomainCount);
        }

        /**
         * The stage of the AvailabilitySet update allowing to specify PlatformUpdateDomainCount.
         */
        interface WithPlatformUpdateDomainCount {
            /**
             * Specifies platformUpdateDomainCount.
             * @param platformUpdateDomainCount Update Domain count
             * @return the next update stage
             */
            Update withPlatformUpdateDomainCount(Integer platformUpdateDomainCount);
        }

        /**
         * The stage of the AvailabilitySet update allowing to specify ProximityPlacementGroup.
         */
        interface WithProximityPlacementGroup {
            /**
             * Specifies proximityPlacementGroup.
             * @param proximityPlacementGroup Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01
             * @return the next update stage
             */
            Update withProximityPlacementGroup(SubResource proximityPlacementGroup);
        }

        /**
         * The stage of the AvailabilitySet update allowing to specify Sku.
         */
        interface WithSku {
            /**
             * Specifies sku.
             * @param sku Sku of the availability set
             * @return the next update stage
             */
            Update withSku(Sku sku);
        }

        /**
         * The stage of the AvailabilitySet update allowing to specify VirtualMachines.
         */
        interface WithVirtualMachines {
            /**
             * Specifies virtualMachines.
             * @param virtualMachines A list of references to all virtual machines in the availability set
             * @return the next update stage
             */
            Update withVirtualMachines(List<SubResource> virtualMachines);
        }
    }
}
| |
// File generated from our OpenAPI spec
package com.stripe.param.reporting;
import com.google.gson.annotations.SerializedName;
import com.stripe.net.ApiRequestParams;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.Getter;
@Getter
public class ReportRunListParams extends ApiRequestParams {
    /**
     * Filter on the report run's creation time. Holds either a single {@code Long}
     * timestamp value or a {@link Created} object carrying
     * {@code gt}/{@code gte}/{@code lt}/{@code lte} range bounds — see the two
     * {@code Builder#setCreated} overloads.
     */
    @SerializedName("created")
    Object created;

    /**
     * A cursor for use in pagination. {@code ending_before} is an object ID that defines your place
     * in the list. For instance, if you make a list request and receive 100 objects, starting with
     * {@code obj_bar}, your subsequent call can include {@code ending_before=obj_bar} in order to
     * fetch the previous page of the list.
     */
    @SerializedName("ending_before")
    String endingBefore;

    /** Specifies which fields in the response should be expanded. */
    @SerializedName("expand")
    List<String> expand;

    /**
     * Map of extra parameters for custom features not available in this client library. The content
     * in this map is not serialized under this field's {@code @SerializedName} value. Instead, each
     * key/value pair is serialized as if the key is a root-level field (serialized) name in this
     * param object. Effectively, this map is flattened to its parent instance.
     */
    @SerializedName(ApiRequestParams.EXTRA_PARAMS_KEY)
    Map<String, Object> extraParams;

    /**
     * A limit on the number of objects to be returned. Limit can range between 1 and 100, and the
     * default is 10.
     */
    @SerializedName("limit")
    Long limit;

    /**
     * A cursor for use in pagination. {@code starting_after} is an object ID that defines your place
     * in the list. For instance, if you make a list request and receive 100 objects, ending with
     * {@code obj_foo}, your subsequent call can include {@code starting_after=obj_foo} in order to
     * fetch the next page of the list.
     */
    @SerializedName("starting_after")
    String startingAfter;

    /** Instances are obtained via {@link #builder()}; never constructed directly. */
    private ReportRunListParams(
            Object created,
            String endingBefore,
            List<String> expand,
            Map<String, Object> extraParams,
            Long limit,
            String startingAfter) {
        this.created = created;
        this.endingBefore = endingBefore;
        this.expand = expand;
        this.extraParams = extraParams;
        this.limit = limit;
        this.startingAfter = startingAfter;
    }

    public static Builder builder() {
        return new Builder();
    }

    public static class Builder {
        private Object created;
        private String endingBefore;
        private List<String> expand;
        private Map<String, Object> extraParams;
        private Long limit;
        private String startingAfter;

        /** Finalize and obtain parameter instance from this builder. */
        public ReportRunListParams build() {
            return new ReportRunListParams(
                    this.created,
                    this.endingBefore,
                    this.expand,
                    this.extraParams,
                    this.limit,
                    this.startingAfter);
        }

        /** Sets the {@code created} filter to a {@link Created} range object. */
        public Builder setCreated(Created created) {
            this.created = created;
            return this;
        }

        /**
         * Sets the {@code created} filter to a single {@code Long} timestamp value.
         * Overwrites any previously set {@link Created} range.
         */
        public Builder setCreated(Long created) {
            this.created = created;
            return this;
        }

        /**
         * A cursor for use in pagination. {@code ending_before} is an object ID that defines your place
         * in the list. For instance, if you make a list request and receive 100 objects, starting with
         * {@code obj_bar}, your subsequent call can include {@code ending_before=obj_bar} in order to
         * fetch the previous page of the list.
         */
        public Builder setEndingBefore(String endingBefore) {
            this.endingBefore = endingBefore;
            return this;
        }

        /**
         * Add an element to `expand` list. A list is initialized for the first `add/addAll` call, and
         * subsequent calls adds additional elements to the original list. See {@link
         * ReportRunListParams#expand} for the field documentation.
         */
        public Builder addExpand(String element) {
            if (this.expand == null) {
                this.expand = new ArrayList<>();
            }
            this.expand.add(element);
            return this;
        }

        /**
         * Add all elements to `expand` list. A list is initialized for the first `add/addAll` call, and
         * subsequent calls adds additional elements to the original list. See {@link
         * ReportRunListParams#expand} for the field documentation.
         */
        public Builder addAllExpand(List<String> elements) {
            if (this.expand == null) {
                this.expand = new ArrayList<>();
            }
            this.expand.addAll(elements);
            return this;
        }

        /**
         * Add a key/value pair to `extraParams` map. A map is initialized for the first `put/putAll`
         * call, and subsequent calls add additional key/value pairs to the original map. See {@link
         * ReportRunListParams#extraParams} for the field documentation.
         */
        public Builder putExtraParam(String key, Object value) {
            if (this.extraParams == null) {
                this.extraParams = new HashMap<>();
            }
            this.extraParams.put(key, value);
            return this;
        }

        /**
         * Add all map key/value pairs to `extraParams` map. A map is initialized for the first
         * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map.
         * See {@link ReportRunListParams#extraParams} for the field documentation.
         */
        public Builder putAllExtraParam(Map<String, Object> map) {
            if (this.extraParams == null) {
                this.extraParams = new HashMap<>();
            }
            this.extraParams.putAll(map);
            return this;
        }

        /**
         * A limit on the number of objects to be returned. Limit can range between 1 and 100, and the
         * default is 10.
         */
        public Builder setLimit(Long limit) {
            this.limit = limit;
            return this;
        }

        /**
         * A cursor for use in pagination. {@code starting_after} is an object ID that defines your
         * place in the list. For instance, if you make a list request and receive 100 objects, ending
         * with {@code obj_foo}, your subsequent call can include {@code starting_after=obj_foo} in
         * order to fetch the next page of the list.
         */
        public Builder setStartingAfter(String startingAfter) {
            this.startingAfter = startingAfter;
            return this;
        }
    }

    /** Creation-time range filter with exclusive/inclusive lower and upper bounds. */
    @Getter
    public static class Created {
        /**
         * Map of extra parameters for custom features not available in this client library. The content
         * in this map is not serialized under this field's {@code @SerializedName} value. Instead, each
         * key/value pair is serialized as if the key is a root-level field (serialized) name in this
         * param object. Effectively, this map is flattened to its parent instance.
         */
        @SerializedName(ApiRequestParams.EXTRA_PARAMS_KEY)
        Map<String, Object> extraParams;

        /** Minimum value to filter by (exclusive). */
        @SerializedName("gt")
        Long gt;

        /** Minimum value to filter by (inclusive). */
        @SerializedName("gte")
        Long gte;

        /** Maximum value to filter by (exclusive). */
        @SerializedName("lt")
        Long lt;

        /** Maximum value to filter by (inclusive). */
        @SerializedName("lte")
        Long lte;

        private Created(Map<String, Object> extraParams, Long gt, Long gte, Long lt, Long lte) {
            this.extraParams = extraParams;
            this.gt = gt;
            this.gte = gte;
            this.lt = lt;
            this.lte = lte;
        }

        public static Builder builder() {
            return new Builder();
        }

        public static class Builder {
            private Map<String, Object> extraParams;
            private Long gt;
            private Long gte;
            private Long lt;
            private Long lte;

            /** Finalize and obtain parameter instance from this builder. */
            public Created build() {
                return new Created(this.extraParams, this.gt, this.gte, this.lt, this.lte);
            }

            /**
             * Add a key/value pair to `extraParams` map. A map is initialized for the first `put/putAll`
             * call, and subsequent calls add additional key/value pairs to the original map. See {@link
             * ReportRunListParams.Created#extraParams} for the field documentation.
             */
            public Builder putExtraParam(String key, Object value) {
                if (this.extraParams == null) {
                    this.extraParams = new HashMap<>();
                }
                this.extraParams.put(key, value);
                return this;
            }

            /**
             * Add all map key/value pairs to `extraParams` map. A map is initialized for the first
             * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map.
             * See {@link ReportRunListParams.Created#extraParams} for the field documentation.
             */
            public Builder putAllExtraParam(Map<String, Object> map) {
                if (this.extraParams == null) {
                    this.extraParams = new HashMap<>();
                }
                this.extraParams.putAll(map);
                return this;
            }

            /** Minimum value to filter by (exclusive). */
            public Builder setGt(Long gt) {
                this.gt = gt;
                return this;
            }

            /** Minimum value to filter by (inclusive). */
            public Builder setGte(Long gte) {
                this.gte = gte;
                return this;
            }

            /** Maximum value to filter by (exclusive). */
            public Builder setLt(Long lt) {
                this.lt = lt;
                return this;
            }

            /** Maximum value to filter by (inclusive). */
            public Builder setLte(Long lte) {
                this.lte = lte;
                return this;
            }
        }
    }
}
| |
/*
* This file is part of JICI, licensed under the MIT License (MIT).
*
* Copyright (c) 2015-2016 Aleksi Sapon <http://sapon.ca/jici/>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package ca.sapon.jici.test;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import ca.sapon.jici.lexer.Keyword;
import ca.sapon.jici.lexer.Lexer;
import ca.sapon.jici.lexer.LexerException;
import ca.sapon.jici.lexer.Symbol;
import ca.sapon.jici.lexer.Token;
import ca.sapon.jici.lexer.TokenGroup;
import ca.sapon.jici.lexer.TokenID;
/**
 * Unit tests for {@link Lexer}: verifies that each kind of single token
 * (identifiers, keywords, symbols and the various literals) lexes to the
 * expected {@link TokenID}, and that whitespace and comments yield no tokens.
 */
public class LexerTest {
    @Test
    public void testLexEmpty() {
        // an empty source produces no tokens at all
        Assert.assertEquals(0, Lexer.lex("").size());
    }

    @Test
    public void testLexSpaces() {
        // spaces, tabs and form feeds are skipped
        Assert.assertEquals(0, Lexer.lex(" \t\f").size());
    }

    @Test
    public void testLexLineTerminators() {
        // CR and LF are skipped like other whitespace
        Assert.assertEquals(0, Lexer.lex("\r\n").size());
    }

    @Test
    public void testLexIdentifier() {
        // letters, digits, '_' and '$' in all valid start/continue positions
        testLex(TokenID.IDENTIFIER, "t");
        testLex(TokenID.IDENTIFIER, "test");
        testLex(TokenID.IDENTIFIER, "_");
        testLex(TokenID.IDENTIFIER, "_t");
        testLex(TokenID.IDENTIFIER, "_test");
        testLex(TokenID.IDENTIFIER, "te_st");
        testLex(TokenID.IDENTIFIER, "test_");
        testLex(TokenID.IDENTIFIER, "t_");
        testLex(TokenID.IDENTIFIER, "$");
        testLex(TokenID.IDENTIFIER, "$t");
        testLex(TokenID.IDENTIFIER, "$test");
        testLex(TokenID.IDENTIFIER, "te$st");
        testLex(TokenID.IDENTIFIER, "test$");
        testLex(TokenID.IDENTIFIER, "t$");
        testLex(TokenID.IDENTIFIER, "t1");
        testLex(TokenID.IDENTIFIER, "_1");
        testLex(TokenID.IDENTIFIER, "$1");
        testLex(TokenID.IDENTIFIER, "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_$");
    }

    @Test
    public void testLexKeyword() {
        // every keyword the lexer knows should round-trip to its own token ID
        for (Keyword keyword : Keyword.all()) {
            testLex(keyword.getID(), keyword.getSource());
        }
    }

    @Test
    public void testLexSymbol() {
        // comment delimiters are consumed as comments, so they are excluded here
        for (Symbol symbol : Symbol.all()) {
            if (symbol.getGroup() != TokenGroup.COMMENT_DELIMITER) {
                testLex(symbol.getID(), symbol.getSource());
            }
        }
    }

    @Test
    public void testLexBooleanLiteral() {
        testLex(TokenID.LITERAL_TRUE, "true");
        testLex(TokenID.LITERAL_FALSE, "false");
    }

    @Test
    public void testLexCharacterLiteral() {
        // plain, escaped-quote, escaped-backslash, escape sequence and unicode escape
        testLex(TokenID.LITERAL_CHARACTER, "'a'");
        testLex(TokenID.LITERAL_CHARACTER, "'\\''");
        testLex(TokenID.LITERAL_CHARACTER, "'\\\\'");
        testLex(TokenID.LITERAL_CHARACTER, "'\\n'");
        testLex(TokenID.LITERAL_CHARACTER, "'\\u0061'");
    }

    @Test
    public void testLexStringLiteral() {
        testLex(TokenID.LITERAL_STRING, "\"\"");
        testLex(TokenID.LITERAL_STRING, "\"t\"");
        testLex(TokenID.LITERAL_STRING, "\"test\"");
        testLex(TokenID.LITERAL_STRING, "\"this is a test\"");
        testLex(TokenID.LITERAL_STRING, "\"\\\"\"");
        testLex(TokenID.LITERAL_STRING, "\"\\\\\"");
        testLex(TokenID.LITERAL_STRING, "\"\\n\"");
    }

    @Test
    public void testLexNullLiteral() {
        testLex(TokenID.LITERAL_NULL, "null");
    }

    @Test
    public void testLexDoubleLiteral() {
        // decimal and hex forms, with/without exponent, sign and 'd'/'D' suffix
        testLex(TokenID.LITERAL_DOUBLE, "1.");
        testLex(TokenID.LITERAL_DOUBLE, "1.0");
        testLex(TokenID.LITERAL_DOUBLE, ".1");
        testLex(TokenID.LITERAL_DOUBLE, "1d");
        testLex(TokenID.LITERAL_DOUBLE, "1.d");
        testLex(TokenID.LITERAL_DOUBLE, "1.0d");
        testLex(TokenID.LITERAL_DOUBLE, ".1d");
        testLex(TokenID.LITERAL_DOUBLE, "1e2");
        testLex(TokenID.LITERAL_DOUBLE, "1.e2");
        testLex(TokenID.LITERAL_DOUBLE, "1.0e2");
        testLex(TokenID.LITERAL_DOUBLE, ".1e2");
        testLex(TokenID.LITERAL_DOUBLE, "1e2d");
        testLex(TokenID.LITERAL_DOUBLE, "1.e2d");
        testLex(TokenID.LITERAL_DOUBLE, "1.0e2d");
        testLex(TokenID.LITERAL_DOUBLE, ".1e2d");
        testLex(TokenID.LITERAL_DOUBLE, "1e-2");
        testLex(TokenID.LITERAL_DOUBLE, "1.e-2");
        testLex(TokenID.LITERAL_DOUBLE, "1.0e-2");
        testLex(TokenID.LITERAL_DOUBLE, ".1e-2");
        testLex(TokenID.LITERAL_DOUBLE, "1e-2d");
        testLex(TokenID.LITERAL_DOUBLE, "1.e-2d");
        testLex(TokenID.LITERAL_DOUBLE, "1.0e-2d");
        testLex(TokenID.LITERAL_DOUBLE, ".1e-2d");
        testLex(TokenID.LITERAL_DOUBLE, "0x1p2");
        testLex(TokenID.LITERAL_DOUBLE, "0x1.p2");
        testLex(TokenID.LITERAL_DOUBLE, "0x.fp2");
        testLex(TokenID.LITERAL_DOUBLE, "0x1.fp2");
        testLex(TokenID.LITERAL_DOUBLE, "0x1p2d");
        testLex(TokenID.LITERAL_DOUBLE, "0x1.p2d");
        testLex(TokenID.LITERAL_DOUBLE, "0x.fp2d");
        testLex(TokenID.LITERAL_DOUBLE, "0x1.fp2d");
        testLex(TokenID.LITERAL_DOUBLE, "1D");
        testLex(TokenID.LITERAL_DOUBLE, "1E2");
        testLex(TokenID.LITERAL_DOUBLE, "1E2D");
        testLex(TokenID.LITERAL_DOUBLE, "0X1P2");
        testLex(TokenID.LITERAL_DOUBLE, "0X1P2D");
        testLex(TokenID.LITERAL_DOUBLE, "1234567890d");
    }

    @Test
    public void testLexFloatLiteral() {
        // same shapes as double, but requiring the 'f'/'F' suffix
        testLex(TokenID.LITERAL_FLOAT, "1f");
        testLex(TokenID.LITERAL_FLOAT, "1.f");
        testLex(TokenID.LITERAL_FLOAT, "1.0f");
        testLex(TokenID.LITERAL_FLOAT, ".1f");
        testLex(TokenID.LITERAL_FLOAT, "1e2f");
        testLex(TokenID.LITERAL_FLOAT, "1.e2f");
        testLex(TokenID.LITERAL_FLOAT, "1.0e2f");
        testLex(TokenID.LITERAL_FLOAT, ".1e2f");
        testLex(TokenID.LITERAL_FLOAT, "1e-2f");
        testLex(TokenID.LITERAL_FLOAT, "1.e-2f");
        testLex(TokenID.LITERAL_FLOAT, "1.0e-2f");
        testLex(TokenID.LITERAL_FLOAT, ".1e-2f");
        testLex(TokenID.LITERAL_FLOAT, "0x1p2f");
        testLex(TokenID.LITERAL_FLOAT, "0x1.p2f");
        testLex(TokenID.LITERAL_FLOAT, "0x.fp2f");
        testLex(TokenID.LITERAL_FLOAT, "0x1.fp2f");
        testLex(TokenID.LITERAL_FLOAT, "1F");
        testLex(TokenID.LITERAL_FLOAT, "1E2F");
        testLex(TokenID.LITERAL_FLOAT, "0X1P2F");
        testLex(TokenID.LITERAL_FLOAT, "1234567890f");
    }

    @Test
    public void testLexIntLiteral() {
        // decimal, hex (0x/0X), binary (0b/0B) and octal (leading 0) forms
        testLex(TokenID.LITERAL_INT, "1");
        testLex(TokenID.LITERAL_INT, "0x1");
        testLex(TokenID.LITERAL_INT, "0X1F");
        testLex(TokenID.LITERAL_INT, "0b1");
        testLex(TokenID.LITERAL_INT, "0B11");
        testLex(TokenID.LITERAL_INT, "01");
        testLex(TokenID.LITERAL_INT, "017");
        testLex(TokenID.LITERAL_INT, "1234567890");
    }

    @Test
    public void testLexLongLiteral() {
        // int forms plus the 'l'/'L' suffix
        testLex(TokenID.LITERAL_LONG, "1l");
        testLex(TokenID.LITERAL_LONG, "0x1l");
        testLex(TokenID.LITERAL_LONG, "0X1Fl");
        testLex(TokenID.LITERAL_LONG, "0b1l");
        testLex(TokenID.LITERAL_LONG, "0B11l");
        testLex(TokenID.LITERAL_LONG, "01l");
        testLex(TokenID.LITERAL_LONG, "017l");
        testLex(TokenID.LITERAL_LONG, "1L");
        testLex(TokenID.LITERAL_LONG, "0X1L");
        testLex(TokenID.LITERAL_LONG, "0B1L");
        testLex(TokenID.LITERAL_LONG, "017L");
        testLex(TokenID.LITERAL_LONG, "1234567890l");
    }

    @Test
    public void testLexComments() {
        // line and block comments produce no tokens; unterminated block comments
        // are tolerated, and tokens after a comment are still lexed
        Assert.assertEquals(0, Lexer.lex("//abcd").size());
        Assert.assertEquals(0, Lexer.lex("///abcd").size());
        Assert.assertEquals(0, Lexer.lex("//abcd\n").size());
        Assert.assertEquals(0, Lexer.lex("//abcd\r").size());
        Assert.assertEquals(0, Lexer.lex("//abcd\r\n").size());
        Assert.assertEquals(0, Lexer.lex("/*abcd").size());
        Assert.assertEquals(0, Lexer.lex("/*abcd*/").size());
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("//0\na"));
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("//0\ra"));
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("//0\r\na"));
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("/*0*/a"));
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("///*\na"));
        assertEquals(TokenID.IDENTIFIER, "a", Lexer.lex("/*//*/a"));
    }

    @Test
    public void testLexUnknownCharacter() {
        // characters outside the grammar must raise a LexerException
        try {
            testLex(null, "#");
            Assert.fail();
        } catch (LexerException ignored) {
        }
        try {
            testLex(null, "te#st");
            Assert.fail();
        } catch (LexerException ignored) {
        }
    }

    // Lexes 'source' and asserts it yields exactly one token with the given ID
    // and a source text identical to the input.
    private void testLex(TokenID expectedID, String source) {
        assertEquals(expectedID, source, Lexer.lex(source));
    }

    // Asserts the token list contains exactly one token matching ID and source.
    private void assertEquals(TokenID expectedID, String expectedSource, List<Token> actual) {
        Assert.assertEquals("Expected one token, got many", 1, actual.size());
        assertEquals(expectedID, expectedSource, actual.get(0));
    }

    // Asserts a single token's ID and source text.
    private void assertEquals(TokenID expectedID, String expectedSource, Token actual) {
        Assert.assertEquals("Expected ID didn't match actual ID", expectedID, actual.getID());
        Assert.assertEquals("Expected source didn't match actual source", expectedSource, actual.getSource());
    }
}
| |
/*
* This file is a part of the SchemaSpy project (http://schemaspy.sourceforge.net).
* Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010 John Currier
*
* SchemaSpy is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* SchemaSpy is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.sourceforge.schemaspy.view;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import net.sourceforge.schemaspy.model.Database;
import net.sourceforge.schemaspy.model.Table;
import net.sourceforge.schemaspy.model.TableColumn;
import net.sourceforge.schemaspy.model.TableIndex;
import net.sourceforge.schemaspy.util.LineWriter;
/**
* The page that lists all of the columns in the schema,
* allowing the end user to sort by column's attributes.
*
* @author John Currier
*/
public class HtmlColumnsPage extends HtmlFormatter {
// Eagerly-created singleton instance shared by all callers.
private static HtmlColumnsPage instance = new HtmlColumnsPage();

/**
 * Singleton: Don't allow instantiation
 */
private HtmlColumnsPage() {
}

/**
 * Singleton accessor
 *
 * @return the singleton instance
 */
public static HtmlColumnsPage getInstance() {
    return instance;
}
/**
 * Returns details about the columns that are displayed on this page.
 *
 * @return one {@link ColumnInfo} per sortable heading, in display order; each
 *         entry pairs the heading text with the comparator used when the page
 *         is sorted "by" that heading
 */
public List<ColumnInfo> getColumnInfos()
{
    List<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(new ColumnInfo("Table", new ByTableComparator()));
    columns.add(new ColumnInfo("Column", new ByColumnComparator()));
    columns.add(new ColumnInfo("Type", new ByTypeComparator()));
    columns.add(new ColumnInfo("Size", new BySizeComparator()));
    columns.add(new ColumnInfo("Nulls", new ByNullableComparator()));
    columns.add(new ColumnInfo("Auto", new ByAutoUpdateComparator()));
    columns.add(new ColumnInfo("Default", new ByDefaultValueComparator()));
    return columns;
}
/**
 * Pairs a sortable column heading with the comparator that implements its
 * sort order, and knows the page file name for the "sorted by this heading"
 * variant of the columns page.
 */
public class ColumnInfo
{
    private final String columnName;
    private final Comparator<TableColumn> comparator;

    private ColumnInfo(String columnName, Comparator<TableColumn> comparator)
    {
        this.columnName = columnName;
        this.comparator = comparator;
    }

    /** @return the heading text this info represents */
    public String getColumnName() {
        return columnName;
    }

    /** @return the page file name for sorting by this heading */
    public String getLocation() {
        return getLocation(columnName);
    }

    /** @return the page file name for sorting by the given heading */
    public String getLocation(String colName) {
        return "columns.by" + colName + ".html";
    }

    private Comparator<TableColumn> getComparator() {
        return comparator;
    }

    @Override
    public String toString() {
        return getLocation();
    }
}
/**
 * Writes the complete columns page: header, one table row per column across
 * all tables (ordered by the selected heading's comparator), then the footer.
 *
 * @param database the schema being documented
 * @param tables tables whose columns are listed
 * @param columnInfo heading currently sorted by; its comparator orders the rows
 * @param showOrphansDiagram whether the header links to an orphans diagram
 * @param html destination writer
 * @throws IOException on write failure
 */
public void write(Database database, Collection<Table> tables, ColumnInfo columnInfo, boolean showOrphansDiagram, LineWriter html) throws IOException {
    // TreeSet orders the rows by the selected column's comparator
    Set<TableColumn> columns = new TreeSet<TableColumn>(columnInfo.getComparator());
    Set<TableColumn> primaryColumns = new HashSet<TableColumn>();
    Set<TableColumn> indexedColumns = new HashSet<TableColumn>();

    // collect all columns plus which of them are primary-key or indexed
    for (Table table : tables) {
        columns.addAll(table.getColumns());
        primaryColumns.addAll(table.getPrimaryColumns());
        for (TableIndex index : table.getIndexes()) {
            indexedColumns.addAll(index.getColumns());
        }
    }

    writeHeader(database, columns.size(), showOrphansDiagram, columnInfo, html);

    HtmlTablePage formatter = HtmlTablePage.getInstance();
    for (TableColumn column : columns) {
        formatter.writeColumn(column, column.getTable().getName(), primaryColumns, indexedColumns, true, false, html);
    }

    writeFooter(html);
}
/**
 * Writes the page header: common chrome, the options form, the summary line
 * ("db.schema contains N columns") and the opening of the main data table.
 *
 * @param db database being documented
 * @param numberOfColumns total column count shown in the summary line
 * @param hasOrphans whether an orphans diagram is available (affects nav links)
 * @param selectedColumn heading the page is sorted by
 * @param html destination writer
 * @throws IOException on write failure
 */
private void writeHeader(Database db, int numberOfColumns, boolean hasOrphans, ColumnInfo selectedColumn, LineWriter html) throws IOException {
    writeHeader(db, null, "Columns", hasOrphans, html);

    html.writeln("<table width='100%' border='0'>");
    html.writeln("<tr><td class='container'>");
    writeGeneratedBy(db.getConnectTime(), html);
    html.writeln("</td><td class='container' rowspan='2' align='right' valign='top'>");
    writeLegend(false, false, html);
    html.writeln("</td></tr>");
    html.writeln("<tr valign='top'><td class='container' align='left' valign='top'>");
    html.writeln("<p>");
    html.writeln("<form name='options' action=''>");
    html.writeln(" <label for='showComments'><input type=checkbox id='showComments'>Comments</label>");
    html.writeln(" <label for='showLegend'><input type=checkbox checked id='showLegend'>Legend</label>");
    html.writeln("</form>");
    // NOTE(review): the second row's </td></tr> is never emitted before this
    // </table> — browsers tolerate it, but the markup is unbalanced; confirm
    // before "fixing" since downstream CSS/scripts may rely on current output
    html.writeln("</table>");

    html.writeln("<div class='indent'>");
    html.write("<b>");
    html.write(db.getName());
    if (db.getSchema() != null) {
        html.write('.');
        html.write(db.getSchema());
    }
    html.write(" contains ");
    html.write(String.valueOf(numberOfColumns));
    html.write(" columns</b> - click on heading to sort:");

    Collection<Table> tables = db.getTables();
    // table IDs are either present for all tables or none; sample the first
    boolean hasTableIds = tables.size() > 0 && tables.iterator().next().getId() != null;
    writeMainTableHeader(hasTableIds, selectedColumn, html);
    html.writeln("<tbody valign='top'>");
}
/**
 * Writes the opening of the main data table: colgroups, thead and one sortable
 * {@code <th>} per heading. Used both by this page (selectedColumn != null)
 * and by per-table pages (selectedColumn == null), which show slightly
 * different column sets.
 *
 * @param hasTableIds whether tables carry an ID to show in an extra column
 * @param selectedColumn heading currently sorted by, or null when called from
 *        a per-table page
 * @param out destination writer
 * @throws IOException on write failure
 */
public void writeMainTableHeader(boolean hasTableIds, ColumnInfo selectedColumn, LineWriter out) throws IOException {
    boolean onColumnsPage = selectedColumn != null;
    out.writeln("<a name='columns'></a>");
    out.writeln("<table id='columns' class='dataTable' border='1' rules='groups'>");
    int numCols = 6; // base number of columns
    if (hasTableIds && !onColumnsPage)
        ++numCols; // for table id
    if (onColumnsPage)
        ++numCols; // for table name
    else
        numCols += 2; // for children and parents
    // one <colgroup> per data column, plus a styled one for the comments column
    for (int i = 0; i < numCols; ++i)
        out.writeln("<colgroup>");
    out.writeln("<colgroup class='comment'>");

    out.writeln("<thead align='left'>");
    out.writeln("<tr>");
    if (hasTableIds && !onColumnsPage)
        out.writeln(getTH(selectedColumn, "ID", null, "right"));
    if (onColumnsPage)
        out.writeln(getTH(selectedColumn, "Table", null, null));
    out.writeln(getTH(selectedColumn, "Column", null, null));
    out.writeln(getTH(selectedColumn, "Type", null, null));
    out.writeln(getTH(selectedColumn, "Size", null, null));
    out.writeln(getTH(selectedColumn, "Nulls", "Are nulls allowed?", null));
    out.writeln(getTH(selectedColumn, "Auto", "Is column automatically updated?", null));
    out.writeln(getTH(selectedColumn, "Default", "Default value", null));
    if (!onColumnsPage) {
        // relationship columns only appear on per-table pages; never sortable
        out.write(" <th title='Columns in tables that reference this column'>");
        out.writeln("<span class='notSortedByColumn'>Children</span></th>");
        out.write(" <th title='Columns in tables that are referenced by this column'>");
        out.writeln("<span class='notSortedByColumn'>Parents</span></th>");
    }
    out.writeln(" <th title='Comments' class='comment'><span class='notSortedByColumn'>Comments</span></th>");
    out.writeln("</tr>");
    out.writeln("</thead>");
}
/**
 * Builds the {@code <th>} markup for one heading. The heading the page is
 * currently sorted by is styled 'sortedByColumn'; the others become links to
 * their "sorted by" page variant. When no column is selected (per-table
 * pages) the heading is plain text.
 *
 * @param selectedColumn heading currently sorted by, or null
 * @param columnName heading text
 * @param title optional tooltip, or null
 * @param align optional horizontal alignment, or null
 * @return the complete th element as a string
 */
private String getTH(ColumnInfo selectedColumn, String columnName, String title, String align) {
    StringBuilder th = new StringBuilder(" <th");
    if (align != null)
        th.append(" align='").append(align).append("'");
    if (title != null)
        th.append(" title='").append(title).append("'");

    if (selectedColumn == null) {
        th.append('>').append(columnName);
    } else if (selectedColumn.getColumnName().equals(columnName)) {
        th.append(" class='sortedByColumn'>").append(columnName);
    } else {
        th.append(" class='notSortedByColumn'>")
          .append("<a href='")
          .append(encodeHref(selectedColumn.getLocation(columnName)))
          .append("#columns'><span class='notSortedByColumn'>")
          .append(columnName)
          .append("</span></a>");
    }

    return th.append("</th>").toString();
}
/**
 * Closes the main data table and indent div before the shared page footer.
 */
@Override
protected void writeFooter(LineWriter html) throws IOException {
    html.writeln("</table>");
    html.writeln("</div>");
    super.writeFooter(html);
}
/** @return true — lets the base formatter highlight the Columns nav tab */
@Override
protected boolean isColumnsPage() {
    return true;
}
/**
 * Orders by case-insensitive column name, breaking ties on the owning table
 * so the ordering is total across the whole schema.
 */
private class ByColumnComparator implements Comparator<TableColumn> {
    public int compare(TableColumn left, TableColumn right) {
        int byName = left.getName().compareToIgnoreCase(right.getName());
        if (byName != 0)
            return byName;
        return left.getTable().compareTo(right.getTable());
    }
}
/**
 * Orders columns by their owning table, breaking ties by column name
 * (case-insensitive).
 */
private class ByTableComparator implements Comparator<TableColumn> {
    public int compare(TableColumn column1, TableColumn column2) {
        int byTable = column1.getTable().compareTo(column2.getTable());
        if (byTable != 0)
            return byTable;
        return column1.getName().compareToIgnoreCase(column2.getName());
    }
}
/**
 * Orders columns by type name (case-insensitive), falling back to
 * {@link BySizeComparator} when the type names match.
 */
private class ByTypeComparator implements Comparator<TableColumn> {
    private final Comparator<TableColumn> bySize = new BySizeComparator();

    public int compare(TableColumn column1, TableColumn column2) {
        int byType = column1.getType().compareToIgnoreCase(column2.getType());
        return byType != 0 ? byType : bySize.compare(column1, column2);
    }
}
/**
 * Orders columns by length, then decimal digits, then by
 * {@link ByColumnComparator} as the final tie-breaker.
 */
private class BySizeComparator implements Comparator<TableColumn> {
    private final Comparator<TableColumn> byColumn = new ByColumnComparator();

    public int compare(TableColumn column1, TableColumn column2) {
        // Integer.compare instead of subtraction: a - b can overflow for
        // large/negative lengths and silently invert the ordering.
        int rc = Integer.compare(column1.getLength(), column2.getLength());
        if (rc == 0) {
            rc = Integer.compare(column1.getDecimalDigits(), column2.getDecimalDigits());
            if (rc == 0)
                rc = byColumn.compare(column1, column2);
        }
        return rc;
    }
}
/**
 * Orders columns so that nullable columns sort before non-nullable ones,
 * using {@link ByColumnComparator} as the tie-breaker.
 */
private class ByNullableComparator implements Comparator<TableColumn> {
    private final Comparator<TableColumn> byColumn = new ByColumnComparator();

    public int compare(TableColumn column1, TableColumn column2) {
        boolean nullable1 = column1.isNullable();
        boolean nullable2 = column2.isNullable();
        if (nullable1 != nullable2)
            return nullable1 ? -1 : 1;  // nullable first
        return byColumn.compare(column1, column2);
    }
}
/**
 * Orders columns so that auto-updated columns sort before manually-updated
 * ones, using {@link ByColumnComparator} as the tie-breaker.
 */
private class ByAutoUpdateComparator implements Comparator<TableColumn> {
    private final Comparator<TableColumn> byColumn = new ByColumnComparator();

    public int compare(TableColumn column1, TableColumn column2) {
        boolean auto1 = column1.isAutoUpdated();
        boolean auto2 = column2.isAutoUpdated();
        if (auto1 != auto2)
            return auto1 ? -1 : 1;  // auto-updated first
        return byColumn.compare(column1, column2);
    }
}
/**
 * Orders columns by the string form of their default value
 * (case-insensitive; a missing default compares as the literal "null"),
 * using {@link ByColumnComparator} as the tie-breaker.
 */
private class ByDefaultValueComparator implements Comparator<TableColumn> {
    private final Comparator<TableColumn> byColumn = new ByColumnComparator();

    public int compare(TableColumn column1, TableColumn column2) {
        String default1 = String.valueOf(column1.getDefaultValue());
        String default2 = String.valueOf(column2.getDefaultValue());
        int byDefault = default1.compareToIgnoreCase(default2);
        return byDefault != 0 ? byDefault : byColumn.compare(column1, column2);
    }
}
}
| |
/*
* arcus-java-client : Arcus Java client
* Copyright 2010-2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.spy.memcached;
import net.spy.memcached.collection.CollectionResponse;
import net.spy.memcached.compat.SpyObject;
import net.spy.memcached.internal.BasicThreadFactory;
import net.spy.memcached.internal.CollectionFuture;
import net.spy.memcached.ops.CollectionOperationStatus;
import net.spy.memcached.ops.StoreType;
import net.spy.memcached.transcoders.Transcoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Runs bulk store operations against a set of {@link ArcusClient}s on a
 * dedicated thread pool.  Each bulk request is wrapped in a cancellable
 * {@link Task} (a {@link FutureTask}) and submitted to the executor; the
 * task's result is a map of key -> failure status for the keys that did
 * NOT succeed (an empty map means every key stored successfully).
 */
class BulkService extends SpyObject {
    // Batch size used by workers: submit this many operations, then await
    // their results before submitting the next batch.
    // NOTE(review): this static field is overwritten by every BulkService
    // constructor call, so the most recently constructed instance silently
    // wins for ALL instances (and for workers already running).  Looks like
    // it should be an instance field passed to the workers — confirm.
    private static int DEFAULT_LOOP_LIMIT;
    // fixed-size pool that executes the bulk worker tasks
    private final ExecutorService executor;
    // timeout (milliseconds) applied to each individual store operation
    private final long singleOpTimeout;

    BulkService(int loopLimit, int threadCount, long singleOpTimeout) {
        // core == max, unbounded queue, daemon threads via BasicThreadFactory.
        // NOTE(review): keep-alive is 60 *milliseconds*, not seconds; this is
        // moot for a fixed-size pool but verify the unit was intended.
        this.executor = new ThreadPoolExecutor(threadCount, threadCount, 60L,
                TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(),
                new BasicThreadFactory("bulk-service", true),
                new ThreadPoolExecutor.AbortPolicy());
        BulkService.DEFAULT_LOOP_LIMIT = loopLimit;
        this.singleOpTimeout = singleOpTimeout;
    }

    /**
     * Stores the same value under every key in {@code keys}.
     *
     * @param keys       keys to store; must not be null or empty
     * @param exp        expiration time passed to each store
     * @param value      single value stored under every key
     * @param transcoder encodes {@code value} once, up front
     * @param client     clients used round-robin across the keys
     * @return future yielding a map of key -> failure status for failed keys
     * @throws IllegalArgumentException if {@code keys} is null
     */
    <T> Future<Map<String, CollectionOperationStatus>> setBulk(
            List<String> keys, int exp, T value, Transcoder<T> transcoder,
            ArcusClient[] client) {
        if (keys == null) {
            throw new IllegalArgumentException("Key list is null.");
        }
        // only checked when assertions are enabled; a submit after shutdown
        // would otherwise surface as RejectedExecutionException (AbortPolicy)
        assert !executor.isShutdown() : "Pool has already shut down.";
        BulkSetWorker<T> w = new BulkSetWorker<T>(keys, exp, value, transcoder,
                client, singleOpTimeout);
        BulkService.Task<Map<String, CollectionOperationStatus>> task = new BulkService.Task<Map<String, CollectionOperationStatus>>(
                w);
        executor.submit(task);
        return task;
    }

    /**
     * Stores each map entry's value under its key.
     *
     * @param o          key -> value pairs to store; must not be null
     * @param exp        expiration time passed to each store
     * @param transcoder encodes each value up front
     * @param client     clients used round-robin across the keys
     * @return future yielding a map of key -> failure status for failed keys
     * @throws IllegalArgumentException if {@code o} is null
     */
    <T> Future<Map<String, CollectionOperationStatus>> setBulk(
            Map<String, T> o, int exp, Transcoder<T> transcoder,
            ArcusClient[] client) {
        if (o == null) {
            throw new IllegalArgumentException("Map is null.");
        }
        assert !executor.isShutdown() : "Pool has already shut down.";
        BulkSetWorker<T> w = new BulkSetWorker<T>(o, exp, transcoder, client,
                singleOpTimeout);
        BulkService.Task<Map<String, CollectionOperationStatus>> task = new BulkService.Task<Map<String, CollectionOperationStatus>>(
                w);
        executor.submit(task);
        return task;
    }

    /**
     * Initiates an orderly shutdown of the worker pool; previously submitted
     * tasks run to completion.  Failures are logged, never propagated.
     */
    void shutdown() {
        try {
            executor.shutdown();
        } catch (Exception e) {
            getLogger().warn("exception while shutting down bulk set service.",
                    e);
        }
    }

    /**
     * FutureTask that, when cancelled, first asks the wrapped
     * {@link BulkWorker} to cancel its in-flight per-key operations.
     */
    private static class Task<T> extends FutureTask<T> {
        private final BulkWorker worker;

        public Task(Callable<T> callable) {
            super(callable);
            // constructor requires the callable to actually be a BulkWorker
            this.worker = (BulkWorker) callable;
        }

        @Override
        public boolean cancel(boolean mayInterruptIfRunning) {
            // short-circuit: if the worker was already cancelled/finished,
            // super.cancel is not attempted and false is returned
            return worker.cancel() && super.cancel(mayInterruptIfRunning);
        }
    }

    /**
     * Bulk operation worker.  Submits one async operation per key index,
     * awaiting results in batches of {@link #DEFAULT_LOOP_LIMIT}.
     * NOTE(review): the type parameter {@code <T>} is never used in this
     * class body.
     */
    private abstract static class BulkWorker<T> extends SpyObject implements
            Callable<Map<String, CollectionOperationStatus>> {
        protected final ArcusClient[] clientList;
        // per-index futures; filled sequentially by call() via add(pos, ...)
        protected final ArrayList<Future<Boolean>> future;
        protected final long operationTimeout;
        // flipped to false by cancel(); checked between every step
        protected final AtomicBoolean isRunnable = new AtomicBoolean(true);
        // accumulates key -> failure status; returned as the task result
        protected final Map<String, CollectionOperationStatus> errorList;
        protected final int totalCount;
        protected final int fromIndex;
        protected final int toIndex;

        /**
         * @param keys       keys to process; must contain at least one entry
         * @param timeout    per-operation timeout in milliseconds
         * @param clientList clients shared across the operations
         * @throws IllegalArgumentException if {@code keys} is empty
         */
        public BulkWorker(Collection keys, long timeout, ArcusClient[] clientList) {
            if(keys.size() < 1) {
                throw new IllegalArgumentException("Keys size must be greater than 0");
            }
            this.future = new ArrayList<Future<Boolean>>(keys.size());
            this.operationTimeout = timeout;
            this.clientList = getOptimalClients(clientList);
            this.errorList = new HashMap<String, CollectionOperationStatus>();
            // the worker always spans the whole key range [0, size-1]
            fromIndex = 0;
            toIndex = keys.size() - 1;
            totalCount = toIndex - fromIndex + 1;
        }

        /**
         * Stops further submissions and attempts to cancel every in-flight
         * future.  Returns false if the worker was already stopped; otherwise
         * returns true only if every attempted future cancellation succeeded.
         */
        public boolean cancel() {
            if (!isRunnable()) {
                return false;
            }
            isRunnable.set(false);
            boolean ret = true;
            for (Future<Boolean> f : future) {
                if (f == null) {
                    continue;
                }
                // nothing to do for futures that already finished
                if (f.isCancelled() || f.isDone()) {
                    continue;
                }
                ret &= f.cancel(true);
                if (getLogger().isDebugEnabled()) {
                    getLogger().debug("Cancel the future. " + f);
                }
            }
            getLogger().info("Cancel, bulk set worker.");
            return ret;
        }

        // currently a pass-through; presumably a hook for picking a client
        // subset — no selection logic is implemented here
        private ArcusClient[] getOptimalClients(ArcusClient[] clientList) {
            return clientList;
        }

        // running means: not cancelled AND the worker thread not interrupted
        protected boolean isRunnable() {
            return isRunnable.get() && !Thread.currentThread().isInterrupted();
        }

        // records the operation status of a failed future under its key.
        // NOTE(review): no caller in this class; assumes the future at
        // indexOfFuture is a CollectionFuture — verify before use.
        protected void setErrorOpStatus(String key, int indexOfFuture) {
            errorList.put(key,
                    ((CollectionFuture<Boolean>) future.get(indexOfFuture))
                            .getOperationStatus());
        }

        /** Submits the async operation for the key at {@code index}. */
        public abstract Future<Boolean> processItem(int index);

        /** Blocks on the future at {@code index} and records any failure. */
        public abstract void awaitProcessResult(int index);

        /**
         * Main loop: submit operations in order; every DEFAULT_LOOP_LIMIT
         * submissions, await the just-submitted batch before continuing;
         * finally await the trailing partial (or last full) batch.
         *
         * @return map of key -> failure status (empty when all succeeded)
         */
        public Map<String, CollectionOperationStatus> call() throws Exception {
            for (int pos = fromIndex; isRunnable() && pos <= toIndex; pos++) {
                // at each batch boundary, drain the previous batch first
                if ((pos - fromIndex) > 0
                        && (pos - fromIndex) % DEFAULT_LOOP_LIMIT == 0) {
                    for (int i = pos - DEFAULT_LOOP_LIMIT; isRunnable()
                            && i < pos; i++) {
                        awaitProcessResult(i);
                    }
                }
                try {
                    if (isRunnable()) {
                        // positional add keeps future's index aligned with
                        // the key index; safe because pos grows sequentially
                        future.add(pos, processItem(pos));
                    }
                } catch (IllegalStateException e) {
                    // submission may fail because we were interrupted mid-
                    // cancel; swallow only in that case, otherwise rethrow
                    if (Thread.currentThread().isInterrupted()) {
                        break;
                    } else {
                        throw e;
                    }
                }
            }
            // await the final batch: the last (totalCount % LIMIT) items,
            // or a full LIMIT-sized batch when the count divides evenly
            for (int i = toIndex
                    - (totalCount % DEFAULT_LOOP_LIMIT == 0 ? DEFAULT_LOOP_LIMIT
                            : totalCount % DEFAULT_LOOP_LIMIT) + 1; isRunnable()
                    && i <= toIndex; i++) {
                awaitProcessResult(i);
            }
            return errorList;
        }
    }

    /**
     * Bulk set operation worker: one asyncStore(set) per key, values encoded
     * eagerly in the constructor.
     */
    private static class BulkSetWorker<T> extends BulkWorker<T> {
        private final List<String> keys;
        private final int exp;
        // number of encoded values: 1 = same value for every key,
        // otherwise one value per key (parallel to keys)
        private final int cntCos;
        private List<CachedData> cos;

        /** Single shared value stored under every key. */
        public BulkSetWorker(List<String> keys, int exp, T value,
                Transcoder<T> transcoder, ArcusClient[] clientList,
                long timeout) {
            super(keys, timeout, clientList);
            this.keys = keys;
            this.exp = exp;
            this.cos = new ArrayList<CachedData>();
            // encode once; every key reuses the same CachedData
            this.cos.add(transcoder.encode(value));
            this.cntCos = 1;
        }

        /** One value per key, taken from the map. */
        public BulkSetWorker(Map<String, T> o, int exp,
                Transcoder<T> transcoder, ArcusClient[] clientList, long timeout) {
            super(o.keySet(), timeout, clientList);
            // keys materialized from the key set; cos is built in the same
            // iteration order so keys.get(i) pairs with cos.get(i)
            this.keys = new ArrayList<String>(o.keySet());
            this.exp = exp;
            this.cos = new ArrayList<CachedData>();
            for (String key : keys) {
                this.cos.add(transcoder.encode(o.get(key)));
            }
            this.cntCos = this.cos.size();
        }

        @Override
        public Future<Boolean> processItem(int index) {
            // round-robin across clients; pick the per-key value when one
            // was encoded per key, else the single shared value
            return clientList[index % clientList.length].asyncStore(
                    StoreType.set, keys.get(index), exp,
                    (this.cntCos > 1 ? cos.get(index) : cos.get(0)));
        }

        @Override
        public void awaitProcessResult(int index) {
            try {
                boolean success = future.get(index).get(operationTimeout,
                        TimeUnit.MILLISECONDS);
                if (!success) {
                    errorList.put(
                            keys.get(index),
                            new CollectionOperationStatus(false, String
                                    .valueOf(success), CollectionResponse.END));
                }
            } catch (Exception e) {
                // timeout/interruption/etc: cancel the operation and record
                // the failure instead of propagating
                future.get(index).cancel(true);
                errorList.put(keys.get(index), new CollectionOperationStatus(
                        false, e.getMessage(), CollectionResponse.EXCEPTION));
            }
        }
    }
}
| |
/**
* Name: Jordan Davis
* Lab Instructor: Poonam Dharam
* Lab Section: 102
* Lecture Instructor: Kriangsiri Malasri
* Program Name: Lottery Game
* Date: 4.8.2011
*/
//Incomplete!
//Does not compare the user picked values with the winning values!
import java.util.Scanner;
/**
 * Console lottery drawing game.  The player picks k distinct numbers in the
 * range 1..n (optionally plus one "extra" number in 1..m), the computer then
 * draws a winning combination, and both sets are printed.
 *
 * TODO(review): as the original author noted, the program still does not
 * compare the player's numbers against the winning numbers to decide a win.
 */
public class DrawingGame
{
    /**
     * Returns the number of possible tickets with k distinct picks from 1..n,
     * i.e. the binomial coefficient C(n, k).  The winning chance is
     * 1 / this value.  The long division is exact because the product of k
     * consecutive integers is always divisible by k!.
     *
     * @param k how many distinct numbers the player picks
     * @param n highest pickable number (range is 1..n)
     * @return C(n, k) as a double
     */
    public static double jackpotChance(int k, int n)
    {
        long numerator = 1;    // n * (n-1) * ... * (n-k+1)
        long denominator = 1;  // k!
        for (int i = 0; i < k; i++) {
            numerator *= n;
            n--;
        }
        while (k > 0) {
            denominator *= k;
            k--;
        }
        return (numerator / denominator);
    }

    /**
     * Same as {@link #jackpotChance(int, int)} but for the game variant with
     * an "extra" number drawn independently from 1..m: the ticket count is
     * C(n, k) * m.
     *
     * @param k how many distinct numbers the player picks
     * @param n highest pickable number
     * @param m highest possible extra number
     * @return C(n, k) * m as a double
     */
    public static double jackpotChance(int k, int n, int m)
    {
        long numerator = 1;
        long denominator = 1;
        for (int i = 0; i < k; i++) {
            numerator *= n;
            n--;
        }
        while (k > 0) {
            denominator *= k;
            k--;
        }
        return ((numerator / denominator) * m);
    }

    /**
     * Returns true if a[index] duplicates one of the numbers already recorded
     * in prev.
     *
     * FIX: only the first {@code index} entries of prev are inspected.  The
     * original scanned the whole array, including not-yet-filled slots that
     * are still 0, so an out-of-range entry of 0 was falsely reported as
     * "already picked".
     *
     * @param a     array holding the candidate value at position index
     * @param index position of the value being validated
     * @param prev  previously accepted values, filled in positions 0..index-1
     * @return true when a[index] was already picked
     */
    public static boolean checkForDistinction(int a[], int index, int prev[])
    {
        for (int i = 0; i < index; i++) {
            if (a[index] == prev[i]) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns true when a[index] lies OUTSIDE the inclusive range 1..n
     * (i.e. true means the pick is invalid).
     *
     * @param a     array holding the candidate value
     * @param index position of the value being validated
     * @param n     highest allowed value
     * @return true if a[index] &lt; 1 or a[index] &gt; n
     */
    public static boolean withinRange(int a[], int index, int n)
    {
        return a[index] < 1 || a[index] > n;
    }

    /**
     * Draws k distinct winning numbers, each in 1..n.
     *
     * @param n highest drawable number
     * @param k how many distinct numbers to draw
     * @return array of k distinct values in 1..n
     */
    public static int[] generateWinningNums(int n, int k)
    {
        int compNums[] = new int[k];
        int prevCompNums[] = new int[k];
        for (int i = 0; i < k; i++) {
            // re-draw until the value is distinct from all earlier draws
            do {
                compNums[i] = (int)(n*Math.random()+1);
            } while (checkForDistinction(compNums, i, prevCompNums));
            prevCompNums[i] = compNums[i];
        }
        return compNums;
    }

    /**
     * Draws k distinct winning numbers in 1..n plus one extra number in 1..m
     * stored at index k (the extra number need not be distinct).
     *
     * @param n highest drawable number
     * @param k how many distinct numbers to draw
     * @param m highest possible extra number
     * @return array of length k+1: k distinct values then the extra number
     */
    public static int[] generateWinningNums(int n, int k, int m)
    {
        int compNums[] = new int[k+1];
        int prevCompNums[] = new int[k+1];
        for (int i = 0; i < k; i++) {
            do {
                compNums[i] = (int)(n*Math.random()+1);
            } while (checkForDistinction(compNums, i, prevCompNums));
            prevCompNums[i] = compNums[i];
        }
        // the extra number is drawn independently and may repeat a main number
        compNums[k] = (int)(m*Math.random()+1);
        return compNums;
    }

    /**
     * Prints all elements of the array on one line, space-separated.
     *
     * @param a array to print
     */
    public static void printArray(int a[])
    {
        for (int i = 0; i < a.length; i++) {
            System.out.print(a[i] + " ");
        }
    }

    /**
     * Prompts the player for k distinct numbers in 1..n, re-prompting on
     * duplicates or out-of-range entries.  Fills numbers[0..k-1]; extra
     * trailing slots (the extra-number variant) are left untouched.
     */
    private static void readPlayerNumbers(Scanner s, int[] numbers, int k, int n)
    {
        int prevNums[] = new int[numbers.length];
        for (int i = 0; i < k; i++) {
            System.out.print("Pick a number between 1 and " + n + ", inclusive: ");
            numbers[i] = s.nextInt();
            // keep asking while the pick is a duplicate or out of range
            do {
                if (checkForDistinction(numbers, i, prevNums)) {
                    System.out.print(numbers[i] + " has already been picked! Pick another number between 1 and " + n + ": ");
                    numbers[i] = s.nextInt();
                }
                if (withinRange(numbers, i, n)) {
                    System.out.print(numbers[i] + " is out of range! Pick another number between 1 and " + n + ": ");
                    numbers[i] = s.nextInt();
                }
            } while (checkForDistinction(numbers, i, prevNums) || withinRange(numbers, i, n));
            prevNums[i] = numbers[i];
        }
    }

    public static void main(String[] args)
    {
        Scanner s = new Scanner(System.in);
        int typeGame;  // 1 = without "extra" number, 2 = with "extra" number
        int k;         // how many distinct numbers the player picks
        int n;         // picks range over 1..n
        int m;         // extra number ranges over 1..m

        System.out.println("Which game would you like to play? (1) without extra number, (2) with extra number");
        typeGame = s.nextInt();

        if (typeGame == 1) { // drawing game without extra number
            System.out.println("How many numbers are there in total?");
            n = s.nextInt();
            System.out.println("How many numbers do you want to pick?");
            k = s.nextInt();
            System.out.println("You have a " + (1/jackpotChance(k,n)) + " chance of winning, good luck...\n");

            int numbers[] = new int[k];
            readPlayerNumbers(s, numbers, k, n);

            System.out.println("Your numbers:");
            printArray(numbers);
            System.out.print("\n\n");

            int compNumbers[] = generateWinningNums(n, k);
            System.out.println("Winning numbers:");
            printArray(compNumbers);
        } else if (typeGame == 2) { // drawing game with extra number
            System.out.println("How many numbers are there in total?");
            n = s.nextInt();
            // FIX: prompt read "How many number" in the original
            System.out.println("How many numbers do you want to pick?");
            k = s.nextInt();
            System.out.println("What is the highest possible extra number?");
            m = s.nextInt();
            System.out.println("You have a " + (1/jackpotChance(k,n,m)) + " chance of winning, good luck...\n");

            // slot k holds the extra number
            int numbers[] = new int[k+1];
            readPlayerNumbers(s, numbers, k, n);

            // the extra number only needs to be in range, not distinct
            do {
                System.out.print("Pick an extra number between 1 and " + m + ", inclusive: ");
                numbers[k] = s.nextInt();
                if (numbers[k] < 1 || numbers[k] > m) {
                    System.out.println(numbers[k] + " is out of range! Pick another number between 1 and " + m + ": ");
                }
            } while (numbers[k] < 1 || numbers[k] > m);

            System.out.println("Your numbers:");
            printArray(numbers);
            System.out.print("\n\n");

            int compNumbers[] = generateWinningNums(n, k, m);
            System.out.println("Winning numbers:");
            printArray(compNumbers);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.hbase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
/**
* TestLazyHBaseObject is a test for the LazyHBaseXXX classes.
*/
public class TestLazyHBaseObject extends TestCase {
/**
 * Test the LazyMap class with Integer-to-String: initializes a
 * LazyHBaseCellMap from an HBase Result and verifies per-key lookup and
 * JSON serialization.  Only the "cfb" family is mapped; cells in other
 * families must be ignored.
 * @throws SerDeException
 */
public void testLazyHBaseCellMap1() throws SerDeException {
// Map of Integer to String
Text nullSequence = new Text("\\N");
// lazy map<int,string> inspector; separators are bytes 1 and 2,
// text (non-binary) storage, escaping disabled
ObjectInspector oi = LazyFactory.createLazyObjectInspector(
TypeInfoUtils.getTypeInfosFromTypeString("map<int,string>").get(0),
new byte[]{(byte)1, (byte)2}, 0, nullSequence, false, (byte)0);
LazyHBaseCellMap b = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
// Initialize a result: families cfa and cfc are noise; only cfb's
// qualifiers (interpreted as int keys) should appear in the map
List<Cell> kvs = new ArrayList<Cell>();
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfa"),
Bytes.toBytes("col1"), Bytes.toBytes("cfacol1")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfa"),
Bytes.toBytes("col2"), Bytes.toBytes("cfacol2")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfb"),
Bytes.toBytes("2"), Bytes.toBytes("def")));
// key -1 carries an empty value — expected to surface as null below
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfb"),
Bytes.toBytes("-1"), Bytes.toBytes("")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfb"),
Bytes.toBytes("0"), Bytes.toBytes("0")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfb"),
Bytes.toBytes("8"), Bytes.toBytes("abc")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"), Bytes.toBytes("cfc"),
Bytes.toBytes("col3"), Bytes.toBytes("cfccol3")));
Result r = Result.create(kvs);
// neither keys nor values use binary storage
List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
mapBinaryStorage.add(false);
mapBinaryStorage.add(false);
b.init(r, "cfb".getBytes(), mapBinaryStorage);
assertEquals(
new Text("def"),
((LazyString)b.getMapValueElement(
new IntWritable(2))).getWritableObject());
// empty cell value maps to null
assertNull(b.getMapValueElement(new IntWritable(-1)));
assertEquals(
new Text("0"),
((LazyString)b.getMapValueElement(
new IntWritable(0))).getWritableObject());
assertEquals(
new Text("abc"),
((LazyString)b.getMapValueElement(
new IntWritable(8))).getWritableObject());
// absent key maps to null
assertNull(b.getMapValueElement(new IntWritable(12345)));
// JSON form: only cfb keys, sorted, with the empty-valued key omitted
assertEquals("{0:'0',2:'def',8:'abc'}".replace('\'', '\"'),
SerDeUtils.getJSONString(b, oi));
}
/**
 * Test the LazyMap class with String-to-String: same structure as
 * testLazyHBaseCellMap1 but with string keys, and with a value ("d\tf")
 * containing the secondary separator byte to check it is preserved.
 * @throws SerDeException
 */
public void testLazyHBaseCellMap2() throws SerDeException {
// Map of String to String
Text nullSequence = new Text("\\N");
// lazy map<string,string> inspector; separators are '#' and '\t'
ObjectInspector oi = LazyFactory.createLazyObjectInspector(
TypeInfoUtils.getTypeInfosFromTypeString("map<string,string>").get(0),
new byte[]{(byte)'#', (byte)'\t'}, 0, nullSequence, false, (byte)0);
LazyHBaseCellMap b = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
// Initialize a result: only family cfb is mapped; cfa/cfc are noise
List<Cell> kvs = new ArrayList<Cell>();
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfa"), Bytes.toBytes("col1"), Bytes.toBytes("cfacol1")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfa"), Bytes.toBytes("col2"), Bytes.toBytes("cfacol2")));
// value contains the '\t' separator byte on purpose
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfb"), Bytes.toBytes("2"), Bytes.toBytes("d\tf")));
// key -1 carries an empty value — expected to surface as null below
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfb"), Bytes.toBytes("-1"), Bytes.toBytes("")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfb"), Bytes.toBytes("0"), Bytes.toBytes("0")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfb"), Bytes.toBytes("8"), Bytes.toBytes("abc")));
kvs.add(new KeyValue(Bytes.toBytes("test-row"),
Bytes.toBytes("cfc"), Bytes.toBytes("col3"), Bytes.toBytes("cfccol3")));
Result r = Result.create(kvs);
// neither keys nor values use binary storage
List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
mapBinaryStorage.add(false);
mapBinaryStorage.add(false);
b.init(r, "cfb".getBytes(), mapBinaryStorage);
// embedded separator byte survives in the value
assertEquals(
new Text("d\tf"),
((LazyString)b.getMapValueElement(
new Text("2"))).getWritableObject());
// empty cell value maps to null
assertNull(b.getMapValueElement(new Text("-1")));
assertEquals(
new Text("0"),
((LazyString)b.getMapValueElement(
new Text("0"))).getWritableObject());
assertEquals(
new Text("abc"),
((LazyString)b.getMapValueElement(
new Text("8"))).getWritableObject());
// absent key maps to null
assertNull(b.getMapValueElement(new Text("-")));
// JSON form escapes the tab and omits the empty-valued key
assertEquals(
"{'0':'0','2':'d\\tf','8':'abc'}".replace('\'', '\"'),
SerDeUtils.getJSONString(b, oi));
}
/**
* Test the LazyHBaseCellMap class for the case where both the key and the value in the family
* map are stored in binary format using the appropriate LazyPrimitive objects.
* @throws SerDeException
*/
public void testLazyHBaseCellMap3() throws SerDeException {
Text nullSequence = new Text("\\N");
TypeInfo mapBinaryIntKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<int,int>");
ObjectInspector oi = LazyFactory.createLazyObjectInspector(
mapBinaryIntKeyValue, new byte [] {(byte)1, (byte) 2}, 0, nullSequence, false, (byte) 0);
LazyHBaseCellMap hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
List<Cell> kvs = new ArrayList<Cell>();
byte [] rowKey = "row-key".getBytes();
byte [] cfInt = "cf-int".getBytes();
kvs.add(new KeyValue(rowKey, cfInt, Bytes.toBytes(1), Bytes.toBytes(1)));
Result result = Result.create(kvs);
List<Boolean> mapBinaryStorage = new ArrayList<Boolean>();
mapBinaryStorage.add(true);
mapBinaryStorage.add(true);
hbaseCellMap.init(result, cfInt, mapBinaryStorage);
IntWritable expectedIntValue = new IntWritable(1);
LazyPrimitive<?, ?> lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(
rowKey, cfInt, Bytes.toBytes(Integer.MIN_VALUE), Bytes.toBytes(Integer.MIN_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfInt, mapBinaryStorage);
expectedIntValue = new IntWritable(Integer.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(
rowKey, cfInt, Bytes.toBytes(Integer.MAX_VALUE), Bytes.toBytes(Integer.MAX_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfInt, mapBinaryStorage);
expectedIntValue = new IntWritable(Integer.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedIntValue);
assertEquals(expectedIntValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryByteKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<tinyint,tinyint>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryByteKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfByte = "cf-byte".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfByte, new byte [] {(byte) 1}, new byte [] {(byte) 1}));
result = Result.create(kvs);
hbaseCellMap.init(result, cfByte, mapBinaryStorage);
ByteWritable expectedByteValue = new ByteWritable((byte) 1);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MIN_VALUE},
new byte [] {Byte.MIN_VALUE}));
result = Result.create(kvs);
hbaseCellMap.init(result, cfByte, mapBinaryStorage);
expectedByteValue = new ByteWritable(Byte.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfByte, new byte [] {Byte.MAX_VALUE},
new byte [] {Byte.MAX_VALUE}));
result = Result.create(kvs);
hbaseCellMap.init(result, cfByte, mapBinaryStorage);
expectedByteValue = new ByteWritable(Byte.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedByteValue);
assertEquals(expectedByteValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryShortKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<smallint,smallint>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryShortKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfShort = "cf-short".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes((short) 1), Bytes.toBytes((short) 1)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfShort, mapBinaryStorage);
ShortWritable expectedShortValue = new ShortWritable((short) 1);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MIN_VALUE),
Bytes.toBytes(Short.MIN_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfShort, mapBinaryStorage);
expectedShortValue = new ShortWritable(Short.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfShort, Bytes.toBytes(Short.MAX_VALUE),
Bytes.toBytes(Short.MAX_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfShort, mapBinaryStorage);
expectedShortValue = new ShortWritable(Short.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedShortValue);
assertEquals(expectedShortValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryLongKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<bigint,bigint>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryLongKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false, (byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfLong = "cf-long".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes((long) 1), Bytes.toBytes((long) 1)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfLong, mapBinaryStorage);
LongWritable expectedLongValue = new LongWritable(1);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes(Long.MIN_VALUE),
Bytes.toBytes(Long.MIN_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfLong, mapBinaryStorage);
expectedLongValue = new LongWritable(Long.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfLong, Bytes.toBytes(Long.MAX_VALUE),
Bytes.toBytes(Long.MAX_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfLong, mapBinaryStorage);
expectedLongValue = new LongWritable(Long.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedLongValue);
assertEquals(expectedLongValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryFloatKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<float,float>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryFloatKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
(byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfFloat = "cf-float".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) 1.0F),
Bytes.toBytes((float) 1.0F)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
FloatWritable expectedFloatValue = new FloatWritable(1.0F);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MIN_VALUE),
Bytes.toBytes((float) Float.MIN_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
expectedFloatValue = new FloatWritable(Float.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfFloat, Bytes.toBytes((float) Float.MAX_VALUE),
Bytes.toBytes((float) Float.MAX_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfFloat, mapBinaryStorage);
expectedFloatValue = new FloatWritable(Float.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedFloatValue);
assertEquals(expectedFloatValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryDoubleKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<double,double>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryDoubleKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
(byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfDouble = "cf-double".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(1.0), Bytes.toBytes(1.0)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
DoubleWritable expectedDoubleValue = new DoubleWritable(1.0);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MIN_VALUE),
Bytes.toBytes(Double.MIN_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
expectedDoubleValue = new DoubleWritable(Double.MIN_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfDouble, Bytes.toBytes(Double.MAX_VALUE),
Bytes.toBytes(Double.MAX_VALUE)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfDouble, mapBinaryStorage);
expectedDoubleValue = new DoubleWritable(Double.MAX_VALUE);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedDoubleValue);
assertEquals(expectedDoubleValue, lazyPrimitive.getWritableObject());
TypeInfo mapBinaryBooleanKeyValue =
TypeInfoUtils.getTypeInfoFromTypeString("map<boolean,boolean>");
oi = LazyFactory.createLazyObjectInspector(
mapBinaryBooleanKeyValue, new byte [] {(byte) 1, (byte) 2}, 0, nullSequence, false,
(byte) 0);
hbaseCellMap = new LazyHBaseCellMap((LazyMapObjectInspector) oi);
byte [] cfBoolean = "cf-boolean".getBytes();
kvs.clear();
kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(false), Bytes.toBytes(false)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
BooleanWritable expectedBooleanValue = new BooleanWritable(false);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
kvs.clear();
kvs.add(new KeyValue(rowKey, cfBoolean, Bytes.toBytes(true), Bytes.toBytes(true)));
result = Result.create(kvs);
hbaseCellMap.init(result, cfBoolean, mapBinaryStorage);
expectedBooleanValue = new BooleanWritable(true);
lazyPrimitive =
(LazyPrimitive<?, ?>) hbaseCellMap.getMapValueElement(expectedBooleanValue);
assertEquals(expectedBooleanValue, lazyPrimitive.getWritableObject());
}
/**
 * Test the LazyHBaseRow class with one-for-one mappings between
 * Hive fields and HBase columns.
 * @throws SerDeException
 */
public void testLazyHBaseRow1() throws SerDeException {
  List<TypeInfo> fieldTypeInfos =
      TypeInfoUtils.getTypeInfosFromTypeString(
          "string,int,array<string>,map<string,string>,string");
  List<String> fieldNames = Arrays.asList("key", "a", "b", "c", "d");
  Text nullSequence = new Text("\\N");
  String hbaseColsMapping = ":key,cfa:a,cfa:b,cfb:c,cfb:d";

  ColumnMappings columnMappings = null;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColsMapping);
  } catch (SerDeException e) {
    fail(e.toString());
  }

  // All columns are stored as text. A whole-column-family mapping needs two
  // storage flags (map key and map value); every other mapping needs one.
  for (ColumnMapping colMap : columnMappings) {
    boolean mapsWholeFamily = !colMap.hbaseRowKey && colMap.qualifierName == null;
    colMap.binaryStorage.add(false);
    if (mapsWholeFamily) {
      colMap.binaryStorage.add(false);
    }
  }

  ObjectInspector oi = LazyFactory.createLazyStructInspector(fieldNames,
      fieldTypeInfos, new byte[] {' ', ':', '='},
      nullSequence, false, false, (byte) 0);
  LazyHBaseRow row = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);

  byte[] rowKey = Bytes.toBytes("test-row");
  byte[] cfa = Bytes.toBytes("cfa");
  byte[] cfb = Bytes.toBytes("cfb");

  // Case 1: every Hive field is backed by an HBase cell.
  List<Cell> cells = new ArrayList<Cell>();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("a:b:c")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("c"), Bytes.toBytes("d=e:f=g")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("hi")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':123,'b':['a','b','c'],"
          + "'c':{'d':'e','f':'g'},'d':'hi'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 2: fields without a backing cell deserialize to null.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("c"), Bytes.toBytes("d=e:f=g")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':123,'b':null,"
          + "'c':{'d':'e','f':'g'},'d':null}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 3: the null sequence and value-less map entries become null values;
  // an unparseable int ("a") becomes null as well.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("a")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("c"), Bytes.toBytes("d=\\N:f=g:h")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("no")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':null,'b':['a'],"
          + "'c':{'d':null,'f':'g','h':null},'d':'no'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 4: empty list elements around separators are preserved.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes(":a::")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("no")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':null,'b':['','a','',''],"
          + "'c':null,'d':'no'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));
  // This is intentionally duplicated because of HIVE-3179
  assertEquals(
      ("{'key':'test-row','a':null,'b':['','a','',''],"
          + "'c':null,'d':'no'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 5: empty cell values yield empty collections and the empty string.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("c"), Bytes.toBytes("")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("")));
  row.init(Result.create(cells));
  assertEquals(
      "{'key':'test-row','a':123,'b':[],'c':{},'d':''}".replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));
}
/**
 * Test the LazyHBaseRow class with a mapping from a Hive field to
 * an HBase column family.
 * @throws SerDeException
 */
public void testLazyHBaseRow2() throws SerDeException {
  // Field "c" ("cfb:") is mapped to a whole column family as Map<string,string>.
  List<TypeInfo> fieldTypeInfos =
      TypeInfoUtils.getTypeInfosFromTypeString(
          "string,int,array<string>,map<string,string>,string");
  List<String> fieldNames = Arrays.asList(
      new String[]{"key", "a", "b", "c", "d"});
  Text nullSequence = new Text("\\N");
  String hbaseColsMapping = ":key,cfa:a,cfa:b,cfb:,cfc:d";

  ColumnMappings columnMappings = null;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColsMapping);
  } catch (SerDeException e) {
    fail(e.toString());
  }

  // All columns are stored as text. The family-wide mapping carries two
  // storage flags (map key and map value); every other mapping carries one.
  for (ColumnMapping colMap : columnMappings) {
    boolean mapsWholeFamily = !colMap.hbaseRowKey && colMap.qualifierName == null;
    colMap.binaryStorage.add(false);
    if (mapsWholeFamily) {
      colMap.binaryStorage.add(false);
    }
  }

  ObjectInspector oi = LazyFactory.createLazyStructInspector(
      fieldNames,
      fieldTypeInfos,
      new byte[] {' ', ':', '='},
      nullSequence, false, false, (byte) 0);
  LazyHBaseRow row = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);

  byte[] rowKey = Bytes.toBytes("test-row");
  byte[] cfa = Bytes.toBytes("cfa");
  byte[] cfb = Bytes.toBytes("cfb");
  byte[] cfc = Bytes.toBytes("cfc");

  // Case 1: all fields populated; cfb's qualifiers become map entries.
  List<Cell> cells = new ArrayList<Cell>();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("a:b:c")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("e")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("f"), Bytes.toBytes("g")));
  cells.add(new KeyValue(rowKey, cfc, Bytes.toBytes("d"), Bytes.toBytes("hi")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':123,'b':['a','b','c'],"
          + "'c':{'d':'e','f':'g'},'d':'hi'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 2: fields without a backing cell deserialize to null.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("d"), Bytes.toBytes("e")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("f"), Bytes.toBytes("g")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':123,'b':null,"
          + "'c':{'d':'e','f':'g'},'d':null}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 3: an unparseable int ("a") becomes null; partial family maps work.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("a")));
  cells.add(new KeyValue(rowKey, cfb, Bytes.toBytes("f"), Bytes.toBytes("g")));
  cells.add(new KeyValue(rowKey, cfc, Bytes.toBytes("d"), Bytes.toBytes("no")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':null,'b':['a'],"
          + "'c':{'f':'g'},'d':'no'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 4: empty list elements are preserved; an absent family yields {}.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes(":a::")));
  cells.add(new KeyValue(rowKey, cfc, Bytes.toBytes("d"), Bytes.toBytes("no")));
  row.init(Result.create(cells));
  assertEquals(
      ("{'key':'test-row','a':null,'b':['','a','',''],"
          + "'c':{},'d':'no'}").replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));

  // Case 5: empty cell values yield empty collections and the empty string.
  cells.clear();
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("a"), Bytes.toBytes("123")));
  cells.add(new KeyValue(rowKey, cfa, Bytes.toBytes("b"), Bytes.toBytes("")));
  cells.add(new KeyValue(rowKey, cfc, Bytes.toBytes("d"), Bytes.toBytes("")));
  row.init(Result.create(cells));
  assertEquals(
      "{'key':'test-row','a':123,'b':[],'c':{},'d':''}".replace("'", "\""),
      SerDeUtils.getJSONString(row, oi));
}
/**
 * Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
 * HBase column family/column qualifier pairs. The column types are primitive and fields
 * are stored in binary format in HBase.
 * @throws SerDeException
 */
public void testLazyHBaseRow3() throws SerDeException {
  // One Hive column per primitive type; indices below refer to this order.
  List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
      "string,int,tinyint,smallint,bigint,float,double,string,boolean");
  List<String> fieldNames = Arrays.asList(
      new String [] {"key", "c_int", "c_byte", "c_short", "c_long", "c_float", "c_double",
          "c_string", "c_bool"});
  Text nullSequence = new Text("\\N");
  // "#bin" requests binary storage; the row key (index 0) and c_string
  // (index 7) use "#str" (text) instead.
  String hbaseColumnsMapping = ":key#str,cf-int:cq-int#bin,cf-byte:cq-byte#bin,"
      + "cf-short:cq-short#bin,cf-long:cq-long#bin,cf-float:cq-float#bin,cf-double:cq-double#bin,"
      + "cf-string:cq-string#str,cf-bool:cq-bool#bin";
  ColumnMappings columnMappings = null;
  try {
    columnMappings = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
  } catch (SerDeException e) {
    fail(e.toString());
  }
  // Mirror the mapping string: text storage for indices 0 and 7, binary otherwise.
  ColumnMapping[] columnsMapping = columnMappings.getColumnsMapping();
  for (int i = 0; i < columnsMapping.length; i++) {
    ColumnMapping colMap = columnsMapping[i];
    if (i == 0 || i == 7) {
      colMap.binaryStorage.add(false);
    } else {
      colMap.binaryStorage.add(true);
    }
  }
  ObjectInspector oi =
      LazyFactory.createLazyStructInspector(fieldNames, fieldTypeInfos,
          new byte [] {' ', ':', '='}, nullSequence, false, false, (byte) 0);
  LazyHBaseRow o = new LazyHBaseRow((LazySimpleStructObjectInspector) oi, columnMappings);
  byte [] rowKey = "row-key".getBytes();
  List<Cell> kvs = new ArrayList<Cell>();
  byte [] value;
  // Build one cell per non-key column (index 1..8), encoding the value 1 /
  // true / a sample string in the storage format declared above.
  for (int i = 1; i < columnsMapping.length; i++) {
    switch (i) {
    case 1:
      value = Bytes.toBytes(1);
      break;
    case 2:
      // tinyint: a raw single byte, not Bytes.toBytes, which has no byte overload.
      value = new byte[]{(byte)1};
      break;
    case 3:
      value = Bytes.toBytes((short) 1);
      break;
    case 4:
      value = Bytes.toBytes((long) 1);
      break;
    case 5:
      value = Bytes.toBytes((float) 1.0F);
      break;
    case 6:
      value = Bytes.toBytes((double) 1.0);
      break;
    case 7:
      value = "Hadoop, Hive, with HBase storage handler.".getBytes();
      break;
    case 8:
      value = Bytes.toBytes(true);
      break;
    default:
      throw new RuntimeException("Not expected: " + i);
    }
    ColumnMapping colMap = columnsMapping[i];
    kvs.add(new KeyValue(rowKey, colMap.familyNameBytes, colMap.qualifierNameBytes, value));
  }
  // Result requires its cells in KeyValue order.
  Collections.sort(kvs, KeyValue.COMPARATOR);
  Result result = Result.create(kvs);
  o.init(result);
  // Deserialize each struct field and check it round-trips to the expected
  // Writable; field index i matches column index i from the mapping.
  List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = ((StructObjectInspector) oi).getStructFieldData(o, fieldRefs.get(i));
    assert(fieldData != null);
    assert(fieldData instanceof LazyPrimitive<?, ?>);
    Writable writable = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
    switch (i) {
    case 0:
      Text text = new Text("row-key");
      assertEquals(text, writable);
      break;
    case 1:
      IntWritable iw = new IntWritable(1);
      assertEquals(iw, writable);
      break;
    case 2:
      ByteWritable bw = new ByteWritable((byte) 1);
      assertEquals(bw, writable);
      break;
    case 3:
      ShortWritable sw = new ShortWritable((short) 1);
      assertEquals(sw, writable);
      break;
    case 4:
      LongWritable lw = new LongWritable(1);
      assertEquals(lw, writable);
      break;
    case 5:
      FloatWritable fw = new FloatWritable(1.0F);
      assertEquals(fw, writable);
      break;
    case 6:
      DoubleWritable dw = new DoubleWritable(1.0);
      assertEquals(dw, writable);
      break;
    case 7:
      Text t = new Text("Hadoop, Hive, with HBase storage handler.");
      assertEquals(t, writable);
      break;
    case 8:
      BooleanWritable boolWritable = new BooleanWritable(true);
      assertEquals(boolWritable, writable);
      break;
    default:
      fail("Error: Unanticipated value in deserializing fields for HBaseSerDe.");
      break;
    }
  }
}
}
| |
/**
* Copyright (C) 2013 by Raphael Michel under the MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software
* is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package de.geeksfactory.opacclient.storage;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.database.Cursor;
import android.net.Uri;
import java.util.List;
import de.geeksfactory.opacclient.BuildConfig;
/**
 * ContentProvider exposing the "starred items" table to this app and to
 * opacapp sibling flavors (authorities beginning with "de.opacapp." or
 * "net.opacapp.").
 *
 * Supported URIs:
 *   content://&lt;authority&gt;/star        — the whole table (dir)
 *   content://&lt;authority&gt;/star/&lt;id&gt;   — a single row (item)
 */
public class StarContentProvider extends ContentProvider {

    public static final String STAR_TYPE = "star";
    // NOTE(review): concatenation yields ".typestar" (no separator before
    // STAR_TYPE); kept as-is because clients match this exact MIME string.
    private static final String STAR_MIME_POSTFIX = "/vnd.de.opacapp.type"
            + STAR_TYPE;
    public static final String AUTHORITY = BuildConfig.APPLICATION_ID + ".starprovider";
    public static final String BASE_URI = "content://" + AUTHORITY + "/";
    public static final Uri STAR_URI = Uri.parse(BASE_URI + STAR_TYPE);

    private static final String MIME_PREFIX = "vnd.android.cursor.";
    private static final String STAR_DIR_MIME = MIME_PREFIX + "dir"
            + STAR_MIME_POSTFIX;
    private static final String STAR_ITEM_MIME = MIME_PREFIX + "item"
            + STAR_MIME_POSTFIX;

    private StarDatabase database;

    /**
     * Classifies a content URI as a directory request ("/star") or a
     * single-item request ("/star/<id>").
     *
     * @return the matching {@link Mime}, or {@code null} when the URI has a
     *         foreign authority, no authority, or an unexpected path.
     */
    private static Mime getTypeMime(Uri uri) {
        String authority = uri.getAuthority();
        // Uri.getAuthority() may return null (e.g. opaque URIs); the previous
        // code would have thrown a NullPointerException on startsWith() below.
        if (authority == null) {
            return null;
        }
        if (!AUTHORITY.equals(authority)
                && !authority.startsWith("de.opacapp.")
                && !authority.startsWith("net.opacapp.")) {
            return null;
        }
        List<String> segments = uri.getPathSegments();
        if (segments == null || segments.isEmpty()) {
            return null;
        }
        if (!STAR_TYPE.equals(segments.get(0))) {
            return null;
        }
        switch (segments.size()) {
            case 1:
                return Mime.STAR_DIR;
            case 2:
                return Mime.STAR_ITEM;
            default:
                return null;
        }
    }

    @Override
    public boolean onCreate() {
        database = new StarDatabase(getContext());
        return true;
    }

    @Override
    public String getType(Uri uri) {
        Mime mime = getTypeMime(uri);
        // Guard against switch(null): a NullPointerException would otherwise
        // be thrown for unrecognized URIs instead of the documented null.
        if (mime == null) {
            return null;
        }
        switch (mime) {
            case STAR_DIR:
                return STAR_DIR_MIME;
            case STAR_ITEM:
                return STAR_ITEM_MIME;
            default:
                return null;
        }
    }

    private int deleteInDatabase(String table, String whereClause,
            String[] whereArgs) {
        return database.getWritableDatabase().delete(table, whereClause, whereArgs);
    }

    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        Mime mime = getTypeMime(uri);
        int rowsAffected = 0;
        if (mime == Mime.STAR_DIR) {
            // Delete whatever the caller's selection matches.
            rowsAffected = deleteInDatabase(StarDatabase.STAR_TABLE, selection, selectionArgs);
        } else if (mime == Mime.STAR_ITEM) {
            // Delete exactly the row addressed by the URI's trailing id.
            rowsAffected = deleteInDatabase(StarDatabase.STAR_TABLE,
                    StarDatabase.STAR_WHERE_ID, selectionForUri(uri));
        }
        // mime == null: unknown URI, report 0 rows (previously: NPE on switch).
        if (rowsAffected > 0) {
            notifyUri(uri);
        }
        return rowsAffected;
    }

    private long insertIntoDatabase(String table, ContentValues values) {
        return database.getWritableDatabase()
                .insertOrThrow(table, null, values);
    }

    @Override
    public Uri insert(Uri uri, ContentValues values) {
        Mime mime = getTypeMime(uri);
        Uri itemUri = null;
        // Inserts are only meaningful on the directory URI; item URIs and
        // unknown URIs return null (previously an unknown URI caused an NPE).
        if (mime == Mime.STAR_DIR) {
            long id = insertIntoDatabase(StarDatabase.STAR_TABLE, values);
            itemUri = ContentUris.withAppendedId(STAR_URI, id);
            notifyUri(STAR_URI);
        }
        if (itemUri != null) {
            notifyUri(uri);
        }
        return itemUri;
    }

    private Cursor queryDatabase(String table, String[] projection,
            String selection, String[] selectionArgs, String groupBy,
            String having, String orderBy) {
        return database.getReadableDatabase().query(table, projection,
                selection, selectionArgs, groupBy, having, orderBy);
    }

    @Override
    public Cursor query(Uri uri, String[] projection, String selection,
            String[] selectionArgs, String sortOrder) {
        Mime mime = getTypeMime(uri);
        if (mime == null) {
            // Unknown URI (previously: NPE on switch(null)).
            return null;
        }
        Cursor cursor;
        switch (mime) {
            case STAR_DIR:
                cursor = queryDatabase(StarDatabase.STAR_TABLE, projection,
                        selection, selectionArgs, null, null, sortOrder);
                break;
            case STAR_ITEM:
                cursor = queryDatabase(StarDatabase.STAR_TABLE, projection,
                        StarDatabase.STAR_WHERE_ID, selectionForUri(uri), null,
                        null, sortOrder);
                break;
            default:
                return null;
        }
        // Let observers of this URI be notified when the underlying data changes.
        cursor.setNotificationUri(getContext().getContentResolver(), uri);
        return cursor;
    }

    private int updateInDatabase(String table, ContentValues values,
            String selection, String[] selectionArgs) {
        return database.getWritableDatabase().update(table, values, selection,
                selectionArgs);
    }

    @Override
    public int update(Uri uri, ContentValues values, String selection,
            String[] selectionArgs) {
        Mime mime = getTypeMime(uri);
        int rowsAffected = 0;
        if (mime == Mime.STAR_DIR) {
            rowsAffected = updateInDatabase(StarDatabase.STAR_TABLE, values,
                    selection, selectionArgs);
        } else if (mime == Mime.STAR_ITEM) {
            rowsAffected = updateInDatabase(StarDatabase.STAR_TABLE, values,
                    StarDatabase.STAR_WHERE_ID, selectionForUri(uri));
        }
        // mime == null: unknown URI, report 0 rows (previously: NPE on switch).
        if (rowsAffected > 0) {
            notifyUri(uri);
        }
        return rowsAffected;
    }

    /** Notifies registered ContentObservers that data under {@code uri} changed. */
    private void notifyUri(Uri uri) {
        getContext().getContentResolver().notifyChange(uri, null);
    }

    /** Builds the selection-args array for a single-item URI's trailing id. */
    private String[] selectionForUri(Uri uri) {
        return new String[]{String.valueOf(ContentUris.parseId(uri))};
    }

    /** Internal URI classification: whole table vs. single row. */
    private enum Mime {
        STAR_ITEM, STAR_DIR
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.phreak;
import org.drools.core.common.EventFactHandle;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalKnowledgeRuntime;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.InternalWorkingMemoryEntryPoint;
import org.drools.core.impl.StatefulKnowledgeSessionImpl.WorkingMemoryReteExpireAction;
import org.drools.core.reteoo.ClassObjectTypeConf;
import org.drools.core.reteoo.CompositePartitionAwareObjectSinkAdapter;
import org.drools.core.reteoo.EntryPointNode;
import org.drools.core.reteoo.ModifyPreviousTuples;
import org.drools.core.reteoo.ObjectTypeConf;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.spi.PropagationContext;
import org.drools.core.time.JobContext;
import org.drools.core.time.JobHandle;
import org.drools.core.time.impl.PointInTimeTrigger;
import java.util.concurrent.CountDownLatch;
/**
 * A deferred unit of rete-network work (e.g. fact insert/update/delete
 * propagation) queued on a session and executed when the propagation
 * list is flushed. Entries form an intrusive singly-linked list via
 * {@link #getNext()}/{@link #setNext(PropagationEntry)}.
 */
public interface PropagationEntry {

    /** Performs this entry's propagation against the given working memory. */
    void execute(InternalWorkingMemory wm);

    /** Convenience overload resolving the working memory from the runtime. */
    void execute(InternalKnowledgeRuntime kruntime);

    PropagationEntry getNext();
    void setNext(PropagationEntry next);

    /** True when the queue must be flushed as soon as this entry is appended. */
    boolean requiresImmediateFlushing();

    boolean isCalledFromRHS();

    /** True when this entry can be fanned out into per-partition sub-entries. */
    boolean isPartitionSplittable();

    /** Returns the per-partition slice of this entry; see {@link #isPartitionSplittable()}. */
    PropagationEntry getSplitForPartition(int partitionNr);

    /**
     * Base implementation providing the linked-list plumbing and safe
     * defaults (no immediate flush, not RHS-originated, not splittable).
     */
    abstract class AbstractPropagationEntry implements PropagationEntry {
        private PropagationEntry next;

        public void setNext(PropagationEntry next) {
            this.next = next;
        }

        public PropagationEntry getNext() {
            return next;
        }

        @Override
        public boolean requiresImmediateFlushing() {
            return false;
        }

        @Override
        public boolean isCalledFromRHS() {
            return false;
        }

        @Override
        public void execute(InternalKnowledgeRuntime kruntime) {
            // Delegate to the working-memory variant; the runtime is expected
            // to be an InternalWorkingMemoryEntryPoint.
            execute( ((InternalWorkingMemoryEntryPoint) kruntime).getInternalWorkingMemory() );
        }

        @Override
        public boolean isPartitionSplittable() {
            return false;
        }

        @Override
        public PropagationEntry getSplitForPartition(int partitionNr) {
            // Only splittable entries (Update/Delete) provide partition slices.
            throw new UnsupportedOperationException();
        }
    }

    /** Base for entries targeting a single rete partition. */
    abstract class AbstractPartitionedPropagationEntry extends AbstractPropagationEntry {
        protected final int partition;

        protected AbstractPartitionedPropagationEntry( int partition ) {
            this.partition = partition;
        }

        /** Partition 0 is the master partition; some work runs only there. */
        protected boolean isMasterPartition() {
            return partition == 0;
        }
    }

    /**
     * An entry whose executor produces a result; {@link #getResult()} blocks
     * the calling thread until {@link #done(Object)} is invoked. Requires
     * immediate flushing because a caller is waiting.
     */
    abstract class PropagationEntryWithResult<T> extends PropagationEntry.AbstractPropagationEntry {
        private final CountDownLatch done = new CountDownLatch( 1 );

        private T result;

        public final T getResult() {
            try {
                done.await();
            } catch (InterruptedException e) {
                // NOTE(review): the thread's interrupt status is not restored
                // here (Thread.currentThread().interrupt()) — confirm callers
                // do not rely on it.
                throw new RuntimeException( e );
            }
            return result;
        }

        protected void done(T result) {
            this.result = result;
            done.countDown();
        }

        @Override
        public boolean requiresImmediateFlushing() {
            return true;
        }
    }

    /**
     * Propagates a fact insertion to all matching ObjectTypeNodes and, for
     * event facts, schedules their expiration.
     */
    class Insert extends AbstractPropagationEntry {
        // Shared stateless job used for all expiration timers.
        // NOTE(review): 'transient' has no effect on a static field.
        private static final transient ObjectTypeNode.ExpireJob job = new ObjectTypeNode.ExpireJob();

        private final InternalFactHandle handle;
        private final PropagationContext context;
        private final ObjectTypeConf objectTypeConf;

        public Insert( InternalFactHandle handle, PropagationContext context, InternalWorkingMemory workingMemory, ObjectTypeConf objectTypeConf) {
            this.handle = handle;
            this.context = context;
            this.objectTypeConf = objectTypeConf;

            // Expiration is scheduled eagerly at construction time, not when
            // the entry is executed.
            if ( objectTypeConf.isEvent() ) {
                scheduleExpiration(workingMemory, handle, context, objectTypeConf, workingMemory.getTimerService().getCurrentTime());
            }
        }

        /** Synchronous variant: schedules expiration (if an event) and propagates immediately. */
        public static void execute( InternalFactHandle handle, PropagationContext context, InternalWorkingMemory wm, ObjectTypeConf objectTypeConf) {
            if ( objectTypeConf.isEvent() ) {
                scheduleExpiration(wm, handle, context, objectTypeConf, wm.getTimerService().getCurrentTime());
            }
            propagate( handle, context, wm, objectTypeConf );
        }

        private static void propagate( InternalFactHandle handle, PropagationContext context, InternalWorkingMemory wm, ObjectTypeConf objectTypeConf ) {
            for ( ObjectTypeNode otn : objectTypeConf.getObjectTypeNodes() ) {
                otn.propagateAssert( handle, context, wm );
            }
        }

        public void execute( InternalWorkingMemory wm ) {
            propagate( handle, context, wm, objectTypeConf );
        }

        private static void scheduleExpiration(InternalWorkingMemory wm, InternalFactHandle handle, PropagationContext context, ObjectTypeConf objectTypeConf, long insertionTime) {
            for ( ObjectTypeNode otn : objectTypeConf.getObjectTypeNodes() ) {
                scheduleExpiration( wm, handle, context, otn, insertionTime, otn.getExpirationOffset() );
            }
            // No concrete OTN: fall back to the class-level expiration offset.
            if ( objectTypeConf.getConcreteObjectTypeNode() == null ) {
                scheduleExpiration( wm, handle, context, null, insertionTime, ( (ClassObjectTypeConf) objectTypeConf ).getExpirationOffset() );
            }
        }

        private static void scheduleExpiration( InternalWorkingMemory wm, InternalFactHandle handle, PropagationContext context, ObjectTypeNode otn, long insertionTime, long expirationOffset ) {
            // Skip when no expiration applies, or during deserialization
            // (reader context present).
            if ( expirationOffset < 0 || expirationOffset == Long.MAX_VALUE || context.getReaderContext() != null ) {
                return;
            }

            // DROOLS-455 the calculation of the effectiveEnd may overflow and become negative
            EventFactHandle eventFactHandle = (EventFactHandle) handle;
            long nextTimestamp = getNextTimestamp( insertionTime, expirationOffset, eventFactHandle );

            WorkingMemoryReteExpireAction action = new WorkingMemoryReteExpireAction( (EventFactHandle) handle, otn );
            if (nextTimestamp < wm.getTimerService().getCurrentTime()) {
                // Already expired: enqueue the expiration directly.
                wm.addPropagation( action );
            } else {
                // Not yet expired: schedule a timer job and remember its handle
                // so it can be unscheduled when the event is retracted.
                JobContext jobctx = new ObjectTypeNode.ExpireJobContext( action, wm );
                JobHandle jobHandle = wm.getTimerService()
                                        .scheduleJob( job,
                                                      jobctx,
                                                      new PointInTimeTrigger( nextTimestamp, null, null ) );
                jobctx.setJobHandle( jobHandle );
                eventFactHandle.addJob( jobHandle );
            }
        }

        private static long getNextTimestamp( long insertionTime, long expirationOffset, EventFactHandle eventFactHandle ) {
            long effectiveEnd = eventFactHandle.getEndTimestamp() + expirationOffset;
            // Guard against overflow (see DROOLS-455): a negative sum means
            // the addition wrapped, so treat it as "never".
            return Math.max( insertionTime, effectiveEnd >= 0 ? effectiveEnd : Long.MAX_VALUE );
        }

        @Override
        public String toString() {
            return "Insert of " + handle.getObject();
        }
    }

    /** Propagates a fact modification; splittable across rete partitions. */
    class Update extends AbstractPropagationEntry {
        private final InternalFactHandle handle;
        private final PropagationContext context;
        private final ObjectTypeConf objectTypeConf;

        public Update(InternalFactHandle handle, PropagationContext context, ObjectTypeConf objectTypeConf) {
            this.handle = handle;
            this.context = context;
            this.objectTypeConf = objectTypeConf;
        }

        public void execute(InternalWorkingMemory wm) {
            EntryPointNode.propagateModify(handle, context, objectTypeConf, wm);
        }

        @Override
        public boolean isPartitionSplittable() {
            return true;
        }

        @Override
        public PropagationEntry getSplitForPartition( int partitionNr ) {
            return new PartitionedUpdate( handle, context, objectTypeConf, partitionNr );
        }

        @Override
        public String toString() {
            return "Update of " + handle.getObject();
        }
    }

    /** The per-partition slice of an {@link Update}. */
    class PartitionedUpdate extends AbstractPartitionedPropagationEntry {
        private final InternalFactHandle handle;
        private final PropagationContext context;
        private final ObjectTypeConf objectTypeConf;

        PartitionedUpdate(InternalFactHandle handle, PropagationContext context, ObjectTypeConf objectTypeConf, int partition) {
            super( partition );
            this.handle = handle;
            this.context = context;
            this.objectTypeConf = objectTypeConf;
        }

        public void execute(InternalWorkingMemory wm) {
            // Detach this partition's tuples so they can be re-matched below.
            ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples( handle.detachLinkedTuplesForPartition(partition) );
            ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
            for ( int i = 0, length = cachedNodes.length; i < length; i++ ) {
                ObjectTypeNode otn = cachedNodes[i];
                ( (CompositePartitionAwareObjectSinkAdapter) otn.getObjectSinkPropagator() )
                        .propagateModifyObjectForPartition( handle, modifyPreviousTuples,
                                                            context.adaptModificationMaskForObjectType(otn.getObjectType(), wm),
                                                            wm, partition );
                // Between OTNs, drop right-tuples already consumed by this OTN;
                // deliberately skipped after the last one.
                if (i < cachedNodes.length - 1) {
                    EntryPointNode.removeRightTuplesMatchingOTN( context, wm, modifyPreviousTuples, otn );
                }
            }
            // Retract whatever no longer matches after the modify.
            modifyPreviousTuples.retractTuples(context, wm);
        }

        @Override
        public String toString() {
            return "Update of " + handle.getObject() + " for partition " + partition;
        }
    }

    /** Propagates a fact retraction; splittable across rete partitions. */
    class Delete extends AbstractPropagationEntry {
        private final EntryPointNode epn;
        private final InternalFactHandle handle;
        private final PropagationContext context;
        private final ObjectTypeConf objectTypeConf;

        public Delete(EntryPointNode epn, InternalFactHandle handle, PropagationContext context, ObjectTypeConf objectTypeConf) {
            this.epn = epn;
            this.handle = handle;
            this.context = context;
            this.objectTypeConf = objectTypeConf;
        }

        public void execute(InternalWorkingMemory wm) {
            epn.propagateRetract(handle, context, objectTypeConf, wm);
        }

        @Override
        public boolean isPartitionSplittable() {
            return true;
        }

        @Override
        public PropagationEntry getSplitForPartition( int partitionNr ) {
            return new PartitionedDelete( handle, context, objectTypeConf, partitionNr );
        }

        @Override
        public String toString() {
            return "Delete of " + handle.getObject();
        }
    }

    /** The per-partition slice of a {@link Delete}. */
    class PartitionedDelete extends AbstractPartitionedPropagationEntry {
        private final InternalFactHandle handle;
        private final PropagationContext context;
        private final ObjectTypeConf objectTypeConf;

        PartitionedDelete(InternalFactHandle handle, PropagationContext context, ObjectTypeConf objectTypeConf, int partition) {
            super( partition );
            this.handle = handle;
            this.context = context;
            this.objectTypeConf = objectTypeConf;
        }

        public void execute(InternalWorkingMemory wm) {
            ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();

            if ( cachedNodes == null ) {
                // it is possible that there are no ObjectTypeNodes for an object being retracted
                return;
            }

            for ( ObjectTypeNode cachedNode : cachedNodes ) {
                cachedNode.retractObject( handle, context, wm, partition );
            }

            // Pending expiration timers are cancelled once, on the master
            // partition only, to avoid duplicate unscheduling.
            if (handle.isEvent() && isMasterPartition()) {
                ((EventFactHandle) handle).unscheduleAllJobs(wm);
            }
        }

        @Override
        public String toString() {
            return "Delete of " + handle.getObject() + " for partition " + partition;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.client.core.v4;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.InputStream;
import java.net.URI;
import java.util.Iterator;
import org.apache.olingo.client.api.EdmEnabledODataClient;
import org.apache.olingo.client.api.ODataClient;
import org.apache.olingo.client.api.domain.ClientAnnotation;
import org.apache.olingo.client.api.domain.ClientComplexValue;
import org.apache.olingo.client.api.domain.ClientEntity;
import org.apache.olingo.client.api.domain.ClientInlineEntitySet;
import org.apache.olingo.client.api.domain.ClientLink;
import org.apache.olingo.client.api.domain.ClientLinkType;
import org.apache.olingo.client.api.domain.ClientProperty;
import org.apache.olingo.client.api.domain.ClientValuable;
import org.apache.olingo.client.api.domain.ClientValue;
import org.apache.olingo.client.core.AbstractTest;
import org.apache.olingo.client.core.EdmEnabledODataClientImpl;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.ResWrap;
import org.apache.olingo.commons.api.edm.Edm;
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeException;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.commons.core.edm.primitivetype.EdmDateTimeOffset;
import org.apache.olingo.commons.core.edm.primitivetype.EdmDuration;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Round-trip (de)serialization tests for OData v4 entities, exercised with both Atom and
 * JSON payloads. Each private helper deserializes a static test resource into a
 * {@link ClientEntity}, asserts its content, then re-serializes it and checks that the
 * result equals the original entity.
 *
 * All payload streams are now opened with try-with-resources so they are closed even when
 * an assertion fails (the former trailing {@code input.close()} leaked on failure).
 */
public class EntityTest extends AbstractTest {

    @Override
    protected ODataClient getClient() {
        return v4Client;
    }

    /**
     * Builds a client whose Edm is read once from the bundled static-service metadata
     * document and cached for all subsequent calls.
     */
    private EdmEnabledODataClient getEdmEnabledClient() {
        return new EdmEnabledODataClientImpl(null, null, null) {

            private Edm edm;

            @Override
            public Edm getEdm(final String metadataETag) {
                return getCachedEdm();
            }

            @Override
            public Edm getCachedEdm() {
                if (edm == null) {
                    edm = getReader().readMetadata(getClass().getResourceAsStream("staticservice-metadata.xml"));
                }
                return edm;
            }
        };
    }

    private void singleton(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("VipCustomer." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertEquals("Microsoft.Test.OData.Services.ODataWCFService.Customer", entity.getTypeName().toString());

            final ClientProperty birthday = entity.getProperty("Birthday");
            assertTrue(birthday.hasPrimitiveValue());
            assertEquals(EdmDateTimeOffset.getInstance(), birthday.getPrimitiveValue().getType());

            final ClientProperty timeBetweenLastTwoOrders = entity.getProperty("TimeBetweenLastTwoOrders");
            assertTrue(timeBetweenLastTwoOrders.hasPrimitiveValue());
            assertEquals(EdmDuration.getInstance(), timeBetweenLastTwoOrders.getPrimitiveValue().getType());

            // exactly the three expected navigation links must be present
            int checked = 0;
            for (ClientLink link : entity.getNavigationLinks()) {
                if ("Parent".equals(link.getName())) {
                    checked++;
                    assertEquals(ClientLinkType.ENTITY_NAVIGATION, link.getType());
                }
                if ("Orders".equals(link.getName())) {
                    checked++;
                    // the link type is only asserted for Atom payloads
                    if (contentType.isCompatible(ContentType.APPLICATION_ATOM_SVC)
                        || contentType.isCompatible(ContentType.APPLICATION_ATOM_XML)) {
                        assertEquals(ClientLinkType.ENTITY_SET_NAVIGATION, link.getType());
                    }
                }
                if ("Company".equals(link.getName())) {
                    checked++;
                    assertEquals(ClientLinkType.ENTITY_NAVIGATION, link.getType());
                }
            }
            assertEquals(3, checked);

            assertEquals(2, entity.getOperations().size());
            assertEquals("#Microsoft.Test.OData.Services.ODataWCFService.ResetAddress",
                entity.getOperation("Microsoft.Test.OData.Services.ODataWCFService.ResetAddress").getMetadataAnchor());
            assertEquals("#Microsoft.Test.OData.Services.ODataWCFService.GetHomeAddress",
                entity.getOperation("Microsoft.Test.OData.Services.ODataWCFService.GetHomeAddress").getMetadataAnchor());

            // operations won't get serialized
            entity.getOperations().clear();
            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomSingleton() throws Exception {
        singleton(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonSingleton() throws Exception {
        singleton(ContentType.JSON_FULL_METADATA);
    }

    private void withEnums(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("Products_5." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);

            final ClientProperty skinColor = entity.getProperty("SkinColor");
            assertTrue(skinColor.hasEnumValue());
            assertEquals("Microsoft.Test.OData.Services.ODataWCFService.Color", skinColor.getEnumValue().getTypeName());
            assertEquals("Red", skinColor.getEnumValue().getValue());

            final ClientProperty coverColors = entity.getProperty("CoverColors");
            assertTrue(coverColors.hasCollectionValue());
            for (final Iterator<ClientValue> itor = coverColors.getCollectionValue().iterator(); itor.hasNext();) {
                final ClientValue item = itor.next();
                assertTrue(item.isEnum());
            }

            // operations won't get serialized
            entity.getOperations().clear();
            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomWithEnums() throws Exception {
        withEnums(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonWithEnums() throws Exception {
        withEnums(ContentType.JSON_FULL_METADATA);
    }

    private void withInlineEntitySet(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream(
            "Accounts_101_expand_MyPaymentInstruments." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);

            final ClientLink instruments = entity.getNavigationLink("MyPaymentInstruments");
            assertNotNull(instruments);
            assertEquals(ClientLinkType.ENTITY_SET_NAVIGATION, instruments.getType());

            final ClientInlineEntitySet inline = instruments.asInlineEntitySet();
            assertNotNull(inline);
            assertEquals(3, inline.getEntitySet().getEntities().size());

            // count shouldn't be serialized
            inline.getEntitySet().setCount(3);

            // operations won't get serialized
            entity.getOperations().clear();
            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomWithInlineEntitySet() throws Exception {
        withInlineEntitySet(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonWithInlineEntitySet() throws Exception {
        withInlineEntitySet(ContentType.JSON_FULL_METADATA);
    }

    private void mediaEntity(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream(
            "Advertisements_f89dee73-af9f-4cd4-b330-db93c25ff3c7." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertTrue(entity.isMediaEntity());
            assertNotNull(entity.getMediaContentSource());
            assertEquals("\"8zOOKKvgOtptr4gt8IrnapX3jds=\"", entity.getMediaETag());

            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomMediaEntity() throws Exception {
        mediaEntity(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonMediaEntity() throws Exception {
        mediaEntity(ContentType.JSON_FULL_METADATA);
    }

    private void withStream(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("PersonDetails_1." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertFalse(entity.isMediaEntity());

            final ClientLink editMedia = entity.getMediaEditLink("Photo");
            assertNotNull(editMedia);

            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomWithStream() throws Exception {
        withStream(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonWithStream() throws Exception {
        withStream(ContentType.JSON_FULL_METADATA);
    }

    private void ref(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("entityReference." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertNotNull(entity.getId());

            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomRef() throws Exception {
        ref(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonRef() throws Exception {
        ref(ContentType.JSON);
    }

    private void complexNavigationProperties(final ContentType contentType) throws Exception {
        try (InputStream input =
            getClass().getResourceAsStream("entity.withcomplexnavigation." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);

            final ClientComplexValue addressValue = entity.getProperty("Address").getComplexValue();
            assertNotNull(addressValue);
            assertNotNull(addressValue.getNavigationLink("Country"));

            // ETag is not serialized
            entity.setETag(null);
            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    public void atomComplexNavigationProperties() throws Exception {
        complexNavigationProperties(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void jsonComplexNavigationProperties() throws Exception {
        complexNavigationProperties(ContentType.JSON);
    }

    // Former "throws EdmPrimitiveTypeException, Exception" was redundant: Exception covers it.
    private void annotated(final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("annotated." + getSuffix(contentType))) {
            final ClientEntity entity = getClient().getBinder().getODataEntity(
                getClient().getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertFalse(entity.getAnnotations().isEmpty());

            ClientAnnotation annotation = entity.getAnnotations().get(0);
            assertEquals("com.contoso.display.highlight", annotation.getTerm());
            assertEquals(true, annotation.getPrimitiveValue().toCastValue(Boolean.class));

            annotation = entity.getAnnotations().get(1);
            assertEquals("com.contoso.PersonalInfo.PhoneNumbers", annotation.getTerm());
            assertTrue(annotation.hasCollectionValue());

            annotation = entity.getProperty("LastName").getAnnotations().get(0);
            assertEquals("com.contoso.display.style", annotation.getTerm());
            assertTrue(annotation.hasComplexValue());

            final ClientLink orders = entity.getNavigationLink("Orders");
            assertFalse(orders.getAnnotations().isEmpty());

            annotation = orders.getAnnotations().get(0);
            assertEquals("com.contoso.display.style", annotation.getTerm());
            assertEquals("com.contoso.display.styleType", annotation.getValue().getTypeName());
            assertTrue(annotation.hasComplexValue());
            assertEquals(2,
                annotation.getValue().asComplex().get("order").getPrimitiveValue().toCastValue(Integer.class), 0);

            final ClientEntity written = getClient().getBinder().getODataEntity(
                new ResWrap<Entity>((URI) null, null, getClient().getBinder().getEntity(entity)));
            assertEquals(entity, written);
        }
    }

    @Test
    @Ignore
    public void atomAnnotated() throws Exception {
        annotated(ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    @Ignore
    public void jsonAnnotated() throws Exception {
        annotated(ContentType.JSON);
    }

    private void derived(final ODataClient client, final ContentType contentType) throws Exception {
        try (InputStream input = getClass().getResourceAsStream("Customer." + getSuffix(contentType))) {
            final ClientEntity entity =
                client.getBinder().getODataEntity(client.getDeserializer(contentType).toEntity(input));
            assertNotNull(entity);
            assertEquals("Microsoft.Test.OData.Services.ODataWCFService.Customer", entity.getTypeName().toString());
            assertEquals("Microsoft.Test.OData.Services.ODataWCFService.CompanyAddress",
                ((ClientValuable) entity.getProperty("HomeAddress")).getValue().getTypeName());
        }
    }

    @Test
    public void derivedFromAtom() throws Exception {
        derived(getClient(), ContentType.APPLICATION_ATOM_XML);
    }

    @Test
    public void derivedFromJSON() throws Exception {
        derived(getEdmEnabledClient(), ContentType.JSON);
    }

    @Test
    public void derivedFromFullJSON() throws Exception {
        derived(getClient(), ContentType.JSON_FULL_METADATA);
    }
}
| |
/*
* Copyright (C) 2016-2018 Selerity, Inc. (support@seleritycorp.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seleritycorp.common.base.http.client;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.reset;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import com.google.gson.JsonObject;
import com.google.gson.stream.JsonReader;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.ReaderInputStream;
import org.apache.http.HttpEntity;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ContentType;
import org.easymock.Capture;
import org.easymock.EasyMockSupport;
import org.junit.Before;
import org.junit.Test;
public class HttpRequestTest extends EasyMockSupport {
private HttpClient netHttpClient;
private FileHttpClient fileHttpClient;
private HttpResponse.Factory responseFactory;
private HttpResponseStream.Factory responseStreamFactory;
private HttpResponse httpResponse;
private HttpResponseStream httpResponseStream;
private CloseableHttpResponse backendResponse;
private Capture<HttpUriRequest> backendRequestCapture;
@Before
public void setUp() throws Exception {
netHttpClient = createMock(HttpClient.class);
fileHttpClient = createMock(FileHttpClient.class);
responseFactory = createMock(HttpResponse.Factory.class);
responseStreamFactory = createMock(HttpResponseStream.Factory.class);
httpResponse = createMock(HttpResponse.class);
httpResponseStream = createMock(HttpResponseStream.class);
backendResponse = createMock(CloseableHttpResponse.class);
backendRequestCapture = newCapture();
expect(netHttpClient.execute(capture(backendRequestCapture))).andReturn(backendResponse);
expect(responseFactory.create(backendResponse)).andReturn(httpResponse);
}
@Test
public void testExecuteOk() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpResponse response = request.execute();
verifyAll();
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
}
@Test
public void testExecuteMalformedUri() throws Exception {
reset(netHttpClient);
reset(responseFactory);
replayAll();
HttpRequest request = createHttpRequest("http://");
try {
request.execute();
failBecauseExceptionWasNotThrown(HttpException.class);
} catch (HttpException e) {
assertThat(e.getCause()).isInstanceOf(IllegalArgumentException.class);
}
verifyAll();
}
@Test
public void testPerformPerformingFails() throws Exception {
reset(netHttpClient);
reset(responseFactory);
IOException expected = new IOException("catch me");
expect(netHttpClient.execute(capture(backendRequestCapture))).andThrow(expected);
replayAll();
HttpRequest request = createHttpRequest("foo");
try {
request.execute();
failBecauseExceptionWasNotThrown(HttpException.class);
} catch (HttpException e) {
assertThat(e.getCause()).isEqualTo(expected);
}
verifyAll();
}
@Test
public void testExecuteGetWithData() throws Exception {
reset(netHttpClient);
reset(responseFactory);
replayAll();
HttpRequest request = createHttpRequest("foo").addData("bar");
try {
request.execute();
failBecauseExceptionWasNotThrown(HttpException.class);
} catch (HttpException e) {
assertThat(e.getMessage()).contains("data");
}
verifyAll();
}
@Test
public void testExecuteExpecedStatusCode() throws Exception {
expect(httpResponse.getStatusCode()).andReturn(123);
replayAll();
HttpRequest request = createHttpRequest("foo").setExpectedStatusCode(123);
HttpResponse response = request.execute();
verifyAll();
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
}
@Test
public void testExecuteUnexpecedStatusCode() throws Exception {
expect(httpResponse.getStatusCode()).andReturn(200);
replayAll();
HttpRequest request = createHttpRequest("foo").setExpectedStatusCode(123);
try {
request.execute();
failBecauseExceptionWasNotThrown(HttpException.class);
} catch (HttpException e) {
assertThat(e.getMessage()).contains("123");
assertThat(e.getMessage()).contains("200");
}
verifyAll();
}
@Test
public void testSetUserAgentPlain() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting = request.setUserAgent("foo");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
assertThat(backendRequest.getHeaders("User-Agent")).hasSize(1);
assertThat(backendRequest.getFirstHeader("User-Agent").getValue()).isEqualTo("foo");
}
@Test
public void testSetUserAgentOverwrite() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setUserAgent("foo1");
HttpRequest requestAfterSetting2 = request.setUserAgent("foo2");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
assertThat(backendRequest.getHeaders("User-Agent")).hasSize(1);
assertThat(backendRequest.getFirstHeader("User-Agent").getValue()).isEqualTo("foo2");
}
@Test
public void testSetUserAgentReset() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setUserAgent("foo1");
HttpRequest requestAfterSetting2 = request.setUserAgent(null);
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
assertThat(backendRequest.getHeaders("User-Agent")).hasSize(0);
}
@Test
public void testSetReadTimeout() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting = request.setReadTimeoutMillis(4711);
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequestRaw = backendRequestCapture.getValue();
assertThat(backendRequestRaw).isInstanceOf(HttpRequestBase.class);
HttpRequestBase backendRequest = (HttpRequestBase) backendRequestRaw;
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
assertThat(backendRequest.getConfig().getSocketTimeout()).isEqualTo(4711);
}
@Test
public void testSetMethodPost() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting = request.setMethodPost();
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("POST");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
}
@Test
public void testAddDataSingle() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setMethodPost();
HttpRequest requestAfterSetting2 = request.addData("foo=bar%");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequestRaw = backendRequestCapture.getValue();
assertThat(backendRequestRaw).isInstanceOf(HttpEntityEnclosingRequestBase.class);
HttpEntityEnclosingRequestBase backendRequest =
(HttpEntityEnclosingRequestBase) backendRequestRaw;
assertThat(backendRequest.getMethod()).isEqualTo("POST");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
HttpEntity entity = backendRequest.getEntity();
assertThat(entity.getContentType().getValue()).isEqualTo("text/plain; charset=UTF-8");
assertThat(IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8)).isEqualTo("foo=bar%");
}
@Test
public void testAddDataSingleWithContentType() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setMethodPost();
HttpRequest requestAfterSetting2 = request.setContentType(ContentType.APPLICATION_JSON);
HttpRequest requestAfterSetting3 = request.addData("foo=bar%");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(request).isSameAs(requestAfterSetting3);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequestRaw = backendRequestCapture.getValue();
assertThat(backendRequestRaw).isInstanceOf(HttpEntityEnclosingRequestBase.class);
HttpEntityEnclosingRequestBase backendRequest =
(HttpEntityEnclosingRequestBase) backendRequestRaw;
assertThat(backendRequest.getMethod()).isEqualTo("POST");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
HttpEntity entity = backendRequest.getEntity();
assertThat(entity.getContentType().getValue()).isEqualTo("application/json; charset=UTF-8");
assertThat(IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8)).isEqualTo("foo=bar%");
}
@Test
public void testAddDataAppending() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setMethodPost();
HttpRequest requestAfterSetting2 = request.addData("foo=bar%");
HttpRequest requestAfterSetting3 = request.addData("baz&quux");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(request).isSameAs(requestAfterSetting3);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequestRaw = backendRequestCapture.getValue();
assertThat(backendRequestRaw).isInstanceOf(HttpEntityEnclosingRequestBase.class);
HttpEntityEnclosingRequestBase backendRequest =
(HttpEntityEnclosingRequestBase) backendRequestRaw;
assertThat(backendRequest.getMethod()).isEqualTo("POST");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
HttpEntity entity = backendRequest.getEntity();
assertThat(entity.getContentType().getValue()).isEqualTo("text/plain; charset=UTF-8");
assertThat(IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8)).isEqualTo("foo=bar%&baz&quux");
}
@Test
public void testAddDataAppendingWithContentType() throws Exception {
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpRequest requestAfterSetting1 = request.setMethodPost();
HttpRequest requestAfterSetting2 = request.addData("foo=bar%");
HttpRequest requestAfterSetting3 = request.setContentType(ContentType.APPLICATION_JSON);
HttpRequest requestAfterSetting4 = request.addData("baz&quux");
HttpResponse response = request.execute();
verifyAll();
assertThat(request).isSameAs(requestAfterSetting1);
assertThat(request).isSameAs(requestAfterSetting2);
assertThat(request).isSameAs(requestAfterSetting3);
assertThat(request).isSameAs(requestAfterSetting4);
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequestRaw = backendRequestCapture.getValue();
assertThat(backendRequestRaw).isInstanceOf(HttpEntityEnclosingRequestBase.class);
HttpEntityEnclosingRequestBase backendRequest =
(HttpEntityEnclosingRequestBase) backendRequestRaw;
assertThat(backendRequest.getMethod()).isEqualTo("POST");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
HttpEntity entity = backendRequest.getEntity();
assertThat(entity.getContentType().getValue()).isEqualTo("application/json; charset=UTF-8");
assertThat(IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8)).isEqualTo("foo=bar%&baz&quux");
}
@Test
public void testExecuteFileUriOk() throws Exception {
reset(netHttpClient);
expect(fileHttpClient.execute(capture(backendRequestCapture))).andReturn(backendResponse);
replayAll();
HttpRequest request = createHttpRequest("file:///foo");
HttpResponse response = request.execute();
verifyAll();
assertThat(response).isEqualTo(httpResponse);
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("file:///foo");
}
@Test
public void testExecuteAndStreamOk() throws Exception {
reset(responseFactory);
StringReader stringReader = new StringReader("response");
ReaderInputStream readerInputStream = new ReaderInputStream(stringReader);
expect(responseStreamFactory.create(backendResponse)).andReturn(httpResponseStream);
expect(httpResponseStream.getBodyAsStream()).andReturn(readerInputStream);
replayAll();
HttpRequest request = createHttpRequest("foo");
HttpResponseStream response = request.executeAndStream();
String result = IOUtils.toString(response.getBodyAsStream());
verifyAll();
assertThat(result).isEqualTo("response");
HttpUriRequest backendRequest = backendRequestCapture.getValue();
assertThat(backendRequest.getMethod()).isEqualTo("GET");
assertThat(backendRequest.getURI().toString()).isEqualTo("foo");
}
private HttpRequest createHttpRequest(String url) throws HttpException {
return new HttpRequest(url, netHttpClient, fileHttpClient, responseFactory, responseStreamFactory);
}
}
| |
/*-------------------------------------------------------------------------------------------------------------------*\
| Copyright (C) 2014 eBay Software Foundation |
| |
| Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance |
| with the License. |
| |
| You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software distributed under the License is distributed |
| on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for |
| the specific language governing permissions and limitations under the License. |
\*-------------------------------------------------------------------------------------------------------------------*/
package com.paypal.selion.platform.dataprovider;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Hashtable;
import java.util.List;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
/**
* This class provide several methods to retrieve test data from an Excel workbook. Users can get a single row of data
* by providing the excel filename, the data sheet name, and they key. Or get the whole data sheet by providing excel
* file name and the data sheet name.
*
*/
public class SimpleExcelDataProvider extends AbstractExcelDataProvider {
/**
 * The constructor will use the path name and the file name of the Excel workbook to initialize the input stream
 * before the stream is being used by several methods to get the test data from the Excel workbook.
 *
 * If pathName is not null then the users deliberately specified the resource file in other location than the
 * classpaths. If pathName is null, then the resource file can be found using the classpath.
 *
 * <h3>Sample usage:</h3>
 *
 * <pre>
 * String pathName = "src/test/java";
 * String fileName = "DataReaderTest.xls";
 * LOCAL_DATA myData = new LOCAL_DATA();
 * Object [][] myObj;
 *
 * // To get a single row of excel sheet using a key associated with the data
 * myData = (LOCAL_DATA) new SimpleExcelDataProvider(pathName, fileName).getSingleExcelRow(myData, "4");
 *
 * // To get a whole sheet of excel data. This will not need key.
 * myObj = new SimpleExcelDataProvider(pathName, fileName).getAllExcelRows(myData);
 * myData = (LOCAL_DATA) myObj[1][0];
 * System.out.println(myObj.seller.bank[0].name);
 * </pre>
 *
 * @param pathName
 *            the path where the excel file is located.
 * @param fileName
 *            the name of the excel file to be read.
 * @throws IOException
 *             if the workbook cannot be located or opened.
 */
public SimpleExcelDataProvider(String pathName, String fileName) throws IOException {
    super(pathName, fileName);
}
/**
* Use this constructor when a file that is available in the classpath is to be read by the SimpleExcelDataProvider
* for supporting Data Driven Tests.
*
* @param fileName
* the name of the excel file to be read.
* @throws IOException
*/
public SimpleExcelDataProvider(String fileName) throws IOException {
this(null, fileName);
}
/**
* This function will read all rows of a specified excel sheet and store the data to a hash table. Users can get a
* row of data from the hash table by call a get with a specified key. This excel reader function is for users who
* want to control the data feed to the test cases manually without the benefit of TestNG DataProvider. <br>
* <br>
* <b>Note:</b> Unlike {@link SimpleExcelDataProvider#getAllExcelRows(Object)} this method will skip ALL blank rows
* that may occur in between data in the spreadsheet. <br>
* Hence the number of rows that are fetched by this method and
* {@link SimpleExcelDataProvider#getAllExcelRows(Object)} <b>NEED NOT</b> be the same.
*
* <h3>Example:</h3>
*
* <pre>
* ...
* MyDataStructure myObj = new MyDataStructure();
* HashTable<String, Object> myExcelTableData;
* ...
* myExceltableData = SimpleExcelDataProvider.getAllRowAsHasTable(myObj);
* </pre>
*
* @param myObj
* the user defined type object which provide details structure to this function.
* @return an object of type {@link Hashtable} that represents the excel sheet data in form of hashTable.
* @throws ExcelDataProviderException
* if invalid class name from input parameter myObj
*/
public Hashtable<String, Object> getAllRowsAsHashTable(Object myObj) throws ExcelDataProviderException {
logger.entering(myObj);
Hashtable<String, Object> hashTable = new Hashtable<String, Object>();
Sheet sheet = excelReader.fetchSheet(myObj.getClass().getSimpleName());
int numRows = sheet.getPhysicalNumberOfRows();
for (int i = 2; i <= numRows; i++) {
Row row = sheet.getRow(i - 1);
if ((row != null) && (row.getCell(0) != null)) {
Object obj = getSingleExcelRow(myObj, i, false);
String key = row.getCell(0).toString();
if ((key != null) && (obj != null)) {
hashTable.put(key, obj);
}
}
}
logger.exiting(hashTable);
return hashTable;
}
/**
* This method fetches a specific row from an excel sheet which can be identified using a key and returns the data
* as an Object which can be cast back into the user's actual data type.
*
* @param userObj
* - An Object into which data is to be packed into
* @param key
* - A string that represents a key to search for in the excel sheet
* @return - An Object which can be cast into the user's actual data type.
*
* @throws ExcelDataProviderException
*
*/
@Override
public Object getSingleExcelRow(Object userObj, String key) throws ExcelDataProviderException {
return getSingleExcelRow(userObj, key, true);
}
/**
* This method can be used to fetch a particular row from an excel sheet.
*
* @param userObj
* - The User defined object into which the data is to be packed into.
* @param index
* - The row number from the excel sheet that is to be read. For e.g., if you wanted to read the 2nd row
* (which is where your data exists) in your excel sheet, the value for index would be 1. <b>This method
* assumes that your excel sheet would have a header which it would EXCLUDE.</b> When specifying index
* value always remember to ignore the header, since this method will look for a particular row ignoring
* the header row.
* @return - An object that represents the data for a given row in the excel sheet.
* @throws ExcelDataProviderException
*/
@Override
public Object getSingleExcelRow(Object userObj, int index) throws ExcelDataProviderException {
return getSingleExcelRow(userObj, index, true);
}
/**
* This function will use the input string representing the indexes to collect and return the correct excel sheet
* data rows as two dimensional object to be used as TestNG DataProvider.
*
* @param myData
* the user defined type object which provide details structure to this function.
* @param indexes
* the string represent the keys for the search and return the wanted rows. It is in the format of: <li>
* "1, 2, 3" for individual indexes. <li>"1-4, 6-8, 9-10" for ranges of indexes. <li>
* "1, 3, 5-7, 10, 12-14" for mixing individual and range of indexes.
* @return Object[][] Two dimensional object to be used with TestNG DataProvider
* @throws ExcelDataProviderException
*/
@Override
public Object[][] getExcelRows(Object myData, String indexes) throws ExcelDataProviderException {
logger.entering(new Object[] { myData, indexes });
List<Integer> arrayIndex = null;
try {
arrayIndex = DataProviderHelper.parseIndexString(indexes);
} catch (DataProviderException e) {
throw new ExcelDataProviderException(e.getMessage(), e);
}
Object[][] obj = new Object[arrayIndex.size()][1];
for (int i = 0; i < arrayIndex.size(); i++) {
int actualIndex = arrayIndex.get(i) + 1;
obj[i][0] = getSingleExcelRow(myData, actualIndex, false);
}
logger.exiting(obj);
return obj;
}
/**
* This function will use the input string representing the keys to collect and return the correct excel sheet data
* rows as two dimensional object to be used as TestNG DataProvider.
*
* @param myObj
* the user defined type object which provides details structure to this function.
* @param keys
* the string represents the list of key for the search and return the wanted row. It is in the format of
* {"row1", "row3", "row5"}
* @return Object[][] two dimensional object to be used with TestNG DataProvider
* @throws ExcelDataProviderException
*/
@Override
public Object[][] getExcelRows(Object myObj, String[] keys) throws ExcelDataProviderException {
logger.entering(new Object[] { myObj, keys });
Object[][] obj = new Object[keys.length][1];
for (int i = 0; i < keys.length; i++) {
obj[i][0] = getSingleExcelRow(myObj, keys[i], true);
}
logger.exiting(obj);
return obj;
}
/**
* This function will read the whole excel sheet and map the data into two-dimensional array of object which is
* compatible with TestNG DataProvider to provide real test data driven development. This function will ignore all
* rows in which keys are preceded by "#" as a comment character.
*
* For the function to work, the sheet names have to be exactly named as the user defined data type. In the example
* below, there must be a sheet name "LOCAL_DATA" in the workbook.
*
* <h3>Example how to use TestNG DataProvider:</h3>
*
* <pre>
* '@DataProvider(name = "dataProvider1")'
* public Object[][] createData1() throws Exception {
*
* // Declare your objects
* String pathName = "src/test/java/com/paypal/test/datareader";
* String fileName = "DataReader.xls";
*
* // Declare your data block
* LOCAL_DATA myData = new LOCAL_DATA();
*
* // Pass your data block to "getAllExcelRows"
* Object[][] object = new SimpleExcelDataProvider(pathName, fileName)
* .getAllExcelRows(myData);
*
* // return the two-dimensional array object
* return object;
* }
*
* // Specify our TestNG DataProvider
* '@Test(dataProvider = "dataProvider1")'
* public void verifyLocalData1(LOCAL_DATA data) {
* // Your data will be distribute to your test case
* // one row per instance, and all can be run at the same time.
* System.out.println("Name: " + data.name);
* System.out.println("Password: " + data.password);
* System.out.println("the bank: " + data.bank.bankName);
*
* System.out.println("Ph1: " + data.phone.areaCode);
* System.out.println("Ph2: " + data.cell.areaCode);
* System.out.println("Bank Address: " + data.bank.address.street);
* System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
* }
* </pre>
*
* @param myObj
* the user defined type object which provide details structure to this function.
* @return Object[][] a two-dimensional object to be used with TestNG DataProvider
* @throws ExcelDataProviderException
*/
@Override
public Object[][] getAllExcelRows(Object myObj) throws ExcelDataProviderException {
logger.entering(myObj);
int i;
Object[][] obj = null;
Field[] fields = myObj.getClass().getDeclaredFields();
// Extracting number of rows of data to read
// Notice that numRows is returning the actual
// number of non-blank rows. Thus if there are
// blank rows in the sheet then we will miss
// some last rows of data.
List<Row> rowToBeRead = excelReader.getAllExcelRows(myObj.getClass().getSimpleName(), false);
if (!rowToBeRead.isEmpty()) {
i = 0;
obj = new Object[rowToBeRead.size()][1];
for (Row row : rowToBeRead) {
List<String> excelRowData = excelReader.getRowContents(row, fields.length);
if (excelRowData.size() != 0) {
try {
obj[i++][0] = prepareObject(myObj, fields, excelRowData);
} catch (IllegalAccessException e) {
throw new ExcelDataProviderException("Unable to create instance of type '"
+ myObj.getClass().getName() + "'", e);
}
}
}
}
logger.exiting(obj);
return obj;
}
}
| |
package co.gem.round.coinop;
import co.gem.round.encoding.Base58;
import co.gem.round.encoding.Hex;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.bitcoinj.core.*;
import org.bitcoinj.crypto.DeterministicKey;
import org.bitcoinj.crypto.HDKeyDerivation;
import org.bitcoinj.crypto.TransactionSignature;
import org.bitcoinj.script.Script;
import org.bitcoinj.script.ScriptBuilder;
import org.bitcoinj.wallet.DeterministicKeyChain;
import org.bitcoinj.wallet.DeterministicSeed;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class MultiWallet {
    /** Supported networks. */
    public static enum Blockchain {
        TESTNET, MAINNET
    }

    // Raw HD seeds for the primary and backup keys; cleared by purgeSeeds().
    private byte[] primarySeed, backupSeed;
    private DeterministicKey primaryPrivateKey, backupPrivateKey,
            backupPublicKey, cosignerPublicKey;
    private NetworkParameters networkParameters;

    /**
     * Creates a brand-new wallet: fresh random primary and backup seeds, master
     * private keys derived from both, and the backup master public key. The
     * cosigner public key is intentionally left unset by this constructor.
     *
     * @param networkParameters network the wallet keys are serialized for
     */
    private MultiWallet(NetworkParameters networkParameters) {
        this.networkParameters = networkParameters;
        SecureRandom random = new SecureRandom();
        this.primarySeed = new DeterministicKeyChain(random).getSeed().getSeedBytes();
        this.backupSeed = new DeterministicKeyChain(random).getSeed().getSeedBytes();
        this.primaryPrivateKey = HDKeyDerivation.createMasterPrivateKey(primarySeed);
        this.backupPrivateKey = HDKeyDerivation.createMasterPrivateKey(backupSeed);
        this.backupPublicKey = HDKeyDerivation.createMasterPubKeyFromBytes(backupPrivateKey.getPubKey(),
                backupPrivateKey.getChainCode());
    }

    /**
     * Restores a wallet from previously serialized material.
     *
     * <p>Bug fix: the previous implementation decoded the cosigner key, backup key
     * and primary seed inside one try block (cosigner first) and only
     * printStackTrace()'d failures. A null or malformed cosigner/backup key
     * therefore skipped decoding the primary seed, and the wallet was silently
     * built from an all-zero seed. Each input is now handled independently and a
     * decode failure is fatal.
     *
     * @param primaryPrivateSeed hex-encoded primary seed (required)
     * @param backupPublicKey    Base58-serialized backup extended public key, may be null
     * @param cosignerPublicKey  Base58-serialized cosigner extended public key, may be null
     * @throws IllegalArgumentException if any supplied value cannot be decoded
     */
    private MultiWallet(String primaryPrivateSeed, String backupPublicKey, String cosignerPublicKey) {
        byte[] decodedPrimarySeed;
        try {
            decodedPrimarySeed = Hex.decode(primaryPrivateSeed);
        } catch (Exception e) {
            throw new IllegalArgumentException("Unable to hex-decode the primary private seed", e);
        }
        this.primaryPrivateKey = HDKeyDerivation.createMasterPrivateKey(decodedPrimarySeed);
        if (backupPublicKey != null) {
            this.backupPublicKey = deserializePublicKey(backupPublicKey);
        }
        if (cosignerPublicKey != null) {
            this.cosignerPublicKey = deserializePublicKey(cosignerPublicKey);
        }
        // NOTE(review): the network is hard-coded to MAINNET here even though the
        // key header bytes decoded above may indicate TESTNET -- confirm this is
        // intentional (behavior preserved from the original implementation).
        this.networkParameters = networkParametersFromBlockchain(Blockchain.MAINNET);
    }

    /**
     * Base58-decodes a serialized extended public key, infers its network from the
     * first four (header) bytes and deserializes it for that network.
     *
     * @throws IllegalArgumentException if the key cannot be Base58-decoded
     */
    private static DeterministicKey deserializePublicKey(String serializedKey) {
        byte[] decoded;
        try {
            decoded = Base58.decode(serializedKey);
        } catch (Exception e) {
            throw new IllegalArgumentException("Unable to Base58-decode public key", e);
        }
        NetworkParameters networkParameters = networkParametersFromHeaderBytes(ByteBuffer.wrap(decoded).getInt());
        return DeterministicKey.deserializeB58(serializedKey, networkParameters);
    }

    /** Maps a {@link Blockchain} to bitcoinj network parameters. */
    public static NetworkParameters networkParametersFromBlockchain(Blockchain blockchain) {
        switch (blockchain) {
            case MAINNET:
                return NetworkParameters.fromID(NetworkParameters.ID_MAINNET);
            case TESTNET:
                return NetworkParameters.fromID(NetworkParameters.ID_TESTNET);
        }
        // Unreachable for current enum values; defensive default.
        return NetworkParameters.fromID(NetworkParameters.ID_TESTNET);
    }

    /**
     * Maps BIP32 extended-key header bytes (xpub/xprv vs. tpub/tprv) to network
     * parameters; unknown headers default to TESTNET.
     */
    public static NetworkParameters networkParametersFromHeaderBytes(int headerBytes) {
        if (headerBytes == 0x043587CF || headerBytes == 0x04358394)
            return NetworkParameters.fromID(NetworkParameters.ID_TESTNET);
        if (headerBytes == 0x0488B21E || headerBytes == 0x0488ADE4)
            return NetworkParameters.fromID(NetworkParameters.ID_MAINNET);
        return NetworkParameters.fromID(NetworkParameters.ID_TESTNET);
    }

    /** Generates a brand-new wallet for the given blockchain. */
    public static MultiWallet generate(Blockchain blockchain) {
        NetworkParameters networkParameters = networkParametersFromBlockchain(blockchain);
        return new MultiWallet(networkParameters);
    }

    /**
     * Restores a wallet from serialized seeds/keys.
     *
     * @throws IllegalArgumentException if any supplied value cannot be decoded
     */
    public static MultiWallet importSeeds(String primaryPrivateSeed, String backupPublicSeed, String cosignerPublicSeed) {
        return new MultiWallet(primaryPrivateSeed, backupPublicSeed, cosignerPublicSeed);
    }

    /** Returns which blockchain this wallet's network parameters belong to. */
    public Blockchain blockchain() {
        if (networkParameters.getId().equals(NetworkParameters.ID_MAINNET))
            return Blockchain.MAINNET;
        else if (networkParameters.getId().equals(NetworkParameters.ID_TESTNET))
            return Blockchain.TESTNET;
        return Blockchain.TESTNET;
    }

    /** Hex encoding of the primary seed; null-seed after purgeSeeds() or import. */
    public String serializedPrimaryPrivateSeed() {
        return Hex.encode(this.primarySeed);
    }

    /** Hex encoding of the backup seed; null-seed after purgeSeeds() or import. */
    public String serializedBackupPrivateSeed() {
        return Hex.encode(this.backupSeed);
    }

    public String serializedPrimaryPrivateKey() {
        return this.primaryPrivateKey.serializePrivB58(networkParameters);
    }

    public String serializedPrimaryPublicKey() {
        return this.primaryPrivateKey.serializePubB58(networkParameters);
    }

    public String serializedBackupPrivateKey() {
        return this.backupPrivateKey.serializePrivB58(networkParameters);
    }

    public String serializedBackupPublicKey() {
        return this.backupPublicKey.serializePubB58(networkParameters);
    }

    public String serializedCosignerPublicKey() {
        return this.cosignerPublicKey.serializePubB58(networkParameters);
    }

    /** Drops the raw seed material once it has been stored elsewhere. */
    public void purgeSeeds() {
        this.primarySeed = null;
        this.backupSeed = null;
    }

    public DeterministicKey childPrimaryPrivateKeyFromPath(String path) {
        return childKeyFromPath(path, this.primaryPrivateKey);
    }

    public DeterministicKey childPrimaryPublicKeyFromPath(String path) {
        return childKeyFromPath(path, this.primaryPrivateKey.dropPrivateBytes().dropParent());
    }

    public DeterministicKey childBackupPublicKeyFromPath(String path) {
        return childKeyFromPath(path, this.backupPublicKey);
    }

    public DeterministicKey childCosignerPublicKeyFromPath(String path) {
        return childKeyFromPath(path, this.cosignerPublicKey);
    }

    /**
     * Derives a child key by walking a slash-separated path (e.g. "m/0/1").
     * The first segment is skipped; it names the parent itself.
     */
    public static DeterministicKey childKeyFromPath(String path, DeterministicKey parentKey) {
        String[] segments = path.split("/");
        DeterministicKey currentKey = parentKey;
        for (int i = 1; i < segments.length; i++) {
            int childNumber = Integer.parseInt(segments[i]);
            currentKey = HDKeyDerivation.deriveChildKey(currentKey, childNumber);
        }
        return currentKey;
    }

    /**
     * Builds the 2-of-3 multisig redeem script for the derived keys at the given
     * path. Key order (backup, cosigner, primary) must stay stable: it is part of
     * the script and thus of the resulting address.
     */
    public Script redeemScriptForPath(String path) {
        DeterministicKey primaryPublicKey = this.childPrimaryPublicKeyFromPath(path);
        DeterministicKey backupPublicKey = this.childBackupPublicKeyFromPath(path);
        DeterministicKey cosignerPublicKey = this.childCosignerPublicKeyFromPath(path);
        List<ECKey> pubKeys = Arrays.asList(new ECKey[]{
                backupPublicKey, cosignerPublicKey, primaryPublicKey});
        return ScriptBuilder.createMultiSigOutputScript(2, pubKeys);
    }

    /** Signs a sighash with the primary key at the given path, SIGHASH_ALL, Base58-encoded. */
    public String base58SignatureForPath(String walletPath, Sha256Hash sigHash) {
        DeterministicKey primaryPrivateKey = this.childPrimaryPrivateKeyFromPath(walletPath);
        TransactionSignature signature = new TransactionSignature(primaryPrivateKey.sign(sigHash), Transaction.SigHash.ALL, false);
        return Base58.encode(signature.encodeToBitcoin());
    }

    public NetworkParameters networkParameters() {
        return networkParameters;
    }

    /**
     * We return the sig_hash and the wallet_path in every input of
     * every unsigned transaction. Thus, all the data you need to sign the
     * input is on the returned JSON. No need to parse the entire thing -
     * this lets us use multinetwork without BitcoinJ yelling at us.
     */
    public List<String> signaturesFromUnparsedTransaction(JsonObject transactionJson) {
        List<String> signatures = new ArrayList<>();
        for (JsonElement raw : transactionJson.get("inputs").getAsJsonArray()) {
            JsonObject input = raw.getAsJsonObject();
            String sigHash = input.get("sig_hash").getAsString();
            String walletPath = input.get("output").getAsJsonObject().get("metadata")
                    .getAsJsonObject().get("wallet_path").getAsString();
            signatures.add(base58SignatureForPath(walletPath, new Sha256Hash(sigHash)));
        }
        return signatures;
    }

    /**
     * Computes one primary-key signature per input of an already-parsed
     * transaction, recomputing each input's sighash from its redeem script.
     */
    public List<String> signaturesForTransaction(TransactionWrapper transaction) {
        int inputIndex = 0;
        List<String> signatures = new ArrayList<String>();
        for (InputWrapper inputWrapper : transaction.inputs()) {
            String walletPath = inputWrapper.walletPath();
            Script redeemScript = this.redeemScriptForPath(walletPath);
            Sha256Hash sigHash = transaction.transaction()
                    .hashForSignature(inputIndex, redeemScript, Transaction.SigHash.ALL, false);
            String base58Signature = base58SignatureForPath(walletPath, sigHash);
            signatures.add(base58Signature);
            inputIndex++;
        }
        return signatures;
    }
}
| |
package lemming.lemmatisation;
import lemming.auth.WebSession;
import lemming.context.*;
import lemming.data.GenericDataProvider;
import lemming.table.AutoShrinkBehavior;
import lemming.table.BadgeColumn;
import lemming.table.RowSelectColumn;
import lemming.table.TextFilterColumn;
import lemming.ui.DropdownButtonPanel;
import lemming.ui.TitleLabel;
import lemming.ui.input.InputPanel;
import lemming.ui.panel.FeedbackPanel;
import lemming.ui.panel.ModalFormPanel;
import lemming.ui.panel.SidebarPanel;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxChannel;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.attributes.ThrottlingSettings;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeInstantiation;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.filter.FilterForm;
import org.apache.wicket.extensions.markup.html.repeater.util.SortParam;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.OnDomReadyHeaderItem;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.HiddenField;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.util.CollectionModel;
import org.apache.wicket.util.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* An index page that lists all available contexts in a data table for lemmatisation.
*/
@AuthorizeInstantiation({"SIGNED_IN"})
public class LemmatisationPage extends LemmatisationBasePage {
    /**
     * True if the filter form shall be enabled.
     */
    private static final boolean FILTER_FORM_ENABLED = false;

    /**
     * A data table for contexts.
     */
    private final LemmatisationDataTable dataTable;

    /**
     * A sidebar panel displaying comments of contexts.
     */
    private final CommentSidebar commentSidebar;

    /**
     * Creates a lemmatisation page.
     */
    public LemmatisationPage() {
        GenericDataProvider<Context> dataProvider = new GenericDataProvider<>(Context.class,
                new SortParam<>("keyword", true));
        FilterForm<Context> filterForm = new FilterForm<>("filterForm", dataProvider);
        TextField<String> filterValueTextField = new TextField<>("filterTextField", Model.of(""));
        TextField<String> filterPropertyTextField = new HiddenField<>("filterPropertyTextField", Model.of("keyword"));
        // Typed (no raw type): the dropdown selects which Context property to filter on.
        DropdownButtonPanel<Context> dropdownButtonPanel = new DropdownButtonPanel<>(getString("Context.keyword"),
                filterPropertyTextField, getColumns());
        WebMarkupContainer container = new WebMarkupContainer("container");
        Fragment fragment;

        // check if the session is expired
        WebSession.get().checkSessionExpired();

        if (FILTER_FORM_ENABLED) {
            fragment = new Fragment("fragment", "withFilterForm", this);
            dataTable = new LemmatisationDataTable(getColumns(), dataProvider, filterForm);
            filterValueTextField.add(new FilterUpdatingBehavior(filterValueTextField, filterPropertyTextField,
                    dataTable, dataProvider));
            filterPropertyTextField.add(new FilterUpdatingBehavior(filterValueTextField, filterPropertyTextField,
                    dataTable, dataProvider));
            filterForm.add(dataTable);
            fragment.add(filterForm);
        } else {
            fragment = new Fragment("fragment", "withoutFilterForm", this);
            dataTable = new LemmatisationDataTable(getColumns(), dataProvider);
            filterValueTextField.add(new FilterUpdatingBehavior(filterValueTextField, filterPropertyTextField,
                    dataTable, dataProvider));
            filterPropertyTextField.add(new FilterUpdatingBehavior(filterValueTextField, filterPropertyTextField,
                    dataTable, dataProvider));
            fragment.add(dataTable);
        }

        dropdownButtonPanel.setSelectEvent("input");
        filterValueTextField.add(new PageScrollingBehavior());
        commentSidebar = new CommentSidebar("commentSidebar", SidebarPanel.Orientation.RIGHT) {
            @Override
            public void onRemoveComment(IModel<Context> model, AjaxRequestTarget target) {
                // Re-collect the ids of currently selected rows so the selection
                // survives the table refresh triggered by the comment removal.
                Collection<IModel<Context>> rowModels = dataTable.getRowModels();
                CollectionModel<Integer> selectedContextIds = new CollectionModel<>(new ArrayList<>());

                for (IModel<Context> rowModel : rowModels) {
                    if (rowModel.getObject().getSelected()) {
                        selectedContextIds.getObject().add(rowModel.getObject().getId());
                    }
                }

                dataTable.updateSelectedContexts(selectedContextIds);
                target.add(dataTable);
            }
        };
        add(new FeedbackPanel());
        add(new InputPanel());
        add(filterValueTextField);
        add(filterPropertyTextField.setOutputMarkupId(true));
        add(dropdownButtonPanel);
        add(commentSidebar);
        add(container);
        container.add(fragment);
        // auto-shrink following and preceding text columns
        add(new AutoShrinkBehavior());
    }

    /**
     * Called when a lemmatisation page is initialized.
     */
    @Override
    protected void onInitialize() {
        super.onInitialize();
        add(new TitleLabel(getString("LemmatisationPage.header")));

        Panel lemmatisationPanel = new LemmatisationPanel();
        ModalFormPanel setLemmaPanel = new SetLemmaPanel(dataTable);
        ModalFormPanel setPosPanel = new SetPosPanel(dataTable);
        ModalFormPanel addCommentPanel = new AddCommentPanel(dataTable);

        lemmatisationPanel.add(new SetLemmaLink(setLemmaPanel));
        lemmatisationPanel.add(new SetPosLink(setPosPanel));
        lemmatisationPanel.add(new MarkContextLink(dataTable));
        lemmatisationPanel.add(new AddCommentLink(addCommentPanel));
        add(setLemmaPanel);
        add(setPosPanel);
        add(addCommentPanel);
        add(lemmatisationPanel);
    }

    /**
     * Returns the list of columns of the data table.
     *
     * @return A list of columns.
     */
    private List<IColumn<Context, String>> getColumns() {
        List<IColumn<Context, String>> columns = new ArrayList<>();
        columns.add(new ContextRowSelectColumn(Model.of("")));
        columns.add(new NumberTextFilterColumn(Model.of(getString("Context.number")), "number", "number"));
        columns.add(new TextFilterColumn<>(Model.of(getString("Context.lemma")), "lemmaString", "lemmaString"));
        columns.add(new TextFilterColumn<>(Model.of(getString("Context.pos")), "posString", "posString"));
        columns.add(new TextFilterColumn<>(Model.of(getString("Context.location")), "location", "location"));
        columns.add(new PrecedingContextTextFilterColumn(Model.of(getString("Context.preceding")), "preceding",
                "preceding"));
        columns.add(new KeywordTextFilterColumn(Model.of(getString("Context.keyword")), "keyword", "keyword"));
        columns.add(new FollowingContextTextFilterColumn(Model.of(getString("Context.following")), "following",
                "following"));
        columns.add(new ContextBadgeColumn(Model.of("")));
        return columns;
    }

    /**
     * A row selection column for contexts.
     */
    private class ContextRowSelectColumn extends RowSelectColumn<Context, Context, String> {
        /**
         * Creates a row selection column.
         *
         * @param displayModel title of a column
         */
        public ContextRowSelectColumn(IModel<String> displayModel) {
            super(displayModel, "selected");
        }

        /**
         * Returns the CSS class of a column.
         *
         * @return A CSS class.
         */
        @Override
        public String getCssClass() {
            return "hidden";
        }
    }

    /**
     * A badge column for contexts.
     */
    private class ContextBadgeColumn extends BadgeColumn<Context, Context, String> {
        /**
         * Creates a badge column.
         *
         * @param displayModel title of a column
         */
        public ContextBadgeColumn(IModel<String> displayModel) {
            super(displayModel, "badge");
        }

        /**
         * Creates a badge panel showing the number of comments of a context; the
         * panel is hidden when the context has no comments.
         *
         * @param panelId ID of the panel
         * @param model model of the row item
         * @return A badge panel.
         */
        @Override
        public Panel createBadgePanel(String panelId, IModel<Context> model) {
            // Refresh the entity so the comment count reflects the database state.
            Context refreshedContext = new ContextDao().refresh(model.getObject());

            if (refreshedContext.getComments() != null && !refreshedContext.getComments().isEmpty()) {
                return new BadgePanel(panelId, model, String.valueOf(refreshedContext.getComments().size()), null);
            } else {
                return (Panel) new BadgePanel(panelId, model, null, "0").setVisible(false);
            }
        }

        /**
         * Called when a link inside a badge panel is clicked.
         *
         * @param target target that produces an Ajax response
         * @param model model of the row item
         */
        @Override
        public void onClick(AjaxRequestTarget target, IModel<Context> model) {
            commentSidebar.refresh(model, target);
            commentSidebar.toggle(target);
        }
    }

    /**
     * Implementation of a form component updating behavior for a filter text field.
     */
    private class FilterUpdatingBehavior extends AjaxFormComponentUpdatingBehavior {
        /**
         * The text field used as value filter component.
         */
        final TextField<String> valueTextField;

        /**
         * The text field used as property filter component.
         */
        final TextField<String> propertyTextField;

        /**
         * Data table displaying filtered data.
         */
        final LemmatisationDataTable dataTable;

        /**
         * Data provider which delivers data for the table.
         */
        final GenericDataProvider<Context> dataProvider;

        /**
         * Creates a behavior.
         *
         * @param valueTextField text field used as value filter component
         * @param propertyTextField text field used as property filter component
         * @param dataTable data table displaying filtered data
         * @param dataProvider data provider which delivers data for the table.
         */
        public FilterUpdatingBehavior(TextField<String> valueTextField, TextField<String> propertyTextField,
                LemmatisationDataTable dataTable, GenericDataProvider<Context> dataProvider) {
            super("input");
            this.valueTextField = valueTextField;
            this.propertyTextField = propertyTextField;
            this.dataTable = dataTable;
            this.dataProvider = dataProvider;
        }

        /**
         * Called when the text field content changes.
         *
         * @param target target that produces an Ajax response
         */
        @Override
        protected void onUpdate(AjaxRequestTarget target) {
            dataProvider.updateFilter(valueTextField.getModelObject(), propertyTextField.getModelObject());
            target.add(dataTable);
        }

        /**
         * Modifies Ajax request attributes: drop stale requests on the same
         * channel and throttle keystrokes to one request per 200 ms.
         *
         * @param attributes Ajax request attributes
         */
        @Override
        protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
            super.updateAjaxAttributes(attributes);
            attributes.setChannel(new AjaxChannel(getComponent().getId(), AjaxChannel.Type.DROP));
            attributes.setThrottlingSettings(new ThrottlingSettings(Duration.milliseconds(200)));
        }
    }

    /**
     * Adds a behavior to the filter text field which reacts to page scrolling.
     */
    private class PageScrollingBehavior extends Behavior {
        /**
         * Renders to the web response what the component wants to contribute:
         * a script that re-focuses the filter field when it scrolls into view
         * and nothing else currently has focus.
         *
         * @param component component object
         * @param response response object
         */
        @Override
        public void renderHead(Component component, IHeaderResponse response) {
            String javaScript = String.format("jQuery(window).scroll(function () { " +
                    "var focused = jQuery(':focus'), input = jQuery('#%s'); " +
                    "if (focused.length) { return; } " +
                    "if (input.length && input.isInViewport(input.height())) { " +
                    "input.focus(); } });", component.getMarkupId());
            response.render(OnDomReadyHeaderItem.forScript(javaScript));
        }
    }

    /**
     * A link which opens a set lemma dialog.
     */
    private final class SetLemmaLink extends AjaxLink<Context> {
        /**
         * Modal form panel which is shown on click.
         */
        private final ModalFormPanel setLemmaPanel;

        /**
         * Creates a set lemma link.
         */
        public SetLemmaLink(ModalFormPanel setLemmaPanel) {
            super("setLemmaLink");
            this.setLemmaPanel = setLemmaPanel;
        }

        /**
         * Called on click.
         *
         * @param target target that produces an Ajax response
         */
        @Override
        public void onClick(AjaxRequestTarget target) {
            setLemmaPanel.show(target);
        }
    }

    /**
     * A link which opens a set part of speech dialog.
     */
    private final class SetPosLink extends AjaxLink<Context> {
        /**
         * Modal form panel which is shown on click.
         */
        private final ModalFormPanel setPosPanel;

        /**
         * Creates a set part of speech link.
         */
        public SetPosLink(ModalFormPanel setPosPanel) {
            super("setPosLink");
            this.setPosPanel = setPosPanel;
        }

        /**
         * Called on click.
         *
         * @param target target that produces an Ajax response
         */
        @Override
        public void onClick(AjaxRequestTarget target) {
            setPosPanel.show(target);
        }
    }

    /**
     * A link which opens an add comment dialog.
     */
    private final class AddCommentLink extends AjaxLink<Context> {
        /**
         * Modal form panel which is shown on click.
         */
        private final ModalFormPanel addCommentPanel;

        /**
         * Creates a set comment link.
         */
        public AddCommentLink(ModalFormPanel addCommentPanel) {
            super("addCommentLink");
            this.addCommentPanel = addCommentPanel;
        }

        /**
         * Called on click.
         *
         * @param target target that produces an Ajax response
         */
        @Override
        public void onClick(AjaxRequestTarget target) {
            addCommentPanel.show(target);
        }
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.internal.EmptyArrays;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import javax.crypto.NoSuchPaddingException;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSessionContext;
import java.io.File;
import java.io.IOException;
import java.security.InvalidAlgorithmParameterException;
import java.security.KeyException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.Security;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static io.netty.util.internal.ObjectUtil.*;
/**
* An {@link SslContext} which uses JDK's SSL/TLS implementation.
*/
public abstract class JdkSslContext extends SslContext {
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(JdkSslContext.class);
    // Protocol name handed to SSLContext.getInstance(); selects the JDK's TLS provider.
    static final String PROTOCOL = "TLS";
    // Protocols enabled by default; populated by the static initializer below.
    static final String[] PROTOCOLS;
    // Default cipher suites (unmodifiable), chosen from the JDK's supported set.
    static final List<String> DEFAULT_CIPHERS;
    // Every cipher suite the default SSLEngine reports as supported.
    static final Set<String> SUPPORTED_CIPHERS;
    static {
        SSLContext context;
        int i;
        try {
            // Bootstrap a default TLS context purely to interrogate the JDK's
            // supported protocols and cipher suites.
            context = SSLContext.getInstance(PROTOCOL);
            context.init(null, null, null);
        } catch (Exception e) {
            // Nothing SSL-related can work without a default context, so fail hard.
            throw new Error("failed to initialize the default SSL context", e);
        }
        SSLEngine engine = context.createSSLEngine();
        // Choose the sensible default list of protocols.
        final String[] supportedProtocols = engine.getSupportedProtocols();
        Set<String> supportedProtocolsSet = new HashSet<String>(supportedProtocols.length);
        for (i = 0; i < supportedProtocols.length; ++i) {
            supportedProtocolsSet.add(supportedProtocols[i]);
        }
        List<String> protocols = new ArrayList<String>();
        // Prefer the TLS family, newest first; SSLv2/SSLv3 are deliberately not listed.
        addIfSupported(
                supportedProtocolsSet, protocols,
                "TLSv1.2", "TLSv1.1", "TLSv1");
        if (!protocols.isEmpty()) {
            PROTOCOLS = protocols.toArray(new String[protocols.size()]);
        } else {
            // None of the preferred protocols is available; fall back to whatever
            // the engine enables by default rather than failing outright.
            PROTOCOLS = engine.getEnabledProtocols();
        }
        // Choose the sensible default list of cipher suites.
        final String[] supportedCiphers = engine.getSupportedCipherSuites();
        SUPPORTED_CIPHERS = new HashSet<String>(supportedCiphers.length);
        for (i = 0; i < supportedCiphers.length; ++i) {
            SUPPORTED_CIPHERS.add(supportedCiphers[i]);
        }
        List<String> ciphers = new ArrayList<String>();
        // Keep only the preferred suites that this JDK actually supports; order
        // below expresses preference (ECDHE/GCM first).
        addIfSupported(
                SUPPORTED_CIPHERS, ciphers,
                // XXX: Make sure to sync this list with OpenSslEngineFactory.
                // GCM (Galois/Counter Mode) requires JDK 8.
                "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
                "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
                // AES256 requires JCE unlimited strength jurisdiction policy files.
                "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
                // GCM (Galois/Counter Mode) requires JDK 8.
                "TLS_RSA_WITH_AES_128_GCM_SHA256",
                "TLS_RSA_WITH_AES_128_CBC_SHA",
                // AES256 requires JCE unlimited strength jurisdiction policy files.
                "TLS_RSA_WITH_AES_256_CBC_SHA",
                "SSL_RSA_WITH_3DES_EDE_CBC_SHA");
        if (ciphers.isEmpty()) {
            // Use the default from JDK as fallback, excluding the broken RC4 family.
            for (String cipher : engine.getEnabledCipherSuites()) {
                if (cipher.contains("_RC4_")) {
                    continue;
                }
                ciphers.add(cipher);
            }
        }
        DEFAULT_CIPHERS = Collections.unmodifiableList(ciphers);
        if (logger.isDebugEnabled()) {
            logger.debug("Default protocols (JDK): {} ", Arrays.asList(PROTOCOLS));
            logger.debug("Default cipher suites (JDK): {}", DEFAULT_CIPHERS);
        }
    }
private static void addIfSupported(Set<String> supported, List<String> enabled, String... names) {
for (String n: names) {
if (supported.contains(n)) {
enabled.add(n);
}
}
}
private final String[] cipherSuites;
private final List<String> unmodifiableCipherSuites;
private final JdkApplicationProtocolNegotiator apn;
private final ClientAuth clientAuth;
JdkSslContext(Iterable<String> ciphers, CipherSuiteFilter cipherFilter, JdkApplicationProtocolNegotiator apn,
ClientAuth clientAuth) {
this.apn = checkNotNull(apn, "apn");
this.clientAuth = checkNotNull(clientAuth, "clientAuth");
cipherSuites = checkNotNull(cipherFilter, "cipherFilter").filterCipherSuites(
ciphers, DEFAULT_CIPHERS, SUPPORTED_CIPHERS);
unmodifiableCipherSuites = Collections.unmodifiableList(Arrays.asList(cipherSuites));
}
/**
* Returns the JDK {@link SSLContext} object held by this context.
*/
public abstract SSLContext context();
/**
* Returns the JDK {@link SSLSessionContext} object held by this context.
*/
@Override
public final SSLSessionContext sessionContext() {
if (isServer()) {
return context().getServerSessionContext();
} else {
return context().getClientSessionContext();
}
}
@Override
public final List<String> cipherSuites() {
return unmodifiableCipherSuites;
}
@Override
public final long sessionCacheSize() {
return sessionContext().getSessionCacheSize();
}
@Override
public final long sessionTimeout() {
return sessionContext().getSessionTimeout();
}
@Override
public final SSLEngine newEngine(ByteBufAllocator alloc) {
return configureAndWrapEngine(context().createSSLEngine());
}
@Override
public final SSLEngine newEngine(ByteBufAllocator alloc, String peerHost, int peerPort) {
return configureAndWrapEngine(context().createSSLEngine(peerHost, peerPort));
}
private SSLEngine configureAndWrapEngine(SSLEngine engine) {
engine.setEnabledCipherSuites(cipherSuites);
engine.setEnabledProtocols(PROTOCOLS);
engine.setUseClientMode(isClient());
if (isServer()) {
switch (clientAuth) {
case OPTIONAL:
engine.setWantClientAuth(true);
break;
case REQUIRE:
engine.setNeedClientAuth(true);
break;
}
}
return apn.wrapperFactory().wrapSslEngine(engine, apn, isServer());
}
@Override
public JdkApplicationProtocolNegotiator applicationProtocolNegotiator() {
return apn;
}
/**
* Translate a {@link ApplicationProtocolConfig} object to a {@link JdkApplicationProtocolNegotiator} object.
* @param config The configuration which defines the translation
* @param isServer {@code true} if a server {@code false} otherwise.
* @return The results of the translation
*/
static JdkApplicationProtocolNegotiator toNegotiator(ApplicationProtocolConfig config, boolean isServer) {
if (config == null) {
return JdkDefaultApplicationProtocolNegotiator.INSTANCE;
}
switch(config.protocol()) {
case NONE:
return JdkDefaultApplicationProtocolNegotiator.INSTANCE;
case ALPN:
if (isServer) {
switch(config.selectorFailureBehavior()) {
case FATAL_ALERT:
return new JdkAlpnApplicationProtocolNegotiator(true, config.supportedProtocols());
case NO_ADVERTISE:
return new JdkAlpnApplicationProtocolNegotiator(false, config.supportedProtocols());
default:
throw new UnsupportedOperationException(new StringBuilder("JDK provider does not support ")
.append(config.selectorFailureBehavior()).append(" failure behavior").toString());
}
} else {
switch(config.selectedListenerFailureBehavior()) {
case ACCEPT:
return new JdkAlpnApplicationProtocolNegotiator(false, config.supportedProtocols());
case FATAL_ALERT:
return new JdkAlpnApplicationProtocolNegotiator(true, config.supportedProtocols());
default:
throw new UnsupportedOperationException(new StringBuilder("JDK provider does not support ")
.append(config.selectedListenerFailureBehavior()).append(" failure behavior").toString());
}
}
case NPN:
if (isServer) {
switch(config.selectedListenerFailureBehavior()) {
case ACCEPT:
return new JdkNpnApplicationProtocolNegotiator(false, config.supportedProtocols());
case FATAL_ALERT:
return new JdkNpnApplicationProtocolNegotiator(true, config.supportedProtocols());
default:
throw new UnsupportedOperationException(new StringBuilder("JDK provider does not support ")
.append(config.selectedListenerFailureBehavior()).append(" failure behavior").toString());
}
} else {
switch(config.selectorFailureBehavior()) {
case FATAL_ALERT:
return new JdkNpnApplicationProtocolNegotiator(true, config.supportedProtocols());
case NO_ADVERTISE:
return new JdkNpnApplicationProtocolNegotiator(false, config.supportedProtocols());
default:
throw new UnsupportedOperationException(new StringBuilder("JDK provider does not support ")
.append(config.selectorFailureBehavior()).append(" failure behavior").toString());
}
}
default:
throw new UnsupportedOperationException(new StringBuilder("JDK provider does not support ")
.append(config.protocol()).append(" protocol").toString());
}
}
/**
* Build a {@link KeyManagerFactory} based upon a key file, key file password, and a certificate chain.
* @param certChainFile a X.509 certificate chain file in PEM format
* @param keyFile a PKCS#8 private key file in PEM format
* @param keyPassword the password of the {@code keyFile}.
* {@code null} if it's not password-protected.
* @param kmf The existing {@link KeyManagerFactory} that will be used if not {@code null}
* @return A {@link KeyManagerFactory} based upon a key file, key file password, and a certificate chain.
* @deprecated will be removed.
*/
@Deprecated
protected static KeyManagerFactory buildKeyManagerFactory(File certChainFile, File keyFile, String keyPassword,
KeyManagerFactory kmf)
throws UnrecoverableKeyException, KeyStoreException, NoSuchAlgorithmException,
NoSuchPaddingException, InvalidKeySpecException, InvalidAlgorithmParameterException,
CertificateException, KeyException, IOException {
String algorithm = Security.getProperty("ssl.KeyManagerFactory.algorithm");
if (algorithm == null) {
algorithm = "SunX509";
}
return buildKeyManagerFactory(certChainFile, algorithm, keyFile, keyPassword, kmf);
}
static KeyManagerFactory buildKeyManagerFactory(X509Certificate[] certChain, PrivateKey key, String keyPassword,
KeyManagerFactory kmf)
throws UnrecoverableKeyException, KeyStoreException, NoSuchAlgorithmException,
CertificateException, IOException {
String algorithm = Security.getProperty("ssl.KeyManagerFactory.algorithm");
if (algorithm == null) {
algorithm = "SunX509";
}
return buildKeyManagerFactory(certChain, algorithm, key, keyPassword, kmf);
}
/**
* Build a {@link KeyManagerFactory} based upon a key algorithm, key file, key file password,
* and a certificate chain.
* @param certChainFile a X.509 certificate chain file in PEM format
* @param keyAlgorithm the standard name of the requested algorithm. See the Java Secure Socket Extension
* Reference Guide for information about standard algorithm names.
* @param keyFile a PKCS#8 private key file in PEM format
* @param keyPassword the password of the {@code keyFile}.
* {@code null} if it's not password-protected.
* @param kmf The existing {@link KeyManagerFactory} that will be used if not {@code null}
* @return A {@link KeyManagerFactory} based upon a key algorithm, key file, key file password,
* and a certificate chain.
* @deprecated will be removed.
*/
@Deprecated
protected static KeyManagerFactory buildKeyManagerFactory(File certChainFile,
String keyAlgorithm, File keyFile, String keyPassword, KeyManagerFactory kmf)
throws KeyStoreException, NoSuchAlgorithmException, NoSuchPaddingException,
InvalidKeySpecException, InvalidAlgorithmParameterException, IOException,
CertificateException, KeyException, UnrecoverableKeyException {
return buildKeyManagerFactory(toX509Certificates(certChainFile), keyAlgorithm,
toPrivateKey(keyFile, keyPassword), keyPassword, kmf);
}
static KeyManagerFactory buildKeyManagerFactory(X509Certificate[] certChainFile,
String keyAlgorithm, PrivateKey key,
String keyPassword, KeyManagerFactory kmf)
throws KeyStoreException, NoSuchAlgorithmException, IOException,
CertificateException, UnrecoverableKeyException {
char[] keyPasswordChars = keyPassword == null ? EmptyArrays.EMPTY_CHARS : keyPassword.toCharArray();
KeyStore ks = buildKeyStore(certChainFile, key, keyPasswordChars);
// Set up key manager factory to use our key store
if (kmf == null) {
kmf = KeyManagerFactory.getInstance(keyAlgorithm);
}
kmf.init(ks, keyPasswordChars);
return kmf;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.hdfs;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.sentry.provider.db.SentryPolicyStorePlugin.SentryPluginException;
import org.apache.sentry.provider.db.service.persistent.HAContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Forwards update notifications (from the HMS plug-in or an upstream cache) to a wrapped
 * {@link Updateable} and maintains a bounded update log that downstream caches can replay
 * via {@link #getAllUpdatesFrom(long)}.
 *
 * Updates are applied strictly in arrival order on a dedicated single-threaded executor.
 */
public class UpdateForwarder<K extends Updateable.Update> implements
    Updateable<K>, Closeable {

  /** Source of full snapshots used to (re)initialize or resynchronize the cache. */
  interface ExternalImageRetriever<K> {
    K retrieveFullImage(long currSeqNum);
  }

  // Highest sequence number observed in incoming notifications.
  private final AtomicLong lastSeenSeqNum = new AtomicLong(0);
  // Highest sequence number whose update has been applied and appended to the log.
  protected final AtomicLong lastCommittedSeqNum = new AtomicLong(0);
  // Updates should be handled in order, so they are funneled through a single-threaded
  // executor. Declared as ExecutorService (not Executor) so close() can shut down the
  // worker thread; previously it leaked a non-daemon thread.
  private final java.util.concurrent.ExecutorService updateHandler =
      Executors.newSingleThreadExecutor();
  // Update log is used when propagating updates to a downstream cache.
  // The preUpdate log stores all commits that were applied to this cache.
  // When the update log is filled to capacity (getMaxUpdateLogSize()), all
  // entries are cleared and a compact image of the state of the cache is
  // appended to the log.
  // The first entry in an update log (consequently the first preUpdate a
  // downstream cache sees) will be a full image. All subsequent entries are
  // partial edits.
  protected final LinkedList<K> updateLog = new LinkedList<K>();
  // UpdateLog is disabled when getMaxUpdateLogSize() = 0;
  private final int maxUpdateLogSize;

  private final ExternalImageRetriever<K> imageRetreiver;
  private volatile Updateable<K> updateable;
  private final ReadWriteLock lock = new ReentrantReadWriteLock();
  // Sentinel sequence number used before the first real update arrives.
  protected static final long INIT_SEQ_NUM = -2;
  protected static final int INIT_UPDATE_RETRY_DELAY = 5000;
  private static final Logger LOGGER = LoggerFactory.getLogger(UpdateForwarder.class);
  private static final String UPDATABLE_TYPE_NAME = "update_forwarder";

  public UpdateForwarder(Configuration conf, Updateable<K> updateable,
      ExternalImageRetriever<K> imageRetreiver, int maxUpdateLogSize) {
    this(conf, updateable, imageRetreiver, maxUpdateLogSize, INIT_UPDATE_RETRY_DELAY);
  }

  public UpdateForwarder(Configuration conf, Updateable<K> updateable, //NOPMD
      ExternalImageRetriever<K> imageRetreiver, int maxUpdateLogSize,
      int initUpdateRetryDelay) {
    this.maxUpdateLogSize = maxUpdateLogSize;
    this.imageRetreiver = imageRetreiver;
    if (imageRetreiver != null) {
      // Bootstrap from an external full image, retrying in the background on failure.
      spawnInitialUpdater(updateable, initUpdateRetryDelay);
    } else {
      this.updateable = updateable;
    }
  }

  public static <K extends Updateable.Update> UpdateForwarder<K> create(Configuration conf,
      Updateable<K> updateable, K update, ExternalImageRetriever<K> imageRetreiver,
      int maxUpdateLogSize) throws SentryPluginException {
    return create(conf, updateable, update, imageRetreiver, maxUpdateLogSize,
        INIT_UPDATE_RETRY_DELAY);
  }

  /**
   * Factory: returns the HA-aware variant when HA is enabled in the configuration,
   * otherwise a plain UpdateForwarder.
   */
  public static <K extends Updateable.Update> UpdateForwarder<K> create(Configuration conf,
      Updateable<K> updateable, K update, ExternalImageRetriever<K> imageRetreiver,
      int maxUpdateLogSize, int initUpdateRetryDelay) throws SentryPluginException {
    if (HAContext.isHaEnabled(conf)) {
      return new UpdateForwarderWithHA<K>(conf, updateable, update, imageRetreiver,
          maxUpdateLogSize, initUpdateRetryDelay);
    } else {
      return new UpdateForwarder<K>(conf, updateable, imageRetreiver,
          maxUpdateLogSize, initUpdateRetryDelay);
    }
  }

  /**
   * Fetches the initial full image synchronously; if that fails, starts a background
   * retry thread that keeps polling until a full image is applied or it is interrupted.
   */
  private void spawnInitialUpdater(final Updateable<K> updateable,
      final int initUpdateRetryDelay) {
    K firstFullImage = null;
    try {
      firstFullImage = imageRetreiver.retrieveFullImage(INIT_SEQ_NUM);
    } catch (Exception e) {
      LOGGER.warn("InitialUpdater encountered exception !! ", e);
      firstFullImage = null;
      Thread initUpdater = new Thread() {
        @Override
        public void run() {
          // Keep retrying until a full image has been applied (updateable != null).
          while (UpdateForwarder.this.updateable == null) {
            try {
              Thread.sleep(initUpdateRetryDelay);
            } catch (InterruptedException e) {
              LOGGER.warn("Thread interrupted !! ", e);
              break;
            }
            K fullImage = null;
            try {
              fullImage =
                  UpdateForwarder.this.imageRetreiver
                  .retrieveFullImage(INIT_SEQ_NUM);
              appendToUpdateLog(fullImage);
            } catch (Exception e) {
              LOGGER.warn("InitialUpdater encountered exception !! ", e);
            }
            if (fullImage != null) {
              UpdateForwarder.this.updateable = updateable.updateFull(fullImage);
            }
          }
        }
      };
      // Daemon so a permanently failing retriever cannot block JVM shutdown.
      initUpdater.setDaemon(true);
      initUpdater.start();
    }
    if (firstFullImage != null) {
      appendToUpdateLog(firstFullImage);
      this.updateable = updateable.updateFull(firstFullImage);
    }
  }

  /**
   * Handle notifications from HMS plug-in or upstream Cache
   * @param update
   */
  public void handleUpdateNotification(final K update) throws SentryPluginException {
    // Correct the seqNums on the first update
    if (lastCommittedSeqNum.get() == INIT_SEQ_NUM) {
      K firstUpdate = getUpdateLog().peek();
      long firstSeqNum = update.getSeqNum() - 1;
      if (firstUpdate != null) {
        firstUpdate.setSeqNum(firstSeqNum);
      }
      lastCommittedSeqNum.set(firstSeqNum);
      lastSeenSeqNum.set(firstSeqNum);
    }
    // Detect a gap: the notification's seqNum should be exactly lastSeen + 1.
    final boolean editNotMissed =
        lastSeenSeqNum.incrementAndGet() == update.getSeqNum();
    if (!editNotMissed) {
      lastSeenSeqNum.set(update.getSeqNum());
    }
    Runnable task = new Runnable() {
      @Override
      public void run() {
        K toUpdate = update;
        if (update.hasFullImage()) {
          updateable = updateable.updateFull(update);
        } else {
          if (editNotMissed) {
            // apply partial preUpdate
            updateable.updatePartial(Collections.singletonList(update), lock);
          } else {
            // An edit was missed: recover by fetching a full image, if possible.
            if (imageRetreiver != null) {
              toUpdate = imageRetreiver
                  .retrieveFullImage(update.getSeqNum());
              updateable = updateable.updateFull(toUpdate);
            }
          }
        }
        appendToUpdateLog(toUpdate);
      }
    };
    // Serialized on the single-threaded executor to preserve update ordering.
    updateHandler.execute(task);
  }

  /**
   * Appends an update to the log, compacting the log to a single full image when it
   * reaches capacity (or when the update itself is a full image), and advances
   * lastCommittedSeqNum.
   */
  protected void appendToUpdateLog(K update) {
    synchronized (getUpdateLog()) {
      boolean logCompacted = false;
      if (getMaxUpdateLogSize() > 0) {
        if (update.hasFullImage() || getUpdateLog().size() == getMaxUpdateLogSize()) {
          // Essentially a log compaction
          getUpdateLog().clear();
          getUpdateLog().add(update.hasFullImage() ? update
              : createFullImageUpdate(update.getSeqNum()));
          logCompacted = true;
        } else {
          getUpdateLog().add(update);
        }
      }
      lastCommittedSeqNum.set(update.getSeqNum());
      if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("#### Appending to Update Log ["
            + "type=" + update.getClass() + ", "
            + "lastCommit=" + lastCommittedSeqNum.get() + ", "
            + "lastSeen=" + lastSeenSeqNum.get() + ", "
            + "logCompacted=" + logCompacted + "]");
      }
    }
  }

  /**
   * Return all updates from requested seqNum (inclusive)
   * @param seqNum
   * @return
   */
  public List<K> getAllUpdatesFrom(long seqNum) {
    List<K> retVal = new LinkedList<K>();
    synchronized (getUpdateLog()) {
      long currSeqNum = lastCommittedSeqNum.get();
      if (LOGGER.isDebugEnabled() && updateable != null) {
        LOGGER.debug("#### GetAllUpdatesFrom ["
            + "type=" + updateable.getClass() + ", "
            + "reqSeqNum=" + seqNum + ", "
            + "lastCommit=" + lastCommittedSeqNum.get() + ", "
            + "lastSeen=" + lastSeenSeqNum.get() + ", "
            + "updateLogSize=" + getUpdateLog().size() + "]");
      }
      if (getMaxUpdateLogSize() == 0) {
        // no updatelog configured..
        return retVal;
      }
      K head = getUpdateLog().peek();
      if (head == null) {
        return retVal;
      }
      if (seqNum > currSeqNum + 1) {
        // This process has probably restarted since downstream
        // received last update
        retVal.addAll(getUpdateLog());
        return retVal;
      }
      if (head.getSeqNum() > seqNum) {
        // Caller has diverged greatly..
        if (head.hasFullImage()) {
          // head is a refresh(full) image
          // Send full image along with partial updates
          for (K u : getUpdateLog()) {
            retVal.add(u);
          }
        } else {
          // Create a full image
          // clear updateLog
          // add fullImage to head of Log
          // NOTE : This should ideally never happen
          K fullImage = createFullImageUpdate(currSeqNum);
          getUpdateLog().clear();
          getUpdateLog().add(fullImage);
          retVal.add(fullImage);
        }
      } else {
        // increment iterator to requested seqNum
        Iterator<K> iter = getUpdateLog().iterator();
        while (iter.hasNext()) {
          K elem = iter.next();
          if (elem.getSeqNum() >= seqNum) {
            retVal.add(elem);
          }
        }
      }
    }
    return retVal;
  }

  // NOTE: method name retains the historical spelling for caller compatibility.
  public boolean areAllUpdatesCommited() {
    return lastCommittedSeqNum.get() == lastSeenSeqNum.get();
  }

  public long getLastCommitted() {
    return lastCommittedSeqNum.get();
  }

  public long getLastSeen() {
    return lastSeenSeqNum.get();
  }

  @Override
  public Updateable<K> updateFull(K update) {
    return (updateable != null) ? updateable.updateFull(update) : null;
  }

  @Override
  public void updatePartial(Iterable<K> updates, ReadWriteLock lock) {
    if (updateable != null) {
      updateable.updatePartial(updates, lock);
    }
  }

  @Override
  public long getLastUpdatedSeqNum() {
    return (updateable != null) ? updateable.getLastUpdatedSeqNum() : INIT_SEQ_NUM;
  }

  @Override
  public K createFullImageUpdate(long currSeqNum) {
    return (updateable != null) ? updateable.createFullImageUpdate(currSeqNum) : null;
  }

  @Override
  public String getUpdateableTypeName() {
    return UPDATABLE_TYPE_NAME;
  }

  protected LinkedList<K> getUpdateLog() {
    return updateLog;
  }

  protected int getMaxUpdateLogSize() {
    return maxUpdateLogSize;
  }

  @Override
  public void close() throws IOException {
    // Stop the ordered update-dispatch worker; the previous empty close()
    // leaked the executor's non-daemon thread.
    updateHandler.shutdown();
  }
}
| |
package com.salama.service.clouddata.util;
import MetoXML.AbstractReflectInfoCachedSerializer;
import MetoXML.Util.ITreeNode;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
*
* @author XingGu Liu
*
*/
/**
 * Utility for converting between JavaBeans and JSON (org.json) representations.
 *
 * @author XingGu Liu
 */
public class SimpleJSONDataUtil extends AbstractReflectInfoCachedSerializer {
    private static final Logger logger = Logger.getLogger(SimpleJSONDataUtil.class);

    /** Wraps a bean in a {@link JSONObject} via its public getters. */
    public static JSONObject convertObjectToJSONObject(Object obj) {
        return new JSONObject(obj);
    }

    /** Serializes a bean to a JSON string. */
    public static String convertObjectToJSON(Object obj) {
        JSONObject json = new JSONObject(obj);
        return json.toString();
    }

    /** Serializes each list element as a JSONObject and collects them into a JSONArray. */
    public static JSONArray convertListObjectToJSONArray(List<?> objList) {
        JSONArray jsonArray = new JSONArray();
        for(Object obj : objList) {
            jsonArray.put(new JSONObject(obj));
        }
        return jsonArray;
    }

    /** Serializes a list of beans to a JSON array string. */
    public static String convertListObjectToJSON(List<?> objList) {
        return convertListObjectToJSONArray(objList).toString();
    }

    /**
     * Deserializes a JSON array string into a list of beans of the given element type.
     *
     * @param jsonString JSON array text; every element must be a JSON object
     * @param objType    element bean type (must have a no-arg constructor)
     */
    public static <T> List<T> convertJSONToListObject(String jsonString, Class<T> objType)
            throws IntrospectionException, IllegalAccessException, InstantiationException, JSONException,
            ParseException, InvocationTargetException{
        JSONArray jsonArray = new JSONArray(jsonString);
        List<T> listData = new ArrayList<T>();
        int length = jsonArray.length();
        JSONObject jsonObject = null;
        T data = null;
        for(int i = 0; i < length; i++) {
            jsonObject = jsonArray.getJSONObject(i);
            data = (T) convertJSONToObject(jsonObject, objType);
            listData.add(data);
        }
        return listData;
    }

    /** Parses the JSON text and delegates to {@link #convertJSONToObject(JSONObject, Class)}. */
    public static Object convertJSONToObject(String jsonString, Class<?> objType)
            throws IntrospectionException, IllegalAccessException, InstantiationException, JSONException,
            ParseException, InvocationTargetException {
        JSONObject json = new JSONObject(jsonString);
        return convertJSONToObject(json, objType);
    }

    /**
     * Populates a new instance of {@code objType} from {@code jsonObject}, matching JSON keys
     * to writable bean properties. Properties absent from the JSON are left at their defaults.
     */
    public static Object convertJSONToObject(JSONObject jsonObject, Class<?> objType)
            throws IntrospectionException, IllegalAccessException, InstantiationException, JSONException,
            ParseException, InvocationTargetException {
        PropertyDescriptor[] properties = findPropertyDescriptorArray(objType);
        Object data = objType.newInstance();
        PropertyDescriptor property;
        String propName;
        for(int i = 0; i < properties.length; i++) {
            property = properties[i];
            propName = property.getName();
            // Every bean exposes a synthetic "class" property; skip it.
            if(propName.equals("class")) {
                continue;
            }
            if(!jsonObject.has(propName)) {
                continue;
            }
            Object value;
            try {
                if(isList(property.getPropertyType())) {
                    String strValue = jsonObject.get(propName).toString();
                    // BUGFIX: previously the containing bean's type (objType) was passed here,
                    // so list elements were deserialized as the wrong class. Resolve the actual
                    // element type from the getter's generic return type instead.
                    Class<?> elementType = resolveListElementType(property);
                    value = convertJSONToListObject(strValue, elementType);
                } else if(isArray(property.getPropertyType())) {
                    JSONArray jsonArray = (JSONArray) jsonObject.get(propName);
                    Class<?> elementType = property.getPropertyType().getComponentType();
                    value = Array.newInstance(elementType, jsonArray.length());
                    // NOTE(review): assumes each array element is a JSON object (bean), not a
                    // primitive/string — getJSONObject(k) would throw otherwise.
                    for(int k = 0; k < jsonArray.length(); k++) {
                        JSONObject jsonObjInArray = jsonArray.getJSONObject(k);
                        Array.set(value, k, convertJSONToObject(jsonObjInArray, elementType));
                    }
                } else if(property.getPropertyType() == String.class) {
                    value = jsonObject.getString(propName);
                } else {
                    String strValue = jsonObject.get(propName).toString();
                    value = Convert(property.getPropertyType(), strValue);
                }
            } catch(JSONException e) {
                // Value missing/mismatched in JSON: fall back to the type's default value.
                logger.warn("convertJSONToObject()", e);
                value = Convert(property.getPropertyType(), null);
            }
            property.getWriteMethod().invoke(data, value);
        }
        return data;
    }

    /**
     * Best-effort resolution of the element type of a {@code List}-typed bean property
     * from the getter's generic signature (e.g. {@code List<Foo> getFoos()} -> {@code Foo}).
     * Falls back to {@code Object.class} for raw or wildcard lists.
     */
    private static Class<?> resolveListElementType(PropertyDescriptor property) {
        if (property.getReadMethod() != null) {
            java.lang.reflect.Type genericType = property.getReadMethod().getGenericReturnType();
            if (genericType instanceof java.lang.reflect.ParameterizedType) {
                java.lang.reflect.Type[] typeArgs =
                        ((java.lang.reflect.ParameterizedType) genericType).getActualTypeArguments();
                if (typeArgs.length == 1 && typeArgs[0] instanceof Class) {
                    return (Class<?>) typeArgs[0];
                }
            }
        }
        // Raw or wildcard List: element type is not recoverable.
        return Object.class;
    }

    /**
     * Converts a string value to an instance of the requested type. {@code null} input
     * yields the type's zero/default value for primitives and wrappers, and {@code null}
     * for date/time types. Unknown types are treated as nested JSON beans.
     */
    public static Object Convert(Class<?> type, String valueStr) throws IllegalAccessException, ParseException, IntrospectionException, InstantiationException, JSONException, InvocationTargetException {
        // SimpleDateFormat is not thread-safe; instances are method-local on purpose.
        SimpleDateFormat JavaUtilDateFormatForParse = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.ENGLISH);
        SimpleDateFormat JavaSqlTimeStampFormatForParse = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        SimpleDateFormat JavaSqlDateFormatForParse = new SimpleDateFormat("yyyy-MM-dd");
        Class<?> cls = (Class<?>) type;
        if(cls == String.class) {
            return valueStr;
        } else if(cls == boolean.class) {
            return valueStr == null? Boolean.valueOf(false) : Boolean.valueOf(valueStr);
        } else if(cls == byte.class) {
            return valueStr == null? Byte.valueOf((byte)0) : Byte.valueOf(valueStr);
        } else if(cls == short.class) {
            return valueStr == null? Short.valueOf((short)0) : Short.valueOf(valueStr);
        } else if(cls == int.class) {
            return valueStr == null? Integer.valueOf(0) : Integer.valueOf(valueStr);
        } else if(cls == long.class) {
            return valueStr == null? Long.valueOf(0) : Long.valueOf(valueStr);
        } else if(cls == float.class) {
            return valueStr == null? Float.valueOf(0) : Float.valueOf(valueStr);
        } else if(cls == double.class) {
            return valueStr == null? Double.valueOf(0) : Double.valueOf(valueStr);
        } else if(cls == char.class) {
            // Guard against empty strings (previously threw StringIndexOutOfBoundsException).
            return valueStr == null || valueStr.isEmpty() ? Character.valueOf('\0') : valueStr.charAt(0);
        } else if(cls == Boolean.class) {
            return valueStr == null? Boolean.valueOf(false) : Boolean.valueOf(valueStr);
        } else if(cls == Byte.class) {
            return valueStr == null? Byte.valueOf((byte)0) : Byte.valueOf(valueStr);
        } else if(cls == Short.class) {
            return valueStr == null? Short.valueOf((short)0) : Short.valueOf(valueStr);
        } else if(cls == Integer.class) {
            return valueStr == null? Integer.valueOf(0) : Integer.valueOf(valueStr);
        } else if(cls == Long.class) {
            return valueStr == null? Long.valueOf(0) : Long.valueOf(valueStr);
        } else if(cls == Float.class) {
            return valueStr == null? Float.valueOf(0) : Float.valueOf(valueStr);
        } else if(cls == Double.class) {
            return valueStr == null? Double.valueOf(0) : Double.valueOf(valueStr);
        } else if(cls == Character.class) {
            // Guard against empty strings (previously threw StringIndexOutOfBoundsException).
            return valueStr == null || valueStr.isEmpty() ? Character.valueOf('\0') : valueStr.charAt(0);
        } else if(cls == Date.class) {
            return valueStr == null? null : JavaUtilDateFormatForParse.parse(valueStr);
        } else if(cls == java.sql.Date.class) {
            if(valueStr == null) {
                return null;
            } else {
                Date date = JavaSqlDateFormatForParse.parse(valueStr);
                return (new java.sql.Date(date.getTime()));
            }
        } else if(cls == java.sql.Timestamp.class) {
            if(valueStr == null) {
                return null;
            } else {
                Date date = JavaSqlTimeStampFormatForParse.parse(valueStr);
                return new Timestamp(date.getTime());
            }
        } else if(cls == BigDecimal.class) {
            return valueStr == null? BigDecimal.valueOf(0) : (new BigDecimal(valueStr));
        } else {
            // Anything else is assumed to be a nested bean encoded as a JSON object.
            return convertJSONToObject(valueStr, cls);
        }
    }

    @Override
    protected void BackwardToNode(ITreeNode arg0, int arg1) {
        //do nothing
    }

    @Override
    protected void ForwardToNode(ITreeNode arg0, int arg1, boolean arg2) {
        //do nothing
    }

    private static boolean isArray(Class<?> cls) {
        return cls.isArray();
    }

    private static boolean isList(Class<?> cls) {
        return List.class.isAssignableFrom(cls);
    }
}
| |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2008-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package griffon.javafx.beans.binding;
import griffon.annotations.core.Nonnull;
import javafx.beans.InvalidationListener;
import javafx.beans.binding.BooleanBinding;
import javafx.beans.binding.MapExpression;
import javafx.beans.binding.ObjectBinding;
import javafx.beans.binding.StringBinding;
import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyIntegerProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.MapChangeListener;
import javafx.collections.ObservableMap;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import static java.util.Objects.requireNonNull;
/**
* @author Andres Almiray
* @since 3.0.0
*/
/**
 * A decorator for {@link MapExpression}: every operation is forwarded, unchanged,
 * to the wrapped expression obtained via {@link #getDelegate()}. Subclasses may
 * override individual methods to intercept specific calls.
 *
 * @author Andres Almiray
 * @since 3.0.0
 */
public class MapExpressionDecorator<K, V> extends MapExpression<K, V> {
    private final MapExpression<K, V> delegate;

    public MapExpressionDecorator(@Nonnull MapExpression<K, V> delegate) {
        this.delegate = requireNonNull(delegate, "Argument 'delegate' must not be null");
    }

    /** The wrapped expression every call is forwarded to. */
    @Nonnull
    protected final MapExpression<K, V> getDelegate() {
        return delegate;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        return getDelegate().equals(o);
    }

    @Override
    public int hashCode() {
        return getDelegate().hashCode();
    }

    @Override
    public String toString() {
        return getClass().getName() + ":" + getDelegate().toString();
    }

    @Override
    public ObservableMap<K, V> getValue() {
        return getDelegate().getValue();
    }

    @Override
    public int getSize() {
        return getDelegate().getSize();
    }

    @Override
    public ReadOnlyIntegerProperty sizeProperty() {
        return getDelegate().sizeProperty();
    }

    @Override
    public ReadOnlyBooleanProperty emptyProperty() {
        return getDelegate().emptyProperty();
    }

    @Override
    public ObjectBinding<V> valueAt(K key) {
        return getDelegate().valueAt(key);
    }

    @Override
    public ObjectBinding<V> valueAt(ObservableValue<K> key) {
        return getDelegate().valueAt(key);
    }

    @Override
    public BooleanBinding isEqualTo(ObservableMap<?, ?> other) {
        return getDelegate().isEqualTo(other);
    }

    @Override
    public BooleanBinding isNotEqualTo(ObservableMap<?, ?> other) {
        return getDelegate().isNotEqualTo(other);
    }

    @Override
    public BooleanBinding isNull() {
        return getDelegate().isNull();
    }

    @Override
    public BooleanBinding isNotNull() {
        return getDelegate().isNotNull();
    }

    @Override
    public StringBinding asString() {
        return getDelegate().asString();
    }

    @Override
    public int size() {
        return getDelegate().size();
    }

    @Override
    public boolean isEmpty() {
        return getDelegate().isEmpty();
    }

    @Override
    public boolean containsKey(Object obj) {
        return getDelegate().containsKey(obj);
    }

    @Override
    public boolean containsValue(Object obj) {
        return getDelegate().containsValue(obj);
    }

    @Override
    public V put(K key, V value) {
        return getDelegate().put(key, value);
    }

    @Override
    public V remove(Object obj) {
        return getDelegate().remove(obj);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> elements) {
        getDelegate().putAll(elements);
    }

    @Override
    public void clear() {
        getDelegate().clear();
    }

    @Override
    public Set<K> keySet() {
        return getDelegate().keySet();
    }

    @Override
    public Collection<V> values() {
        return getDelegate().values();
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        return getDelegate().entrySet();
    }

    @Override
    public V get(Object key) {
        return getDelegate().get(key);
    }

    @Override
    public ObservableMap<K, V> get() {
        return getDelegate().get();
    }

    @Override
    public void addListener(ChangeListener<? super ObservableMap<K, V>> listener) {
        getDelegate().addListener(listener);
    }

    @Override
    public void removeListener(ChangeListener<? super ObservableMap<K, V>> listener) {
        getDelegate().removeListener(listener);
    }

    @Override
    public void addListener(InvalidationListener listener) {
        getDelegate().addListener(listener);
    }

    @Override
    public void removeListener(InvalidationListener listener) {
        getDelegate().removeListener(listener);
    }

    @Override
    public void addListener(MapChangeListener<? super K, ? super V> listener) {
        getDelegate().addListener(listener);
    }

    @Override
    public void removeListener(MapChangeListener<? super K, ? super V> listener) {
        getDelegate().removeListener(listener);
    }
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.plus.webapp;
import java.lang.reflect.InvocationTargetException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.net.URL;
import javax.naming.Context;
import javax.naming.InitialContext;
import org.eclipse.jetty.webapp.Descriptor;
import org.eclipse.jetty.webapp.FragmentDescriptor;
import org.eclipse.jetty.webapp.Origin;
import org.eclipse.jetty.webapp.WebAppClassLoader;
import org.eclipse.jetty.webapp.WebAppContext;
import org.eclipse.jetty.webapp.WebDescriptor;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* PlusDescriptorProcessorTest
*
*
*/
public class PlusDescriptorProcessorTest
{
    protected WebDescriptor webDescriptor;
    protected FragmentDescriptor fragDescriptor1;
    protected FragmentDescriptor fragDescriptor2;
    protected FragmentDescriptor fragDescriptor3;
    protected FragmentDescriptor fragDescriptor4;
    protected WebAppContext context;
    /**
     * Creates the java:comp/env JNDI subcontext, registers a test DataSource
     * under jdbc/mydatasource, and parses web.xml plus the four web-fragment
     * descriptors exercised by the individual tests.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception
    {
        context = new WebAppContext();
        context.setClassLoader(new WebAppClassLoader(Thread.currentThread().getContextClassLoader(), context));
        // JNDI lookups are scoped to the thread's context classloader, so the
        // webapp's loader must be current while java:comp/env is created.
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        Context icontext = new InitialContext();
        Context compCtx = (Context)icontext.lookup ("java:comp");
        compCtx.createSubcontext("env");
        Thread.currentThread().setContextClassLoader(oldLoader);
        // Resource registers itself for the context as a side effect of
        // construction; the local variable is intentionally unused afterwards.
        org.eclipse.jetty.plus.jndi.Resource ds = new org.eclipse.jetty.plus.jndi.Resource (context, "jdbc/mydatasource", new Object());
        URL webXml = Thread.currentThread().getContextClassLoader().getResource("web.xml");
        webDescriptor = new WebDescriptor(org.eclipse.jetty.util.resource.Resource.newResource(webXml));
        webDescriptor.parse();
        URL frag1Xml = Thread.currentThread().getContextClassLoader().getResource("web-fragment-1.xml");
        fragDescriptor1 = new FragmentDescriptor(org.eclipse.jetty.util.resource.Resource.newResource(frag1Xml));
        fragDescriptor1.parse();
        URL frag2Xml = Thread.currentThread().getContextClassLoader().getResource("web-fragment-2.xml");
        fragDescriptor2 = new FragmentDescriptor(org.eclipse.jetty.util.resource.Resource.newResource(frag2Xml));
        fragDescriptor2.parse();
        URL frag3Xml = Thread.currentThread().getContextClassLoader().getResource("web-fragment-3.xml");
        fragDescriptor3 = new FragmentDescriptor(org.eclipse.jetty.util.resource.Resource.newResource(frag3Xml));
        fragDescriptor3.parse();
        URL frag4Xml = Thread.currentThread().getContextClassLoader().getResource("web-fragment-4.xml");
        fragDescriptor4 = new FragmentDescriptor(org.eclipse.jetty.util.resource.Resource.newResource(frag4Xml));
        fragDescriptor4.parse();
    }
    /**
     * Destroys the java:comp/env subcontext created in setUp, again under the
     * webapp's classloader so the same JNDI namespace is addressed.
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception
    {
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        Context ic = new InitialContext();
        Context compCtx = (Context)ic.lookup ("java:comp");
        compCtx.destroySubcontext("env");
        Thread.currentThread().setContextClassLoader(oldLoader);
    }
    /**
     * Processing a fragment that references an undeclared resource
     * (jdbc/mymissingdatasource in web-fragment-4.xml) must fail, and the
     * failure message must name the missing resource.
     */
    @Test
    public void testMissingResourceDeclaration()
    throws Exception
    {
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        try
        {
            PlusDescriptorProcessor pdp = new PlusDescriptorProcessor();
            pdp.process(context, fragDescriptor4);
            fail("Expected missing resource declaration");
        }
        catch (InvocationTargetException ex)
        {
            Throwable cause = ex.getCause();
            assertNotNull(cause);
            assertNotNull(cause.getMessage());
            assertTrue(cause.getMessage().contains("jdbc/mymissingdatasource"));
        }
        finally
        {
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
    }
    /**
     * A resource-ref declared in web.xml takes precedence: subsequent fragment
     * declarations of the same resource must be ignored without error.
     */
    @Test
    public void testWebXmlResourceDeclarations()
    throws Exception
    {
        //if declared in web.xml, fragment declarations ignored
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        try
        {
            PlusDescriptorProcessor pdp = new PlusDescriptorProcessor();
            pdp.process(context, webDescriptor);
            Descriptor d = context.getMetaData().getOriginDescriptor("resource-ref.jdbc/mydatasource");
            assertNotNull(d);
            assertTrue(d == webDescriptor);
            pdp.process(context, fragDescriptor1);
            pdp.process(context, fragDescriptor2);
        }
        finally
        {
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
    }
    /**
     * Conflicting resource-ref declarations across two fragments must be
     * rejected when no web.xml declaration overrides them.
     */
    @Test
    public void testMismatchedFragmentResourceDeclarations ()
    throws Exception
    {
        //if declared in more than 1 fragment, declarations must be the same
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        try
        {
            PlusDescriptorProcessor pdp = new PlusDescriptorProcessor();
            pdp.process(context, fragDescriptor1);
            Descriptor d = context.getMetaData().getOriginDescriptor("resource-ref.jdbc/mydatasource");
            assertNotNull(d);
            assertTrue(d == fragDescriptor1);
            assertEquals(Origin.WebFragment, context.getMetaData().getOrigin("resource-ref.jdbc/mydatasource"));
            pdp.process(context, fragDescriptor2);
            fail("Expected conflicting resource-ref declaration");
        }
        catch (Exception e)
        {
            //expected
            // NOTE(review): catching bare Exception would also mask unrelated
            // failures; narrowing to the specific conflict exception type
            // thrown by PlusDescriptorProcessor would strengthen this test.
        }
        finally
        {
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
    }
    /**
     * Identical resource-ref declarations in two fragments are compatible and
     * must both process without error.
     */
    @Test
    public void testMatchingFragmentResourceDeclarations ()
    throws Exception
    {
        //if declared in more than 1 fragment, declarations must be the same
        ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(context.getClassLoader());
        try
        {
            PlusDescriptorProcessor pdp = new PlusDescriptorProcessor();
            pdp.process(context, fragDescriptor1);
            Descriptor d = context.getMetaData().getOriginDescriptor("resource-ref.jdbc/mydatasource");
            assertNotNull(d);
            assertTrue(d == fragDescriptor1);
            assertEquals(Origin.WebFragment, context.getMetaData().getOrigin("resource-ref.jdbc/mydatasource"));
            pdp.process(context, fragDescriptor3);
        }
        finally
        {
            Thread.currentThread().setContextClassLoader(oldLoader);
        }
    }
}
| |
/**
* CompileEnvironment.java
* ---------------------------------
* Copyright (c) 2016
* RESOLVE Software Research Group
* School of Computing
* Clemson University
* All rights reserved.
* ---------------------------------
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
package edu.clemson.cs.rsrg.init;
import edu.clemson.cs.r2jt.rewriteprover.ProverListener;
import edu.clemson.cs.rsrg.absyn.declarations.moduledecl.ModuleDec;
import edu.clemson.cs.rsrg.statushandling.StatusHandler;
import edu.clemson.cs.rsrg.statushandling.WriterStatusHandler;
import edu.clemson.cs.rsrg.statushandling.exception.FlagDependencyException;
import edu.clemson.cs.rsrg.statushandling.exception.MiscErrorException;
import edu.clemson.cs.rsrg.init.file.ResolveFile;
import edu.clemson.cs.rsrg.init.flag.FlagManager;
import edu.clemson.cs.rsrg.misc.Utilities;
import edu.clemson.cs.rsrg.typeandpopulate.symboltables.ScopeRepository;
import edu.clemson.cs.rsrg.typeandpopulate.typereasoning.TypeGraph;
import edu.clemson.cs.rsrg.typeandpopulate.utilities.ModuleIdentifier;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * <p>This class stores all the objects and flags needed during
 * compilation.</p>
*
* @author Yu-Shan Sun
* @author Daniel Welch
* @version 1.0
*/
public class CompileEnvironment {

    // ===========================================================
    // Member Fields
    // ===========================================================

    /**
     * <p>This contains the absolute path to the RESOLVE workspace directory.</p>
     */
    private File myCompileDir = null;

    /**
     * <p>This contains all modules we have currently seen. This includes both complete
     * and incomplete modules. A module is complete when we are done processing it. An
     * incomplete module usually means that we are still processing its imports.</p>
     */
    private final Map<ModuleIdentifier, AbstractMap.SimpleEntry<ModuleDec, ResolveFile>> myCompilingModules;

    /**
     * <p>This map stores all external realizations for a particular concept.
     * The <code>Archiver</code> should be the only one that cares about these files.</p>
     */
    private final Map<ModuleIdentifier, File> myExternalRealizFiles;

    /**
     * <p>This is the default status handler for the RESOLVE compiler.</p>
     */
    private final StatusHandler myStatusHandler;

    /**
     * <p>This list stores all the incomplete modules.</p>
     */
    private final List<ModuleIdentifier> myIncompleteModules;

    /**
     * <p>This listener object provides instant feedback to the
     * interested party as soon as the prover is done processing a VC.</p>
     */
    private ProverListener myListener = null;

    /**
     * <p>The symbol table for the compiler.</p>
     */
    private ScopeRepository mySymbolTable = null;

    /**
     * <p>This is the math type graph that indicates relationship
     * between different math types.</p>
     */
    private TypeGraph myTypeGraph = null;

    /**
     * <p>This stores all user created files from the WebIDE/WebAPI.</p>
     */
    private Map<String, ResolveFile> myUserFileMap;

    // ===========================================================
    // Objects
    // ===========================================================

    /**
     * <p>This object contains all the flag objects that have been
     * created by the different modules.</p>
     */
    public final FlagManager flags;

    // ===========================================================
    // Constructors
    // ===========================================================

    /**
     * <p>Instantiates a compilation environment to store all
     * necessary modules, files and flags.</p>
     *
     * @param args The specified compiler arguments array.
     * @param compilerVersion The current compiler version.
     * @param statusHandler A status handler to display debug or error messages.
     *
     * @throws FlagDependencyException If the supplied flags are inconsistent.
     * @throws IOException If the error-log file cannot be created.
     */
    public CompileEnvironment(String[] args, String compilerVersion,
            StatusHandler statusHandler)
            throws FlagDependencyException,
            IOException {
        flags = new FlagManager(args);
        myCompilingModules =
                new HashMap<>();
        myExternalRealizFiles = new HashMap<>();
        myIncompleteModules = new LinkedList<>();
        myUserFileMap = new HashMap<>();

        // Check for custom workspace path
        String path = null;
        if (flags.isFlagSet(ResolveCompiler.FLAG_WORKSPACE_DIR)) {
            path =
                    flags.getFlagArgument(ResolveCompiler.FLAG_WORKSPACE_DIR,
                            "Path");
        }
        myCompileDir = Utilities.getWorkspaceDir(path);

        // Check for file error output flag
        // When set, the supplied status handler is replaced by one that writes
        // to a timestamped log file inside the workspace directory.
        if (flags.isFlagSet(ResolveCompiler.FLAG_DEBUG_FILE_OUT)) {
            Date date = new Date();
            SimpleDateFormat dateFormat =
                    new SimpleDateFormat("yyyy-MM-dd HH-mm-ss");
            File errorFile =
                    new File(myCompileDir, "Error-Log-"
                            + dateFormat.format(date) + ".log");
            statusHandler =
                    new WriterStatusHandler(new BufferedWriter(
                            new OutputStreamWriter(new FileOutputStream(
                                    errorFile), "utf-8")));
        }
        myStatusHandler = statusHandler;

        // Debugging information
        if (flags.isFlagSet(ResolveCompiler.FLAG_DEBUG)) {
            synchronized (System.out) {
                // Print Compiler Messages
                myStatusHandler.info(null, "RESOLVE Compiler/Verifier - "
                        + compilerVersion + " Version.");
                myStatusHandler.info(null, "\tUse -help flag for options.\n");
            }
        }
    }

    // ===========================================================
    // Public Methods
    // ===========================================================

    /**
     * <p>Remove the module associated with the <code>ModuleIdentifier</code>
     * from our incomplete module stack. This indicates the completion of
     * this module.</p>
     *
     * @param mid Completed module's identifier.
     */
    public void completeRecord(ModuleIdentifier mid) {
        assert myCompilingModules.containsKey(mid) : "We haven't seen a module with this ID yet!";
        assert myIncompleteModules.contains(mid) : "We already completed compilation for a module with this ID!";
        myIncompleteModules.remove(mid);

        // Print out debugging message
        if (flags.isFlagSet(ResolveCompiler.FLAG_DEBUG)) {
            myStatusHandler.info(null, "Completed record: " + mid.toString());
        }
    }

    /**
     * <p>Constructs a record containing the module id, the file, and the module
     * dec, and places it in the module environment. Also places the module into
     * a stack that indicates compilation has begun on this module but has not
     * completed.</p>
     *
     * @param file The original source file.
     * @param moduleDec The module representation declaration.
     */
    public void constructRecord(ResolveFile file, ModuleDec moduleDec) {
        ModuleIdentifier mid = new ModuleIdentifier(moduleDec);
        assert !myCompilingModules.containsKey(mid) : "We already compiled a module with this ID!";
        myCompilingModules.put(mid,
                new AbstractMap.SimpleEntry<>(moduleDec,
                        file));
        myIncompleteModules.add(mid);

        // Print out debugging message
        if (flags.isFlagSet(ResolveCompiler.FLAG_DEBUG)) {
            myStatusHandler.info(null, "Construct record: " + mid.toString());
        }
    }

    /**
     * <p>Returns true if the specified module is present in the compilation
     * environment, has an associated file and a valid module dec.</p>
     *
     * @param id The ID for the <code>ResolveFile</code> we want to search for.
     *
     * @return True if a record exists for this identifier. False otherwise.
     */
    public boolean containsID(ModuleIdentifier id) {
        return myCompilingModules.containsKey(id);
    }

    /**
     * <p>Returns the file associated with the specified id.</p>
     *
     * @param id The ID for the <code>ResolveFile</code> we want to search for.
     *
     * @return The associated <code>ResolveFile</code>. Note that the id must
     * already be present in the environment (see {@code containsID}).
     */
    public ResolveFile getFile(ModuleIdentifier id) {
        return myCompilingModules.get(id).getValue();
    }

    /**
     * <p>Returns the <code>ModuleDec</code> associated with the specified id.</p>
     *
     * @param id The ID for the <code>ResolveFile</code> we want to search for.
     *
     * @return The associated <code>ModuleDec</code>. Note that the id must
     * already be present in the environment (see {@code containsID}).
     */
    public ModuleDec getModuleAST(ModuleIdentifier id) {
        return myCompilingModules.get(id).getKey();
    }

    /**
     * <p>Adds this file as an externally realized file.</p>
     *
     * @param id The ID for the <code>ResolveFile</code> that we want to set as externally realized.
     * @param file The externally realized file.
     */
    public void addExternalRealizFile(ModuleIdentifier id, File file) {
        myExternalRealizFiles.put(id, file);
    }

    /**
     * <p>This checks to see if the module associated with this id is an externally
     * realized file.</p>
     *
     * @param id The ID for the <code>File</code> we want to search for.
     *
     * @return True if it is externally realized. False otherwise.
     */
    public boolean isExternalRealizFile(ModuleIdentifier id) {
        return myExternalRealizFiles.containsKey(id);
    }

    /**
     * <p>Returns the compiler's status handler object.</p>
     *
     * @return A {@link StatusHandler} object.
     */
    public StatusHandler getStatusHandler() {
        return myStatusHandler;
    }

    /**
     * <p>Returns a pointer to the current
     * RESOLVE workspace directory.</p>
     *
     * @return A <code>File</code> object
     */
    public File getWorkspaceDir() {
        return myCompileDir;
    }

    /**
     * <p>Returns the remaining arguments not handled by the
     * compile environment.</p>
     *
     * @return All the remaining arguments that the caller needs to handle.
     */
    public String[] getRemainingArgs() {
        return flags.getRemainingArgs();
    }

    /**
     * <p>The symbol table containing all symbol information.</p>
     *
     * @return The symbol table for the compiler, or null if it has not been set.
     */
    public ScopeRepository getSymbolTable() {
        return mySymbolTable;
    }

    /**
     * <p>The type graph containing all the type relationships.</p>
     *
     * @return The type graph for the compiler, or null if it has not been set.
     */
    public TypeGraph getTypeGraph() {
        return myTypeGraph;
    }

    /**
     * <p>Returns <code>ResolveFile</code> for the specified string
     * object. Notice that the pre-condition for this method is that
     * the key exist in the map.</p>
     *
     * @param key Name of the file.
     *
     * @return The <code>ResolveFile</code> object for the specified key.
     */
    public ResolveFile getUserFileFromMap(String key) {
        return myUserFileMap.get(key);
    }

    /**
     * <p>This checks to see if the file is a user created file from the
     * WebIDE/WebAPI.</p>
     *
     * @param key Name of the file.
     *
     * @return True if it is a user created file from the WebIDE/WebAPI.
     * False otherwise.
     */
    public boolean isMetaFile(String key) {
        return myUserFileMap.containsKey(key);
    }

    /**
     * <p>Used to set a map of user files when invoking the compiler from
     * the WebIDE/WebAPI.</p>
     *
     * @param fMap The map of user created files.
     */
    public void setFileMap(Map<String, ResolveFile> fMap) {
        myUserFileMap = fMap;
    }

    /**
     * <p>Adds a listener for the prover.</p>
     *
     * @param listener The listener object that is going to communicate
     *                 results from/to.
     */
    public void setProverListener(ProverListener listener) {
        myListener = listener;
    }

    /**
     * <p>Sets this table as our new symbol table. May only be called once,
     * and never with a null table.</p>
     *
     * @param table The newly created and blank symbol table.
     */
    public void setSymbolTable(ScopeRepository table) {
        if (table == null) {
            throw new MiscErrorException(
                    "Symbol table may not be set to null!",
                    new IllegalArgumentException());
        }

        if (mySymbolTable != null) {
            throw new MiscErrorException("Symbol table may only be set once!",
                    new IllegalStateException());
        }

        mySymbolTable = table;
    }

    /**
     * <p>Sets a new type graph to indicate relationship between types.</p>
     *
     * @param t The newly created type graph.
     */
    public void setTypeGraph(TypeGraph t) {
        myTypeGraph = t;
    }

}
| |
package eu.fbk.utils.core;
import com.google.common.base.*;
import com.google.common.base.Objects;
import com.google.common.collect.*;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.*;
public abstract class Graph<V, E> implements Serializable {
    private static final long serialVersionUID = 1L;

    // Lazily computed caches for the derived vertex/label sets below;
    // transient so they are simply recomputed after deserialization.
    @Nullable
    private transient Set<E> labels;

    @Nullable
    private transient Set<V> neighbours;

    @Nullable
    private transient Set<V> sources;

    @Nullable
    private transient Set<V> targets;

    @Nullable
    private transient Set<V> roots;

    @Nullable
    private transient Set<V> leaves;

    // Primitive operations that concrete subclasses must provide; every
    // public accessor is implemented on top of these three views.
    abstract Set<V> doGetVertices();

    abstract Set<Edge<V, E>> doGetEdges();

    // Edges incident to 'vertex', whether it is their source or their target.
    abstract Set<Edge<V, E>> doGetEdges(V vertex);

    abstract Graph<V, E> doFilter(@Nullable Predicate<V> vertexFilter,
            @Nullable Predicate<Edge<V, E>> edgeFilter);
    /**
     * Returns all distinct edge labels in the graph.
     * The result is computed once and cached in {@code this.labels}.
     */
    public final Set<E> getLabels() {
        if (this.labels == null) {
            final Set<E> labels = Sets.newHashSet();
            for (final Edge<V, E> edge : doGetEdges()) {
                labels.add(edge.getLabel());
            }
            this.labels = Collections.unmodifiableSet(labels);
        }
        return this.labels;
    }

    /**
     * Returns the labels of the edges incident to the given vertex
     * (not cached, recomputed on each call).
     */
    public final Set<E> getLabels(@Nullable final V vertex) {
        final Set<E> labels = Sets.newHashSet();
        for (final Edge<V, E> edge : doGetEdges(vertex)) {
            labels.add(edge.getLabel());
        }
        return Collections.unmodifiableSet(labels);
    }
public final Set<E> getLabels(@Nullable final V source, @Nullable final V target) {
Set<Edge<V, E>> edgesToFilter;
if (source != null) {
edgesToFilter = doGetEdges(source);
} else if (target != null) {
edgesToFilter = doGetEdges(target);
} else {
return getLabels();
}
final Set<E> labels = Sets.newHashSet();
for (final Edge<V, E> edge : edgesToFilter) {
if (source == null || edge.getSource().equals(source) || target == null
|| edge.getTarget().equals(target)) {
labels.add(edge.getLabel());
}
}
return Collections.unmodifiableSet(labels);
}
    /** Returns all edges of the graph. */
    public final Set<Edge<V, E>> getEdges() {
        return doGetEdges();
    }

    /** Returns the edges incident to {@code vertex}, or all edges when null. */
    public final Set<Edge<V, E>> getEdges(@Nullable final V vertex) {
        return vertex == null ? doGetEdges() : doGetEdges(vertex);
    }
public final Set<Edge<V, E>> getEdges(@Nullable final V source, @Nullable final V target) {
Set<Edge<V, E>> edgesToFilter;
if (source != null) {
edgesToFilter = doGetEdges(source);
} else if (target != null) {
edgesToFilter = doGetEdges(target);
} else {
return doGetEdges();
}
final List<Edge<V, E>> edges = Lists.newArrayList();
for (final Edge<V, E> edge : edgesToFilter) {
if (source == null || edge.getSource().equals(source) || target == null
|| edge.getTarget().equals(target)) {
edges.add(edge);
}
}
return ImmutableSet.copyOf(edges);
}
    /** Returns all vertices of the graph. */
    public final Set<V> getVertices() {
        return doGetVertices();
    }

    /**
     * Returns every vertex that participates in at least one edge,
     * as source or target. Cached after the first call.
     */
    public final Set<V> getNeighbours() {
        if (this.neighbours == null) {
            final Set<V> neighbours = Sets.newHashSet();
            for (final Edge<V, E> edge : doGetEdges()) {
                neighbours.add(edge.getSource());
                neighbours.add(edge.getTarget());
            }
            this.neighbours = ImmutableSet.copyOf(neighbours);
        }
        return this.neighbours;
    }

    /**
     * Returns the vertices adjacent to {@code vertex}, ignoring edge
     * direction (the opposite endpoint of each incident edge).
     */
    public final Set<V> getNeighbours(final V vertex) {
        final List<V> neighbours = Lists.newArrayList();
        for (final Edge<V, E> edge : doGetEdges(vertex)) {
            if (edge.getSource().equals(vertex)) {
                neighbours.add(edge.getTarget());
            } else {
                neighbours.add(edge.getSource());
            }
        }
        return ImmutableSet.copyOf(neighbours);
    }

    /** Returns every vertex that is the source of some edge. Cached. */
    public final Set<V> getSources() {
        if (this.sources == null) {
            final Set<V> sources = Sets.newHashSet();
            for (final Edge<V, E> edge : doGetEdges()) {
                sources.add(edge.getSource());
            }
            this.sources = ImmutableSet.copyOf(sources);
        }
        return this.sources;
    }

    /** Returns the sources of the edges entering {@code vertex} (its direct predecessors). */
    public final Set<V> getSources(final V vertex) {
        final List<V> sources = Lists.newArrayList();
        for (final Edge<V, E> edge : doGetEdges(vertex)) {
            if (edge.getTarget().equals(vertex)) {
                sources.add(edge.getSource());
            }
        }
        return ImmutableSet.copyOf(sources);
    }

    /** Returns every vertex that is the target of some edge. Cached. */
    public final Set<V> getTargets() {
        if (this.targets == null) {
            final Set<V> targets = Sets.newHashSet();
            for (final Edge<V, E> edge : doGetEdges()) {
                targets.add(edge.getTarget());
            }
            this.targets = ImmutableSet.copyOf(targets);
        }
        return this.targets;
    }

    /** Returns the targets of the edges leaving {@code vertex} (its direct successors). */
    public final Set<V> getTargets(final V vertex) {
        final List<V> targets = Lists.newArrayList();
        for (final Edge<V, E> edge : doGetEdges(vertex)) {
            if (edge.getSource().equals(vertex)) {
                targets.add(edge.getTarget());
            }
        }
        return ImmutableSet.copyOf(targets);
    }

    /** Returns the vertices with no incoming edge. Cached. */
    public final Set<V> getRoots() {
        if (this.roots == null) {
            final Set<V> roots = Sets.newHashSet(doGetVertices());
            for (final Edge<V, E> edge : doGetEdges()) {
                roots.remove(edge.getTarget());
            }
            this.roots = ImmutableSet.copyOf(roots);
        }
        return this.roots;
    }

    /** Returns the vertices with no outgoing edge. Cached. */
    public final Set<V> getLeaves() {
        if (this.leaves == null) {
            final Set<V> leaves = Sets.newHashSet(doGetVertices());
            for (final Edge<V, E> edge : doGetEdges()) {
                leaves.remove(edge.getSource());
            }
            this.leaves = ImmutableSet.copyOf(leaves);
        }
        return this.leaves;
    }
public final Set<Path<V, E>> getPaths(final V source, final V target, final boolean directed,
final int maxLength) {
final Multimap<V, List<Edge<V, E>>> map = HashMultimap.create();
map.put(source, Collections.emptyList());
int length = 0;
Set<V> frontier = ImmutableSet.of(source);
while (!frontier.isEmpty() && map.get(target).isEmpty() && length < maxLength) {
++length;
final Set<V> seen = ImmutableSet.copyOf(map.keySet());
for (final V vertex : frontier) {
final Collection<List<Edge<V, E>>> paths = map.get(vertex);
final Set<Edge<V, E>> edges = directed ? getEdges(vertex, null) : getEdges(vertex);
for (final Edge<V, E> edge : edges) {
final V otherVertex = edge.getOpposite(vertex);
if (!seen.contains(otherVertex)) {
for (final List<Edge<V, E>> path : paths) {
final List<Edge<V, E>> newPath = Lists.newArrayList(path);
newPath.add(edge);
map.put(otherVertex, newPath);
}
}
}
}
frontier = ImmutableSet.copyOf(Sets.difference(map.keySet(), frontier));
}
final List<Path<V, E>> paths = Lists.newArrayList();
for (final List<Edge<V, E>> path : map.get(target)) {
paths.add(Path.create(source, target, path));
}
return ImmutableSet.copyOf(paths);
}
    /**
     * Returns a view of this graph keeping only the edges whose label is in
     * {@code labels}; all vertices are kept.
     */
    public final Graph<V, E> filterLabels(final Iterable<E> labels) {
        final Set<E> labelSet = labels instanceof Set<?> ? (Set<E>) labels : Sets
                .newHashSet(labels);
        return doFilter(null, new Predicate<Edge<V, E>>() {

            @Override
            public boolean apply(final Edge<V, E> edge) {
                return labelSet.contains(edge.getLabel());
            }

        });
    }

    /**
     * Returns a view of this graph keeping only the given edges; all
     * vertices are kept.
     */
    public final Graph<V, E> filterEdges(final Iterable<Edge<V, E>> edges) {
        final Set<Edge<V, E>> edgeSet = edges instanceof Set<?> ? (Set<Edge<V, E>>) edges : Sets
                .newHashSet(edges);
        return doFilter(null, Predicates.in(edgeSet));
    }

    /**
     * Returns a view of this graph keeping only the given vertices and the
     * edges whose both endpoints survive.
     */
    public final Graph<V, E> filterVertices(final Iterable<V> vertices) {
        final Set<V> vertexSet = vertices instanceof Set<?> ? (Set<V>) vertices : Sets
                .newHashSet(vertices);
        return doFilter(Predicates.in(vertexSet), new Predicate<Edge<V, E>>() {

            @Override
            public boolean apply(final Edge<V, E> edge) {
                return vertexSet.contains(edge.getSource())
                        && vertexSet.contains(edge.getTarget());
            }

        });
    }

    /**
     * Returns a view filtered by the given predicates; either may be null to
     * disable that filter. The edge predicate passed to doFilter is augmented
     * so that an edge also requires both endpoints to pass the vertex filter.
     */
    public final Graph<V, E> filter(@Nullable final Predicate<V> vertexFilter,
            @Nullable final Predicate<Edge<V, E>> edgeFilter) {
        if (vertexFilter == null) {
            return edgeFilter == null ? this : doFilter(null, edgeFilter);
        } else if (edgeFilter == null) {
            return doFilter(vertexFilter, new Predicate<Edge<V, E>>() {

                @Override
                public boolean apply(final Edge<V, E> edge) {
                    return vertexFilter.apply(edge.getSource())
                            && vertexFilter.apply(edge.getTarget());
                }

            });
        } else {
            return doFilter(vertexFilter, new Predicate<Edge<V, E>>() {

                @Override
                public boolean apply(final Edge<V, E> edge) {
                    return edgeFilter.apply(edge) && vertexFilter.apply(edge.getSource())
                            && vertexFilter.apply(edge.getTarget());
                }

            });
        }
    }
    /**
     * Two graphs are equal when they have the same vertex set and the same
     * edge set, regardless of concrete implementation.
     */
    @Override
    public final boolean equals(final Object object) {
        if (object == this) {
            return true;
        }
        if (!(object instanceof Graph<?, ?>)) {
            return false;
        }
        final Graph<?, ?> other = (Graph<?, ?>) object;
        return doGetVertices().equals(other.doGetVertices())
                && doGetEdges().equals(other.doGetEdges());
    }

    /** Hash consistent with {@link #equals}: vertices and edges only. */
    @Override
    public final int hashCode() {
        return Objects.hashCode(doGetVertices(), doGetEdges());
    }

    /** Renders the graph as "V(n) = {...}, E(m) = {...}". */
    @Override
    public final String toString() {
        final StringBuilder builder = new StringBuilder();
        builder.append("V(").append(doGetVertices().size()).append(") = {");
        Joiner.on(", ").appendTo(builder, doGetVertices());
        builder.append("}, E(").append(doGetEdges().size()).append(") = {");
        Joiner.on(", ").appendTo(builder, doGetEdges());
        builder.append("}");
        return builder.toString();
    }

    /** Creates a new, empty {@link Builder} for assembling a graph. */
    public static <V, E> Builder<V, E> builder() {
        return new Builder<V, E>();
    }
public static final class Builder<V, E> {
private final Set<V> vertices;
private final Set<Edge<V, E>> edges;
private Builder() {
this.vertices = Sets.newHashSet();
this.edges = Sets.newHashSet();
}
public Builder<V, E> addVertices(@SuppressWarnings("unchecked") final V... vertices) {
return addVertices(Arrays.asList(vertices));
}
public Builder<V, E> addVertices(final Iterable<? extends V> vertices) {
Iterables.addAll(this.vertices, vertices);
return this;
}
public Builder<V, E> addEdges(@SuppressWarnings("unchecked") final Edge<V, E>... edges) {
return addEdges(Arrays.asList(edges));
}
public Builder<V, E> addEdges(final Iterable<Edge<V, E>> edges) {
for (final Edge<V, E> edge : edges) {
this.edges.add(edge);
this.vertices.add(edge.getSource());
this.vertices.add(edge.getTarget());
}
return this;
}
public Builder<V, E> addEdges(final V source, final V target,
@SuppressWarnings("unchecked") final E... labels) {
this.vertices.add(source);
this.vertices.add(target);
for (final E label : labels) {
this.edges.add(Edge.create(source, target, label));
}
return this;
}
public Graph<V, E> build() {
return new ConcreteGraph<V, E>(ImmutableSet.copyOf(this.vertices),
ImmutableSet.copyOf(this.edges));
}
}
public static final class Edge<V, E> implements Serializable {
private static final long serialVersionUID = 1L;
private final V source;
private final V target;
@Nullable
private final E label;
private Edge(final V source, final V target, @Nullable final E label) {
this.source = source;
this.target = target;
this.label = label;
}
public static <V, E> Edge<V, E> create(final V source, final V target,
@Nullable final E label) {
return new Edge<V, E>(Preconditions.checkNotNull(source),
Preconditions.checkNotNull(target), label);
}
public V getSource() {
return this.source;
}
public V getTarget() {
return this.target;
}
public V getOpposite(final V vertex) {
if (this.source.equals(vertex)) {
return this.target;
} else if (this.target.equals(vertex)) {
return this.source;
} else {
throw new IllegalArgumentException("Vertex " + vertex + " not contained in "
+ this);
}
}
@Nullable
public E getLabel() {
return this.label;
}
@Override
public boolean equals(final Object object) {
if (object == this) {
return true;
}
if (!(object instanceof Edge)) {
return false;
}
final Edge<?, ?> other = (Edge<?, ?>) object;
return this.source.equals(other.source) && this.target.equals(other.target)
&& Objects.equal(this.label, other.label);
}
@Override
public int hashCode() {
return Objects.hashCode(this.source, this.target, this.label);
}
@Override
public String toString() {
return this.source + " -" + this.label + "-> " + this.target;
}
}
    /**
     * Immutable path through the graph: a sequence of edges plus the
     * corresponding vertex sequence (one more vertex than edges). Edges may be
     * traversed against their direction; equality is based on the edge list.
     */
    public static final class Path<V, E> implements Serializable {

        private static final long serialVersionUID = 1L;

        private final List<Edge<V, E>> edges;

        private final List<V> vertices;

        // Lazily computed label list; transient, rebuilt after deserialization.
        @Nullable
        private transient List<E> labels;

        // Lazily computed "all edges followed forward" flag; transient.
        @Nullable
        private transient Boolean directed;

        private Path(final Iterable<Edge<V, E>> edges, final Iterable<V> vertices) {
            this.edges = ImmutableList.copyOf(edges);
            this.vertices = ImmutableList.copyOf(vertices);
        }

        /**
         * Creates a path from {@code source} to {@code target} along the given
         * edges, deriving the vertex sequence by walking the edges in order
         * (each edge may be traversed in either direction). Throws
         * IllegalArgumentException if consecutive edges do not connect or the
         * walk does not end at {@code target}.
         */
        public static <V, E> Path<V, E> create(final V source, final V target,
                final Iterable<Edge<V, E>> edges) {
            final List<Edge<V, E>> edgeList = ImmutableList.copyOf(edges);
            final List<V> verticesList = Lists.newArrayListWithCapacity(edgeList.size() + 1);
            V vertex = source;
            for (final Edge<V, E> edge : edgeList) {
                verticesList.add(vertex);
                // Step across the edge from whichever endpoint we are at.
                if (edge.getSource().equals(vertex)) {
                    vertex = edge.getTarget();
                } else if (edge.getTarget().equals(vertex)) {
                    vertex = edge.getSource();
                } else {
                    throw new IllegalArgumentException("Invalid path");
                }
            }
            verticesList.add(vertex);
            if (!vertex.equals(target)) {
                throw new IllegalArgumentException("Invalid path");
            }
            return new Path<V, E>(edgeList, verticesList);
        }

        /** Returns the number of edges in the path. */
        public int length() {
            return this.edges.size();
        }

        /** Returns the first vertex of the path. */
        public V getSource() {
            return this.vertices.get(0);
        }

        /** Returns the last vertex of the path. */
        public V getTarget() {
            return this.vertices.get(this.vertices.size() - 1);
        }

        /** Returns the vertex sequence (length() + 1 elements). */
        public List<V> getVertices() {
            return this.vertices;
        }

        /** Returns the edge sequence. */
        public List<Edge<V, E>> getEdges() {
            return this.edges;
        }

        /** Returns the edge labels in traversal order. Cached after first call. */
        public List<E> getLabels() {
            if (this.labels == null) {
                final List<E> labels = Lists.newArrayListWithCapacity(this.edges.size());
                for (final Edge<V, E> edge : this.edges) {
                    labels.add(edge.getLabel());
                }
                this.labels = ImmutableList.copyOf(labels);
            }
            return this.labels;
        }

        /**
         * Returns true when every edge is traversed in its own direction
         * (edge i's source equals vertex i). Cached after first call.
         */
        public boolean isDirected() {
            if (this.directed == null) {
                boolean directed = true;
                for (int i = 0; i < this.edges.size(); ++i) {
                    if (!this.edges.get(i).getSource().equals(this.vertices.get(i))) {
                        directed = false;
                        break;
                    }
                }
                this.directed = directed;
            }
            return this.directed;
        }

        @Override
        public boolean equals(final Object object) {
            if (object == this) {
                return true;
            }
            if (!(object instanceof Path)) {
                return false;
            }
            final Path<?, ?> path = (Path<?, ?>) object;
            return this.edges.equals(path.edges);
        }

        @Override
        public int hashCode() {
            return this.edges.hashCode();
        }

        /** Renders each hop as "v -l-> " or "v <-l- " depending on direction. */
        @Override
        public String toString() {
            final StringBuilder builder = new StringBuilder();
            for (int i = 0; i < this.edges.size(); ++i) {
                final V vertex = this.vertices.get(i);
                final Edge<V, E> edge = this.edges.get(i);
                builder.append(vertex);
                if (edge.getSource().equals(vertex)) {
                    builder.append(" -").append(edge.getLabel()).append("-> ");
                } else {
                    builder.append(" <-").append(edge.getLabel()).append("- ");
                }
            }
            builder.append(this.vertices.get(this.vertices.size() - 1));
            return builder.toString();
        }

    }
    /**
     * Default Graph implementation backed by explicit vertex and edge sets.
     * An incidence map (vertex -> incident edges) is built lazily on the
     * first per-vertex edge query.
     */
    private static final class ConcreteGraph<V, E> extends Graph<V, E> {

        private static final long serialVersionUID = 1L;

        private final Set<V> vertices;

        private final Set<Edge<V, E>> edges;

        // Lazily built incidence index; transient, rebuilt after
        // deserialization. NOTE(review): the lazy build is not synchronized —
        // presumably instances are confined to one thread; confirm before
        // sharing across threads.
        @Nullable
        private transient Map<V, Set<Edge<V, E>>> map;

        ConcreteGraph(final Set<V> vertices, final Set<Edge<V, E>> edges) {
            this.vertices = vertices;
            this.edges = edges;
        }

        @Override
        Set<V> doGetVertices() {
            return this.vertices;
        }

        @Override
        Set<Edge<V, E>> doGetEdges() {
            return this.edges;
        }

        @Override
        Set<Edge<V, E>> doGetEdges(final V vertex) {
            if (this.map == null) {
                // First pass: collect incident edges per endpoint in mutable
                // lists; each edge is indexed under both of its endpoints.
                final Map<V, List<Edge<V, E>>> map = Maps.newHashMap();
                for (final Edge<V, E> edge : this.edges) {
                    List<Edge<V, E>> sourceEdges = map.get(edge.getSource());
                    List<Edge<V, E>> targetEdges = map.get(edge.getTarget());
                    if (sourceEdges == null) {
                        sourceEdges = Lists.newArrayList();
                        map.put(edge.getSource(), sourceEdges);
                    }
                    if (targetEdges == null) {
                        targetEdges = Lists.newArrayList();
                        map.put(edge.getTarget(), targetEdges);
                    }
                    sourceEdges.add(edge);
                    targetEdges.add(edge);
                }
                // Second pass: freeze into immutable sets.
                final ImmutableMap.Builder<V, Set<Edge<V, E>>> builder = ImmutableMap.builder();
                for (final Map.Entry<V, List<Edge<V, E>>> entry : map.entrySet()) {
                    builder.put(entry.getKey(), ImmutableSet.copyOf(entry.getValue()));
                }
                this.map = builder.build();
            }
            // Isolated vertices have no entry in the index: return empty set.
            final Set<Edge<V, E>> edges = this.map.get(vertex);
            return edges == null ? ImmutableSet.of() : edges;
        }

        @Override
        Graph<V, E> doFilter(final Predicate<V> vertexFilter,
                final Predicate<Edge<V, E>> edgeFilter) {
            return new FilteredGraph<V, E>(this, vertexFilter, edgeFilter);
        }

    }
/**
 * A lazy view of another graph restricted by optional vertex and edge predicates.
 * A {@code null} filter means "accept everything".
 */
private static final class FilteredGraph<V, E> extends Graph<V, E> {
    private static final long serialVersionUID = 1L;
    /** The underlying graph being filtered. */
    private final Graph<V, E> graph;
    @Nullable
    private final Predicate<V> vertexFilter;
    @Nullable
    private final Predicate<Edge<V, E>> edgeFilter;

    FilteredGraph(final Graph<V, E> graph, @Nullable final Predicate<V> vertexFilter,
        @Nullable final Predicate<Edge<V, E>> edgeFilter) {
        this.graph = graph;
        this.vertexFilter = vertexFilter;
        this.edgeFilter = edgeFilter;
    }

    @Override
    Set<Edge<V, E>> doGetEdges() {
        if (this.edgeFilter == null) {
            return this.graph.doGetEdges();
        }
        return Sets.filter(this.graph.doGetEdges(), this.edgeFilter);
    }

    @Override
    Set<Edge<V, E>> doGetEdges(final V vertex) {
        if (this.edgeFilter == null) {
            return this.graph.doGetEdges(vertex);
        }
        return Sets.filter(this.graph.doGetEdges(vertex), this.edgeFilter);
    }

    @Override
    Set<V> doGetVertices() {
        if (this.vertexFilter == null) {
            return this.graph.doGetVertices();
        }
        return Sets.filter(this.graph.doGetVertices(), this.vertexFilter);
    }

    @Override
    Graph<V, E> doFilter(@Nullable final Predicate<V> vertexFilter,
        @Nullable final Predicate<Edge<V, E>> edgeFilter) {
        // Stack the new filters onto the existing ones and wrap the ORIGINAL
        // graph, so repeated filtering never builds a chain of views.
        return new FilteredGraph<V, E>(this.graph,
            conjoin(this.vertexFilter, vertexFilter),
            conjoin(this.edgeFilter, edgeFilter));
    }

    /** Conjunction of two optional predicates; {@code null} acts as "accept all". */
    @Nullable
    private static <T> Predicate<T> conjoin(@Nullable final Predicate<T> first,
        @Nullable final Predicate<T> second) {
        if (first == null) {
            return second;
        }
        if (second == null) {
            return first;
        }
        return Predicates.and(first, second);
    }
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2017 CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package jenkins.plugins.git.traits;
import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Util;
import hudson.model.Descriptor;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.extensions.GitSCMExtension;
import hudson.plugins.git.extensions.GitSCMExtensionDescriptor;
import hudson.plugins.git.extensions.impl.LocalBranch;
import hudson.scm.SCM;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import javax.annotation.CheckForNull;
import jenkins.model.Jenkins;
import jenkins.plugins.git.AbstractGitSCMSource;
import jenkins.plugins.git.GitSCMBuilder;
import jenkins.plugins.git.GitSCMSourceContext;
import jenkins.scm.api.SCMSource;
import jenkins.scm.api.trait.SCMBuilder;
import jenkins.scm.api.trait.SCMSourceContext;
import jenkins.scm.api.trait.SCMSourceTrait;
import jenkins.scm.api.trait.SCMSourceTraitDescriptor;
import jenkins.scm.api.trait.SCMTrait;
import org.jvnet.tiger_types.Types;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.DataBoundConstructor;
/**
* Base class for the {@link Descriptor} of a {@link GitSCMExtension}.
*
* @since 3.4.0
*/
public abstract class GitSCMExtensionTraitDescriptor extends SCMSourceTraitDescriptor {
    /**
     * The type of {@link GitSCMExtension}.
     */
    @NonNull
    private final Class<? extends GitSCMExtension> extension;
    /**
     * The constructor to use in {@link #convertToTrait(GitSCMExtension)} or {@code null} if the implementation
     * class is handling conversion.
     */
    @CheckForNull
    private final Constructor<? extends SCMSourceTrait> constructor;
    /**
     * {@code true} if {@link #constructor} does not take any parameters, {@code false} if it takes a single parameter
     * of type {@link GitSCMExtension}.
     */
    private final boolean noArgConstructor;
    /**
     * Constructor to use when type inference using {@link #GitSCMExtensionTraitDescriptor()} does not work.
     *
     * @param clazz Pass in the type of {@link SCMTrait}
     * @param extension Pass in the type of {@link GitSCMExtension}.
     */
    protected GitSCMExtensionTraitDescriptor(Class<? extends SCMSourceTrait> clazz,
                                             Class<? extends GitSCMExtension> extension) {
        super(clazz);
        this.extension = extension;
        // Only infer a conversion constructor when the subclass has NOT overridden convertToTrait itself.
        if (!Util.isOverridden(GitSCMExtensionTraitDescriptor.class, getClass(), "convertToTrait",
                GitSCMExtension.class)) {
            // check that the GitSCMExtensionTrait has a constructor that takes a single argument of the type
            // 'extension' so that our default convertToTrait method implementation can be used
            try {
                constructor = clazz.getConstructor(extension);
                // NOTE(review): getConstructor(extension) can only ever return a single-argument
                // constructor, so noArgConstructor is effectively always false on this path; zero-arg
                // conversion is only discovered by the @DataBoundConstructor scan in the no-arg
                // descriptor constructor below — confirm this asymmetry is intended.
                noArgConstructor = constructor.getParameterTypes().length == 0;
            } catch (NoSuchMethodException e) {
                throw new AssertionError("Could not infer how to convert a " + extension + " to a "
                        + clazz + " as there is no obvious constructor. Either provide a simple constructor or "
                        + "override convertToTrait(GitSCMExtension)", e);
            }
        } else {
            // The subclass handles conversion itself; nothing to infer here.
            constructor = null;
            noArgConstructor = false;
        }
    }
    /**
     * Infers the type of the corresponding {@link GitSCMExtensionTrait} from the outer class.
     * This version works when you follow the common convention, where a descriptor
     * is written as the static nested class of the describable class.
     */
    protected GitSCMExtensionTraitDescriptor() {
        super();
        // Resolve the E of GitSCMExtensionTrait<E> from the describable class's generic signature.
        Type bt = Types.getBaseClass(clazz, GitSCMExtensionTrait.class);
        if (bt instanceof ParameterizedType) {
            ParameterizedType pt = (ParameterizedType) bt;
            // this 'extension' is the closest approximation of E of GitSCMExtensionTrait<E>.
            extension = Types.erasure(pt.getActualTypeArguments()[0]);
            // Reject the raw/unbound case: E must be a proper subtype of GitSCMExtension.
            if (!GitSCMExtension.class.isAssignableFrom(extension) || GitSCMExtension.class == extension) {
                throw new AssertionError("Could not infer GitSCMExtension type for outer class " + clazz
                        + " of " + getClass() + ". Perhaps wrong outer class? (or consider using the explicit "
                        + "class constructor)");
            }
        } else {
            throw new AssertionError("Could not infer GitSCMExtension type. Consider using the explicit "
                    + "class constructor)");
        }
        if (!Util.isOverridden(GitSCMExtensionTraitDescriptor.class, getClass(), "convertToTrait",
                GitSCMExtension.class)) {
            // check that the GitSCMExtensionTrait has a constructor that takes a single argument of the type
            // 'extension' so that our default convertToTrait method implementation can be used
            Constructor<? extends SCMSourceTrait> constructor = null;
            // Find the @DataBoundConstructor; Stapler guarantees at most one per class.
            for (Constructor<?> c : clazz.getConstructors()) {
                if (c.getAnnotation(DataBoundConstructor.class) != null) {
                    // unchecked cast: clazz is declared Class<? extends SCMSourceTrait>, so its
                    // constructors produce SCMSourceTrait instances.
                    constructor = (Constructor<? extends SCMSourceTrait>) c;
                    break;
                }
            }
            if (constructor != null) {
                // Accept either a zero-arg constructor or one taking exactly the extension type.
                Class<?>[] parameterTypes = constructor.getParameterTypes();
                if (parameterTypes.length == 0) {
                    this.constructor = constructor;
                    this.noArgConstructor = true;
                } else if (parameterTypes.length == 1 && extension.equals(parameterTypes[0])) {
                    this.constructor = constructor;
                    this.noArgConstructor = false;
                } else {
                    throw new AssertionError("Could not infer how to convert a " + extension + " to a "
                            + clazz + " as the @DataBoundConstructor is neither zero arg nor single arg of type "
                            + extension + ". Either provide a simple constructor or override "
                            + "convertToTrait(GitSCMExtension)");
                }
            } else {
                throw new AssertionError("Could not infer how to convert a " + extension + " to a "
                        + clazz + " as there is no @DataBoundConstructor (which is going to cause other problems)");
            }
        } else {
            // The subclass handles conversion itself; nothing to infer here.
            constructor = null;
            this.noArgConstructor = false;
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Class<? extends SCMBuilder> getBuilderClass() {
        return GitSCMBuilder.class;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Class<? extends SCM> getScmClass() {
        return GitSCM.class;
    }
    /**
     * Returns the {@link GitSCMExtensionDescriptor} for this {@link #getExtensionClass()}.
     *
     * @return the {@link GitSCMExtensionDescriptor} for this {@link #getExtensionClass()}.
     */
    @Restricted(NoExternalUse.class) // intended for use from stapler / jelly only
    public GitSCMExtensionDescriptor getExtensionDescriptor() {
        // Looks the descriptor up from the running Jenkins instance; cast is safe because
        // GitSCMExtension describables register GitSCMExtensionDescriptor descriptors.
        return (GitSCMExtensionDescriptor) Jenkins.getActiveInstance().getDescriptor(extension);
    }
    /**
     * Returns the type of {@link GitSCMExtension} that the {@link GitSCMExtensionTrait} wraps.
     *
     * @return the type of {@link GitSCMExtension} that the {@link GitSCMExtensionTrait} wraps.
     */
    public Class<? extends GitSCMExtension> getExtensionClass() {
        return extension;
    }
    /**
     * Converts the supplied {@link GitSCMExtension} (which must be of type {@link #getExtensionClass()}) into
     * its corresponding {@link GitSCMExtensionTrait}.
     *
     * The default implementation assumes that the {@link #clazz} has a public constructor taking either no arguments
     * or a single argument of type {@link #getExtensionClass()} and will just call that. Override this method if you
     * need more complex conversion logic, for example {@link LocalBranch} only makes sense for a
     * {@link LocalBranch#getLocalBranch()} value of {@code **} so
     * {@link LocalBranchTrait.DescriptorImpl#convertToTrait(GitSCMExtension)} returns {@code null} for all other
     * {@link LocalBranch} configurations.
     *
     * @param extension the {@link GitSCMExtension} (must be of type {@link #getExtensionClass()})
     * @return the {@link GitSCMExtensionTrait} or {@code null} if the supplied {@link GitSCMExtension} is not
     * appropriate for conversion to a {@link GitSCMExtensionTrait}
     * @throws UnsupportedOperationException if the conversion failed because of a implementation bug.
     */
    @CheckForNull
    public SCMSourceTrait convertToTrait(@NonNull GitSCMExtension extension) {
        // Guard: caller must pass an instance of the exact extension type this descriptor wraps.
        if (!this.extension.isInstance(extension)) {
            throw new IllegalArgumentException(
                    "Expected a " + this.extension.getName() + " but got a " + extension.getClass().getName()
            );
        }
        if (constructor == null) {
            // constructor is only null when the subclass overrides convertToTrait, so reaching this
            // default implementation without one means the override delegated back to us — a bug.
            if (!Util.isOverridden(GitSCMExtensionTraitDescriptor.class, getClass(), "convertToTrait",
                    GitSCMExtension.class)) {
                throw new IllegalStateException("Should not be able to instantiate a " + getClass().getName()
                        + " without an inferred constructor for " + this.extension.getName());
            }
            throw new UnsupportedOperationException(
                    getClass().getName() + " should not delegate convertToTrait() to " + GitSCMExtension.class
                            .getName());
        }
        try {
            return noArgConstructor
                    ? constructor.newInstance()
                    : constructor.newInstance(this.extension.cast(extension));
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException | ClassCastException e) {
            // Reflection failure here indicates a descriptor implementation bug, per the contract above.
            throw new UnsupportedOperationException(e);
        }
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String getHelpFile() {
        // Fall back to the wrapped extension's help file when the trait has none of its own.
        String primary = super.getHelpFile();
        return primary == null ? getExtensionDescriptor().getHelpFile() : primary;
    }
}
| |
package nlp.common.util;
/**
 * Utility operations for common data structures such as arrays, ArrayLists and HashMaps.
 */
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
/**
 * Utility operations for common data structures (arrays, ArrayLists, HashMaps).
 */
public class Util {
    /**
     * String Operations
     * */

    /**
     * Splits {@code line} on whitespace and appends each token to {@code tokens}.
     * Does not clear {@code tokens} first — callers rely on accumulation.
     */
    public static void tokenize(String line, ArrayList<String> tokens) {
        StringTokenizer strTok = new StringTokenizer(line);
        while (strTok.hasMoreTokens()) {
            tokens.add(strTok.nextToken());
        }
    }

    /**
     * Print
     * */

    /** Prints the list elements space-separated, followed by two newlines. */
    public static void print(ArrayList<?> tokens) {
        for (int i = 0; i < tokens.size(); i++) {
            System.out.print(tokens.get(i) + " ");
        }
        System.out.print("\n");
        System.out.print("\n");
    }

    /** Prints the array elements space-separated, followed by one newline. */
    public static void print(String[] files) {
        for (int i = 0; i < files.length; i++) {
            System.out.print(files[i] + " ");
        }
        System.out.print("\n");
    }

    /** Prints each value followed by {@code string}, then prints {@code end}. */
    public static void print(double[] probs, String string, String end) {
        for (int i = 0; i < probs.length; i++) {
            System.out.print(probs[i] + string);
        }
        System.out.print(end);
    }

    /**
     * HashMap Operations
     * */

    /** Dumps the map as "key<TAB>value" lines, preceded by a header line. */
    public static void printHash(HashMap<String, Integer> hashMap) {
        System.out.println("Print HashMap");
        for (Map.Entry<String, Integer> m : hashMap.entrySet()) {
            System.out.println(m.getKey() + "\t" + m.getValue());
        }
    }

    /** Returns the map rendered as a list of "key<TAB>value" strings. */
    public static ArrayList<String> getHashMap(HashMap<String, String> hm) {
        ArrayList<String> a = new ArrayList<String>();
        for (Map.Entry<String, String> m : hm.entrySet()) {
            a.add(m.getKey() + "\t" + m.getValue());
        }
        return a;
    }

    /** Same as {@link #getHashMap(HashMap)} but for Integer-valued maps. */
    public static ArrayList<String> getHashMap2(HashMap<String, Integer> hm) {
        ArrayList<String> a = new ArrayList<String>();
        for (Map.Entry<String, Integer> m : hm.entrySet()) {
            a.add(m.getKey() + "\t" + m.getValue());
        }
        return a;
    }

    /**
     * Reverse lookup: returns the first key whose value equals {@code value},
     * rendered as a String, or null (after logging) if no entry matches.
     */
    public static String getKeysFromValue(HashMap<Integer, String> hm,
            String value) {
        for (Map.Entry<Integer, String> m : hm.entrySet()) {
            if (m.getValue().equals(value))
                return m.getKey() + "";
        }
        System.err.println("Error, can't find the data in Hashmap!");
        return null;
    }

    /**
     * Loads a two-column whitespace-separated file into {@code typeMap}
     * (first column -> second column). Aborts with a stderr message on a
     * malformed line or a duplicate first-column key.
     */
    public static void readHash(String type_map, HashMap<String, String> typeMap) {
        ArrayList<String> types = new ArrayList<String>();
        ArrayList<String> tokens = new ArrayList<String>();
        if (type_map != null) {
            FileUtil.readLines(type_map, types);
            for (int i = 0; i < types.size(); i++) {
                if (!types.get(i).isEmpty()) {
                    Util.tokenize(types.get(i), tokens);
                    if (tokens.size() != 0) {
                        if (tokens.size() != 2) {
                            for (int j = 0; j < tokens.size(); j++) {
                                System.out.print(tokens.get(j) + " ");
                            }
                            System.err
                                    .println(type_map
                                            + " Error ! Not two elements in one line !");
                            return;
                        }
                        if (!typeMap.containsKey(tokens.get(0)))
                            typeMap.put(tokens.get(0), tokens.get(1));
                        else {
                            System.out.println(tokens.get(0) + " "
                                    + tokens.get(1));
                            System.err.println(type_map
                                    + " Error ! Same type in first column !");
                            return;
                        }
                    }
                    tokens.clear();
                }
            }
        }
    }

    /**
     * Like {@link #readHash(String, HashMap)} but parses the second column as
     * an Integer.
     */
    public static void readHash2(String type_map,
            HashMap<String, Integer> hashMap) {
        ArrayList<String> types = new ArrayList<String>();
        ArrayList<String> tokens = new ArrayList<String>();
        if (type_map != null) {
            FileUtil.readLines(type_map, types);
            for (int i = 0; i < types.size(); i++) {
                if (!types.get(i).isEmpty()) {
                    Util.tokenize(types.get(i), tokens);
                    if (tokens.size() != 0) {
                        if (tokens.size() != 2) {
                            for (int j = 0; j < tokens.size(); j++) {
                                System.out.print(tokens.get(j) + " ");
                            }
                            System.err
                                    .println(type_map
                                            + " Error ! Not two elements in one line !");
                            return;
                        }
                        if (!hashMap.containsKey(tokens.get(0)))
                            // Integer.valueOf instead of deprecated new Integer(String)
                            hashMap.put(tokens.get(0),
                                    Integer.valueOf(tokens.get(1)));
                        else {
                            System.out.println(tokens.get(0) + " "
                                    + tokens.get(1));
                            System.err.println(type_map
                                    + " Error ! Same type in first column !");
                            return;
                        }
                    }
                    tokens.clear();
                }
            }
        }
    }

    /**
     * Like {@link #readHash(String, HashMap)} but parses the second column as
     * a Double.
     */
    public static void readHash3(String type_map,
            HashMap<String, Double> hashMap) {
        ArrayList<String> types = new ArrayList<String>();
        ArrayList<String> tokens = new ArrayList<String>();
        if (type_map != null) {
            FileUtil.readLines(type_map, types);
            for (int i = 0; i < types.size(); i++) {
                if (!types.get(i).isEmpty()) {
                    Util.tokenize(types.get(i), tokens);
                    if (tokens.size() != 0) {
                        if (tokens.size() != 2) {
                            for (int j = 0; j < tokens.size(); j++) {
                                System.out.print(tokens.get(j) + " ");
                            }
                            System.err
                                    .println(type_map
                                            + " Error ! Not two elements in one line !");
                            return;
                        }
                        if (!hashMap.containsKey(tokens.get(0)))
                            // Double.valueOf instead of deprecated new Double(String)
                            hashMap.put(tokens.get(0),
                                    Double.valueOf(tokens.get(1)));
                        else {
                            System.out.println(tokens.get(0) + " "
                                    + tokens.get(1));
                            System.err.println(type_map
                                    + " Error ! Same type in first column !");
                            return;
                        }
                    }
                    tokens.clear();
                }
            }
        }
    }

    /**
     * Sums a DCG-style discounted score over the first {@code k} entries:
     * value * log(2) / log(position + 2).
     * NOTE(review): iterates in the map's own order without sorting — assumes
     * {@code scores} is already rank-ordered (e.g. a LinkedHashMap produced by
     * {@link #sortByValue(HashMap, int)}); verify at call sites.
     */
    public static double readHashTopValue(HashMap<String, Integer> scores, int k) {
        int count = 0;
        double res = 0;
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            if (count >= k) {
                break;
            }
            int value = entry.getValue();
            res += (double) value * Math.log(2) / Math.log(count + 2);
            count++;
        }
        return res;
    }

    /**
     * Frequently used functions
     * */

    /**
     * Counts occurrences of {@code contains} in {@code a}. Each match is cut
     * out and the remainder re-joined before searching again, so matches
     * created by the join are also counted (kept for backward compatibility).
     */
    static public int count(String a, String contains) {
        int i = 0;
        int count = 0;
        while (a.contains(contains)) {
            i = a.indexOf(contains);
            a = a.substring(0, i)
                    + a.substring(i + contains.length(), a.length());
            count++;
        }
        return count;
    }

    /**
     * Returns a LinkedHashMap with the entries of {@code map} ordered by value:
     * flag == 0 means decreasing, anything else increasing. Values must be
     * mutually {@link Comparable}.
     */
    @SuppressWarnings("unchecked")
    public static HashMap<?, ?> sortByValue(HashMap<?, ?> map, final int flag) {
        // flag = 0 decreasing order otherwise increasing
        List<Map.Entry> list = new LinkedList(map.entrySet());
        Collections.sort(list, new Comparator<Map.Entry>() {
            public int compare(Map.Entry o1, Map.Entry o2) {
                Comparable first = (Comparable) o1.getValue();
                Comparable second = (Comparable) o2.getValue();
                return flag == 0 ? second.compareTo(first) : first.compareTo(second);
            }
        });
        HashMap result = new LinkedHashMap();
        for (Map.Entry entry : list) {
            result.put(entry.getKey(), entry.getValue());
        }
        return result;
    }

    /** Returns the sum of all values in the map. */
    public static double getSumValue(HashMap<String, Double> map) {
        double count = 0.0D;
        for (Double value : map.values()) {
            count += value;
        }
        return count;
    }

    /**
     * Returns the most frequent value in {@code bcp}; ties are broken in favour
     * of the value encountered first.
     */
    public static int getFrequentElement(int[] bcp) {
        HashMap<Integer, Integer> map = new HashMap<Integer, Integer>();
        ArrayList<Integer> count = new ArrayList<Integer>();
        ArrayList<Integer> uniId = new ArrayList<Integer>();
        int id = 0;
        for (int col = 0; col < bcp.length; col++) {
            int no = 0;
            if (!map.containsKey(bcp[col])) {
                map.put(bcp[col], id++);
                count.add(1);
                uniId.add(bcp[col]);
            } else {
                no = map.get(bcp[col]);
                count.set(no, count.get(no) + 1);
            }
        }
        int maximum = Integer.MIN_VALUE;
        int maxId = Integer.MIN_VALUE;
        for (int i = 0; i < count.size(); i++) {
            if (maximum < count.get(i)) {
                maximum = count.get(i);
                maxId = uniId.get(i);
            }
        }
        map.clear();
        uniId.clear();
        count.clear();
        return maxId;
    }

    /**
     * Row-wise (flag == 'r') or column-wise (any other flag) majority vote over
     * a 2-D array; the winners are written into {@code res}.
     */
    public static void getFrequentElement(int[][] bcp, int[] res, char flag) {
        if (flag == 'r') {
            for (int row = 0; row < bcp.length; row++) {
                res[row] = getFrequentElement(bcp[row]);
            }
        } else {
            int colL = bcp[0].length;
            int[] column = new int[bcp.length];
            for (int col = 0; col < colL; col++) {
                for (int row = 0; row < bcp.length; row++) {
                    column[row] = bcp[row][col];
                }
                res[col] = getFrequentElement(column);
            }
        }
    }

    /**
     * Short-array variant of {@link #getFrequentElement(int[])}.
     */
    public static short getFrequentElement(short[] bcp) {
        HashMap<Short, Short> map = new HashMap<Short, Short>();
        ArrayList<Short> count = new ArrayList<Short>();
        ArrayList<Short> uniId = new ArrayList<Short>();
        short id = 0;
        // int loop counter: a short counter would overflow for arrays longer
        // than Short.MAX_VALUE elements.
        for (int col = 0; col < bcp.length; col++) {
            short no = 0;
            if (!map.containsKey(bcp[col])) {
                map.put(bcp[col], id++);
                count.add((short) 1);
                uniId.add(bcp[col]);
            } else {
                no = map.get(bcp[col]);
                count.set(no, (short) (count.get(no) + 1));
            }
        }
        short maximum = Short.MIN_VALUE;
        short maxId = Short.MIN_VALUE;
        for (int i = 0; i < count.size(); i++) {
            if (maximum < count.get(i)) {
                maximum = count.get(i);
                maxId = uniId.get(i);
            }
        }
        map.clear();
        uniId.clear();
        count.clear();
        return maxId;
    }

    /**
     * Majority vote over a binary sample: true iff the most frequent value is 1.
     */
    public static boolean getFrequentElementBinary(int[] sample) {
        HashMap<Integer, Integer> map = new HashMap<Integer, Integer>();
        ArrayList<Integer> count = new ArrayList<Integer>();
        ArrayList<Integer> uniId = new ArrayList<Integer>();
        int id = 0;
        for (int col = 0; col < sample.length; col++) {
            int no = 0;
            if (!map.containsKey(sample[col])) {
                map.put(sample[col], id++);
                count.add(1);
                uniId.add(sample[col]);
            } else {
                no = map.get(sample[col]);
                count.set(no, count.get(no) + 1);
            }
        }
        int maximum = Integer.MIN_VALUE;
        int maxId = Integer.MIN_VALUE;
        for (int i = 0; i < count.size(); i++) {
            if (maximum < count.get(i)) {
                maximum = count.get(i);
                maxId = uniId.get(i);
            }
        }
        map.clear();
        uniId.clear();
        count.clear();
        return maxId == 1;
    }

    /**
     * Returns a histogram of {@code newScores1} over the bins defined by
     * {@code scores}. NOTE(review): a value absent from {@code scores} makes
     * indexOf return -1 and throws ArrayIndexOutOfBoundsException — assumes
     * every value is present; verify at call sites.
     */
    public static int[] CountElmt(ArrayList<Integer> newScores1,
            ArrayList<Integer> scores) {
        // new int[] is already zero-initialised in Java.
        int a[] = new int[scores.size()];
        for (int i = 0; i < newScores1.size(); i++) {
            int value = newScores1.get(i);
            int pos = scores.indexOf(value);
            a[pos]++;
        }
        return a;
    }

    /**
     * Counts positions where both lists hold an equal value.
     */
    public static int countCommElmts(ArrayList<Integer> newScores1,
            ArrayList<Integer> newScores2) {
        int count = 0;
        for (int i = 0; i < newScores1.size(); i++) {
            // Fix: compare boxed Integers by value. '==' compares references and
            // only coincidentally works inside the Integer cache (-128..127).
            if (newScores1.get(i).equals(newScores2.get(i)))
                count++;
        }
        return count;
    }

    /**
     * Accumulates word counts: for each value, increments its count if already
     * seen, otherwise appends it with count 1. The two lists are parallel.
     */
    public static void uniqe(int[] words, ArrayList<Integer> tempUniqueWords,
            ArrayList<Integer> tempCounts) {
        for (int i = 0; i < words.length; i++) {
            if (tempUniqueWords.contains(words[i])) {
                int index = tempUniqueWords.indexOf(words[i]);
                tempCounts.set(index, tempCounts.get(index) + 1);
            } else {
                tempUniqueWords.add(words[i]);
                tempCounts.add(1);
            }
        }
    }

    /**
     * De-duplicates the list in place (element order is not preserved).
     */
    public static void uniqe(ArrayList<Integer> items) {
        HashSet<Integer> hs = new HashSet<Integer>();
        hs.addAll(items);
        items.clear();
        items.addAll(hs);
    }

    /**
     * Writes the indices of the top-{@code i} values of {@code array} into
     * {@code rankList} (highest first; ties go to the later index).
     */
    public static void getTop(float[] array, ArrayList<Integer> rankList, int i) {
        rankList.clear();
        HashSet<Integer> scanned = new HashSet<Integer>();
        for (int m = 0; m < i && m < array.length; m++) {
            int index = -1;
            // Fix: NEGATIVE_INFINITY, not Float.MIN_VALUE (the smallest POSITIVE
            // float) — the old sentinel silently skipped zero/negative entries.
            float max = Float.NEGATIVE_INFINITY;
            for (int no = 0; no < array.length; no++) {
                if (!scanned.contains(no) && array[no] >= max) {
                    index = no;
                    max = array[no];
                }
            }
            if (index >= 0) { // found value
                scanned.add(index);
                rankList.add(index);
            }
        }
    }

    /**
     * Like {@link #getTop(float[], ArrayList, int)} but also records the
     * selected values in {@code rankProbs}.
     */
    public static void getTop(float[] array, ArrayList<Integer> rankList,
            ArrayList<Float> rankProbs, int i) {
        rankList.clear();
        rankProbs.clear();
        HashSet<Integer> scanned = new HashSet<Integer>();
        for (int m = 0; m < i && m < array.length; m++) {
            int index = -1;
            // See getTop above: NEGATIVE_INFINITY so zero/negative entries rank.
            float max = Float.NEGATIVE_INFINITY;
            for (int no = 0; no < array.length; no++) {
                if (array[no] >= max && !scanned.contains(no)) {
                    index = no;
                    max = array[no];
                }
            }
            if (index >= 0) { // found value
                scanned.add(index);
                rankList.add(index);
                rankProbs.add(array[index]);
            }
        }
    }

    /**
     * Top-{@code i} selection restricted to entries whose count meets
     * {@code threshold}; the recorded "prob" is the entry's count as a float.
     */
    public static void getTopNZ(float[] array, int[] counts,
            ArrayList<Integer> rankList, ArrayList<Float> rankProbs, int i, int threshold) {
        rankList.clear();
        rankProbs.clear();
        for (int m = 0; m < i && m < array.length; m++) {
            int index = -1;
            // See getTop above: NEGATIVE_INFINITY so zero/negative entries rank.
            float max = Float.NEGATIVE_INFINITY;
            for (int no = 0; no < array.length; no++) {
                if (counts[no] >= threshold) {
                    if (array[no] >= max && !rankList.contains(no)) {
                        index = no;
                        max = array[no];
                    }
                }
            }
            if (index >= 0) { // found value
                rankList.add(index);
                rankProbs.add(counts[index] + 0.0f);
            }
        }
    }

    /**
     * Roulette-wheel sampling: draws an index in [0, T) with probability
     * proportional to probs[index]; probs need not be normalised.
     */
    public static int sample(double[] probs, int T) {
        // cumulative (prefix-sum) distribution
        double[] pt = new double[T];
        pt[0] = probs[0];
        for (int i = 1; i < T; i++) {
            pt[i] = probs[i] + pt[i - 1];
        }
        // scaled sample because of unnormalized p[]
        double rouletter = (double) (Math.random() * pt[T - 1]);
        short sample = 0;
        for (sample = 0; sample < T; sample++) {
            if (pt[sample] > rouletter)
                break;
        }
        // Fix: short-circuit || instead of bitwise | on booleans.
        if (sample < 0 || sample >= T) {
            Util.print(probs, "\t", "\n");
            System.out.println("Sampling error!");
            System.exit(0);
        }
        return sample;
    }
}
| |
package org.engineFRP.maths;
/**
* Based on BennyBox's GameEngine https://github.com/BennyQBD/3DGameEngine.
* Created by TekMaTek on 05/08/2014.
*/
/**
 * 4x4 float matrix with row-major storage (m[row][column]).
 * Based on BennyBox's GameEngine https://github.com/BennyQBD/3DGameEngine.
 * Created by TekMaTek on 05/08/2014.
 */
public class Matrix4f {
    /** Row-major 4x4 storage: m[row][column]. */
    private float[][] m;

    public Matrix4f() {
        m = new float[4][4];
    }

    /**
     * Initialises this matrix as a camera (view) rotation from forward/up.
     *
     * Fix: the original called normalized() and discarded the result;
     * initRotation(Vector3f, Vector3f) below assigns the returned vector,
     * which shows normalized() returns the normalised copy rather than
     * mutating in place — so the old code built the basis from unnormalised
     * vectors.
     */
    public Matrix4f initCamera(Vector3f forward, Vector3f up) {
        Vector3f f = forward.normalized();
        Vector3f right = up.normalized().cross(f);
        m[0] = new float[]{right.x, right.y, right.z, 0};
        // NOTE(review): uses the caller's up directly; a fully orthonormal
        // basis would recompute up = f.cross(right) — kept as-is to preserve
        // the original row layout.
        m[1] = new float[]{up.x, up.y, up.z, 0};
        m[2] = new float[]{f.x, f.y, f.z, 0};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /** Initialises this matrix to the identity. */
    public Matrix4f initIdentity() {
        m[0] = new float[]{1, 0, 0, 0};
        m[1] = new float[]{0, 1, 0, 0};
        m[2] = new float[]{0, 0, 1, 0};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /** Initialises this matrix as a translation by (x, y, z). */
    public Matrix4f initTranslation(float x, float y, float z) {
        m[0] = new float[]{1, 0, 0, x};
        m[1] = new float[]{0, 1, 0, y};
        m[2] = new float[]{0, 0, 1, z};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /**
     * Initialises this matrix as a rotation by the given Euler angles in
     * DEGREES, combined in Z * Y * X order (matching the original).
     */
    public Matrix4f initRotation(float x, float y, float z) {
        Matrix4f rx = new Matrix4f();
        Matrix4f ry = new Matrix4f();
        Matrix4f rz = new Matrix4f();
        x = (float) Math.toRadians(x);
        y = (float) Math.toRadians(y);
        z = (float) Math.toRadians(z);
        float cx = (float) Math.cos(x), sx = (float) Math.sin(x);
        float cy = (float) Math.cos(y), sy = (float) Math.sin(y);
        float cz = (float) Math.cos(z), sz = (float) Math.sin(z);
        rz.m[0] = new float[]{cz, -sz, 0, 0};
        rz.m[1] = new float[]{sz, cz, 0, 0};
        rz.m[2] = new float[]{0, 0, 1, 0};
        rz.m[3] = new float[]{0, 0, 0, 1};
        rx.m[0] = new float[]{1, 0, 0, 0};
        rx.m[1] = new float[]{0, cx, -sx, 0};
        rx.m[2] = new float[]{0, sx, cx, 0};
        rx.m[3] = new float[]{0, 0, 0, 1};
        ry.m[0] = new float[]{cy, 0, -sy, 0};
        ry.m[1] = new float[]{0, 1, 0, 0};
        ry.m[2] = new float[]{sy, 0, cy, 0};
        ry.m[3] = new float[]{0, 0, 0, 1};
        m = rz.mul(ry.mul(rx)).getM();
        return this;
    }

    /** Initialises this matrix as a non-uniform scale by (x, y, z). */
    public Matrix4f initScale(float x, float y, float z) {
        m[0] = new float[]{x, 0, 0, 0};
        m[1] = new float[]{0, y, 0, 0};
        m[2] = new float[]{0, 0, z, 0};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /**
     * Initialises this matrix as a perspective projection.
     *
     * @param fov         full field of view; NOTE(review): passed straight to
     *                    Math.tan, so it is expected in RADIANS — confirm at
     *                    call sites.
     * @param aspectRatio width / height
     * @param zNear       near clip plane distance
     * @param zFar        far clip plane distance
     */
    public Matrix4f initPerspective(float fov, float aspectRatio, float zNear, float zFar) {
        float tanHalfFOV = (float) Math.tan(fov / 2);
        float zRange = zNear - zFar;
        m[0] = new float[]{1.0f / (tanHalfFOV * aspectRatio), 0, 0, 0};
        m[1] = new float[]{0, 1.0f / tanHalfFOV, 0, 0};
        m[2] = new float[]{0, 0, (-zNear - zFar) / zRange, 2 * zFar * zNear / zRange};
        m[3] = new float[]{0, 0, 1, 0};
        return this;
    }

    /** Initialises this matrix as an orthographic projection for the given box. */
    public Matrix4f initOrthographic(float left, float right, float bottom, float top, float near, float far) {
        float width = right - left;
        float height = top - bottom;
        float depth = far - near;
        m[0] = new float[]{2 / width, 0, 0, -(right + left) / width};
        m[1] = new float[]{0, 2 / height, 0, -(top + bottom) / height};
        m[2] = new float[]{0, 0, -2 / depth, -(far + near) / depth};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /**
     * Initialises this matrix as a rotation from forward/up, deriving the
     * right and recomputed-up axes.
     */
    public Matrix4f initRotation(Vector3f forward, Vector3f up) {
        Vector3f f = forward.normalized();
        Vector3f r = up.normalized();
        r = r.cross(f);
        Vector3f u = f.cross(r);
        return initRotation(f, u, r);
    }

    /** Initialises this matrix as a rotation from an explicit basis (rows: right, up, forward). */
    public Matrix4f initRotation(Vector3f forward, Vector3f up, Vector3f right) {
        m[0] = new float[]{right.x, right.y, right.z, 0};
        m[1] = new float[]{up.x, up.y, up.z, 0};
        m[2] = new float[]{forward.x, forward.y, forward.z, 0};
        m[3] = new float[]{0, 0, 0, 1};
        return this;
    }

    /**
     * Applies this matrix to a point (w assumed 1: the translation column is
     * added; no perspective divide).
     */
    public Vector3f transform(Vector3f r) {
        return new Vector3f(m[0][0] * r.x + m[0][1] * r.y + m[0][2] * r.z + m[0][3],
                m[1][0] * r.x + m[1][1] * r.y + m[1][2] * r.z + m[1][3],
                m[2][0] * r.x + m[2][1] * r.y + m[2][2] * r.z + m[2][3]);
    }

    /** Returns this * r as a new matrix; neither operand is modified. */
    public Matrix4f mul(Matrix4f r) {
        Matrix4f res = new Matrix4f();
        for (int i = 0; i < 4; i++) {
            for (int j = 0; j < 4; j++) {
                res.set(i, j, m[i][0] * r.get(0, j)
                        + m[i][1] * r.get(1, j)
                        + m[i][2] * r.get(2, j)
                        + m[i][3] * r.get(3, j));
            }
        }
        return res;
    }

    /** Returns a defensive deep copy of the backing 4x4 array. */
    public float[][] getM() {
        float[][] res = new float[4][];
        for (int i = 0; i < 4; i++) {
            res[i] = m[i].clone();
        }
        return res;
    }

    /** Replaces the backing array (stored by reference, not copied). */
    public void setM(float[][] m) {
        this.m = m;
    }

    public float get(int x, int y) {
        return m[x][y];
    }

    public void set(int x, int y, float value) {
        m[x][y] = value;
    }

    /** Rows separated by newlines; output format unchanged from the original. */
    @Override
    public String toString() {
        return "[ " + m[0][0] + ", " +
                m[0][1] + ", " +
                m[0][2] + ", " +
                m[0][3] + "\n" +
                m[1][0] + ", " +
                m[1][1] + ", " +
                m[1][2] + ", " +
                m[1][3] + "\n" +
                m[2][0] + ", " +
                m[2][1] + ", " +
                m[2][2] + ", " +
                m[2][3] + "\n" +
                m[3][0] + ", " +
                m[3][1] + ", " +
                m[3][2] + ", " +
                m[3][3] + "]";
    }
}
| |
package poweraqua.fusion;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import poweraqua.WordNetJWNL.WordNet;
import poweraqua.core.model.myocmlmodel.OcmlInstance;
import poweraqua.core.model.myrdfmodel.RDFEntity;
import poweraqua.core.model.myrdfmodel.RDFEntityList;
import poweraqua.core.plugin.OntologyPlugin;
import poweraqua.core.tripleModel.linguisticTriple.QueryTriple;
import poweraqua.core.tripleModel.ontologyTriple.OntoTriple;
import poweraqua.core.utils.LabelSplitter;
import poweraqua.core.utils.StringUtils;
import poweraqua.indexingService.manager.MultiIndexManager;
import poweraqua.powermap.elementPhase.EntityMappingTable;
import poweraqua.powermap.elementPhase.SearchSemanticResult;
import poweraqua.powermap.elementPhase.SyntacticComponent;
import poweraqua.powermap.mappingModel.MappingSession;
import poweraqua.powermap.stringMetrics.stringMetricsComparator;
import poweraqua.powermap.triplePhase.OntoTripleBean;
import poweraqua.powermap.triplePhase.TripleMappingTable;
import poweraqua.powermap.triplePhase.TripleSimilarityService;
import poweraqua.serviceConfig.MultiOntologyManager;
public class FusionService
implements IFusionService
{
// Final fused answer across all query triples (set by mergeByQueryTriples).
protected FusedAnswerBean finalAnswerBean;
// Fused answers computed per query triple (filled by formRDFEntityEntries).
protected Map<QueryTriple, FusedAnswerBean> answerBeans;
// Syntactic matching helper bound to the current mapping session.
protected SyntacticComponent syntacticComponent;
// Current PowerMap mapping session (paths, index manager, ontology manager).
protected MappingSession session;
// String-metric comparator reused for every similarity check.
protected stringMetricsComparator stringMetricsComparator;
// Source of query triples and their ontology-level triple mappings.
protected TripleSimilarityService tripleSimilarityService;
// Memoization cache for index lookups, keyed by (cleaned) keyword.
protected Map<String, EntityMappingTable> entityMappingTablesByKeyword;
/** @return the similarity threshold used for approximate KB index searches */
public static float getAPPROXIMATE_STANDARD_THRESH_KB()
{
return APPROXIMATE_STANDARD_THRESH_KB;
}
/** Overrides the approximate-KB similarity threshold (global setting). */
public static void setAPPROXIMATE_STANDARD_THRESH_KB(float aAPPROXIMATE_STANDARD_THRESH_KB)
{
APPROXIMATE_STANDARD_THRESH_KB = aAPPROXIMATE_STANDARD_THRESH_KB;
}
// Default threshold for approximate (non-compound keyword) KB searches.
private static float APPROXIMATE_STANDARD_THRESH_KB = new Float(0.416D).floatValue();
// Lazily created WordNet wrapper; opened on demand, closed after merging.
private WordNet wordNet = null;
// Characters treated as special when cleaning labels for search
// (presumably consumed by cleanString(), which is not visible in this
// chunk — TODO confirm).
private static char[] specialChars = { '|', '{', '}', '(', ')', '_', '[', ']', '.', '-', '+', ',', ':', '?', '\\', '/', '"', '@' };
// Cap on the number of WordNet synonyms considered per keyword.
public static int MAX_NUM_WNSYNONYMS = 5;
// Diagnostics counters, reset per formRDFEntityEntries() run.
protected int numberOfLuceneSearchCalls;
protected int numberOfComparisons;
// Ontologies excluded from cross-ontology fusion because their answer sets
// are too large (> 500 answers, plus the single largest set when > 100).
Set<String> ignoredOntologies;
// Ontologies for which WordNet synonym expansion is skipped (> 200 answers).
Set<String> ignoredOntologiesSynonyms;
/**
 * Creates a fusion service bound to the given triple-similarity service
 * and initialises all per-run state (answer tables, metric comparator,
 * memoization caches, ignore sets).
 *
 * @param tripleSimilarityService supplies the query triples, their
 *        ontology mappings and the current mapping session
 */
public FusionService(TripleSimilarityService tripleSimilarityService)
{
    this.tripleSimilarityService = tripleSimilarityService;
    this.session = tripleSimilarityService.getMapSession();
    this.syntacticComponent = new SyntacticComponent(this.session);
    this.stringMetricsComparator = new stringMetricsComparator();
    this.finalAnswerBean = new FusedAnswerBean(this);
    this.answerBeans = new HashMap();
    this.entityMappingTablesByKeyword = new HashMap();
    this.ignoredOntologies = new HashSet();
    this.ignoredOntologiesSynonyms = new HashSet();
    // NOTE(review): the decompiled original read the current date here but
    // never used the values, and the year variable was declared inside the
    // catch block so the code did not compile. The lookup is kept as a
    // correctly-scoped best-effort no-op to preserve the original shape.
    try
    {
        Calendar calendar = GregorianCalendar.getInstance();
        int d = calendar.get(Calendar.DAY_OF_MONTH);
        int m = calendar.get(Calendar.MONTH);
        int y = calendar.get(Calendar.YEAR);
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
/** @return the triple-similarity service this fusion run is bound to */
public TripleSimilarityService getTripleSimilarityService()
{
return this.tripleSimilarityService;
}
/** @return entity-similarity comparisons made since the last reset */
public int getNumberOfComparisons()
{
return this.numberOfComparisons;
}
/** @return index (Lucene) search calls made since the last reset */
public int getNumberOfLuceneSearchCalls()
{
return this.numberOfLuceneSearchCalls;
}
/**
 * Merges the per-triple answer beans into finalAnswerBean. With a single
 * triple the fused bean is used as-is. With two triples the subject/object
 * keywords of both mapping tables decide the strategy: overlapping
 * subjects yield an intersection (or a union for question type 19),
 * otherwise the second triple is treated as a condition on the first.
 *
 * Fixed from the decompiled original, which used loop variables before
 * their (misplaced) declarations and did not compile.
 */
public void mergeByQueryTriples()
{
    if (this.answerBeans.size() == 1)
    {
        // Single triple: its fused bean is already the final answer.
        this.finalAnswerBean = ((FusedAnswerBean)this.answerBeans.get(this.tripleSimilarityService.getQueryTriples().get(0)));
        return;
    }
    try
    {
        if (this.wordNet == null) {
            this.wordNet = new WordNet(this.session.getRealpath());
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
    if (this.tripleSimilarityService.getQueryTriples().size() > 2) {
        System.out.println("ToDo: More than two triples for fusion");
    }
    System.out.println("Start merging by query triples");
    QueryTriple mainQueryTriple = (QueryTriple)this.tripleSimilarityService.getQueryTriples().get(0);
    QueryTriple auxQueryTriple = (QueryTriple)this.tripleSimilarityService.getQueryTriples().get(1);
    ArrayList<String> firstTermKeyword = new ArrayList();
    ArrayList<String> secondTermKeyword = new ArrayList();
    // Collect distinct subject/object keywords of the main triple's
    // non-IS-A ontology mappings.
    TripleMappingTable firstTable = (TripleMappingTable)this.tripleSimilarityService.getOntoKBTripleMappings().get(mainQueryTriple);
    for (String ontoUri : firstTable.getMappingTable().keySet())
    {
        List<OntoTripleBean> beanList = (List)firstTable.getMappingTable().get(ontoUri);
        for (OntoTripleBean bean : beanList) {
            if (!((OntoTriple)bean.getOntoTripleBean().get(0)).isIS_A_RELATION())
            {
                String firstTermKeyword_otb = ((OntoTriple)bean.getOntoTripleBean().get(0)).getFirstTerm().getEmt_keyword();
                String secondTermKeyword_otb = ((OntoTriple)bean.getOntoTripleBean().get(bean.getOntoTripleBean().size() - 1)).getSecondTerm().getEmt_keyword();
                if (!secondTermKeyword.contains(secondTermKeyword_otb)) {
                    secondTermKeyword.add(secondTermKeyword_otb);
                }
                // Fall back to the relation keyword when the subject has none.
                if (firstTermKeyword_otb.equals("")) {
                    firstTermKeyword_otb = ((OntoTriple)bean.getOntoTripleBean().get(0)).getRelation().getEmt_keyword();
                }
                if (!firstTermKeyword.contains(firstTermKeyword_otb)) {
                    firstTermKeyword.add(firstTermKeyword_otb);
                }
            }
        }
    }
    // Collect distinct subject keywords of the auxiliary triple's mappings.
    ArrayList<String> firstTermKeywordAux = new ArrayList();
    TripleMappingTable secondTable = (TripleMappingTable)this.tripleSimilarityService.getOntoKBTripleMappings().get(auxQueryTriple);
    for (String ontoUri : secondTable.getMappingTable().keySet())
    {
        List<OntoTripleBean> beanList = (List)secondTable.getMappingTable().get(ontoUri);
        for (OntoTripleBean bean : beanList)
        {
            String firstTermKeywordAux_otb = ((OntoTriple)bean.getOntoTripleBean().get(0)).getFirstTerm().getEmt_keyword();
            if (firstTermKeywordAux_otb.equals("")) {
                firstTermKeywordAux_otb = ((OntoTriple)bean.getOntoTripleBean().get(0)).getRelation().getEmt_keyword();
            }
            if (!firstTermKeywordAux.contains(firstTermKeywordAux_otb)) {
                firstTermKeywordAux.add(firstTermKeywordAux_otb);
            }
        }
    }
    System.out.println("are the subjects similar? " + firstTermKeyword.toString() + " vs " + firstTermKeywordAux.toString());
    System.out.println("are the objects and subjects similar? " + secondTermKeyword.toString() + " vs " + firstTermKeywordAux.toString());
    boolean intersection = false;
    boolean condition = false;
    // Intersection: the triples' subjects overlap, or both keywords appear
    // among the main triple's query terms.
    for (String firstTerm : firstTermKeyword)
    {
        for (String secondTerm : firstTermKeywordAux) {
            if (((mainQueryTriple.getQueryTerm().contains(firstTerm)) && (mainQueryTriple.getQueryTerm().contains(secondTerm))) || (firstTerm.equals(secondTerm)))
            {
                intersection = true;
                break;
            }
        }
    }
    // Condition: the main triple's object matches the auxiliary subject.
    for (String firstTerm : secondTermKeyword)
    {
        for (String secondTerm : firstTermKeywordAux) {
            if (firstTerm.equals(secondTerm))
            {
                condition = true;
                break;
            }
        }
    }
    if ((intersection) && (!condition))
    {
        if (mainQueryTriple.getTypeQuestion() == 19)
        {
            System.out.println("UNION ACROSS QUERY TRIPLES");
            this.finalAnswerBean = fuseSimilarClustersIntersection(mainQueryTriple, auxQueryTriple, true);
        }
        else
        {
            System.out.println("INTERSECTION");
            this.finalAnswerBean = fuseSimilarClustersIntersection(mainQueryTriple, auxQueryTriple, false);
        }
    }
    else
    {
        System.out.println("CONDITION");
        this.finalAnswerBean = fuseSimilarClustersConditional(mainQueryTriple, auxQueryTriple);
    }
    if (this.wordNet != null)
    {
        this.wordNet.closeDictionary();
        this.wordNet = null;
    }
}
/**
 * Writes the final answer URIs to a "sealsanswer.log" file for the SEALS
 * evaluation. When the fused bean holds no clustered answers, every
 * distinct raw answer of all query triples is logged instead; an empty
 * result logs the literal "null" line.
 *
 * Fixed from the decompiled original, which declared {@code path} twice
 * (non-compiling) and leaked the writer when a write threw.
 *
 * @param finalAnswerBean the bean whose answers are dumped
 */
private void createLog(FusedAnswerBean finalAnswerBean)
{
    System.out.println("Creating a log of answers for seals evaluation");
    ArrayList<String> answers = new ArrayList();
    if ((finalAnswerBean.getAnswers() != null) && (!finalAnswerBean.getAnswers().isEmpty()))
    {
        for (RDFEntityCluster cluster : finalAnswerBean.getAnswers()) {
            for (RDFEntityEntry entry : cluster.getEntries()) {
                answers.add(entry.getValue().getURI());
            }
        }
    }
    else
    {
        // No fused clusters: fall back to all raw answers, de-duplicated.
        RDFEntityList allanswers = new RDFEntityList();
        for (QueryTriple qt : this.tripleSimilarityService.getQueryTriples())
        {
            TripleMappingTable tripleMappingTable = (TripleMappingTable)this.tripleSimilarityService.getOntoKBTripleMappings().get(qt);
            allanswers.addAllRDFEntity(tripleMappingTable.getAllAnswersNoRepetitions());
        }
        answers.addAll(allanswers.getUris());
    }
    String path;
    if (this.session.getRealpath().equals("")) {
        path = "./logs/sealsanswer.log";
    } else {
        path = this.session.getRealpath() + "/WEB-INF/logs/sealsanswer.log";
    }
    // try-with-resources guarantees the writer is closed even on failure.
    try (Writer log = new BufferedWriter(new FileWriter(new File(path))))
    {
        log.write(System.currentTimeMillis() + "\n");
        for (String answer : answers) {
            log.write(answer + "\n");
        }
        if (answers.isEmpty()) {
            log.write("null \n");
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
/**
 * Clusters the answers of a single query triple across ontologies.
 * Every answer entity in the triple's mapping table becomes one
 * RDFEntityEntry per distinct URI (accumulating provenance for duplicate
 * URIs); ontologies with oversized answer sets are marked to be skipped
 * in later fusion; entries are sorted and then fused into clusters with
 * union semantics. The resulting bean is stored in {@code answerBeans}.
 *
 * Fixed from the decompiled original, which used loop variables before
 * their declarations and shared a single iterator variable between nested
 * loops (non-compiling artifacts).
 *
 * @param queryTriple the triple whose answers are clustered
 */
public void formRDFEntityEntries(QueryTriple queryTriple)
{
    Calendar calendarBefore = new GregorianCalendar();
    this.numberOfComparisons = 0;
    this.numberOfLuceneSearchCalls = 0;
    try
    {
        if (this.wordNet == null) {
            this.wordNet = new WordNet(this.session.getRealpath());
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
    Map<String, RDFEntityEntry> entryMap = new HashMap();
    List<RDFEntityEntry> entryList = new ArrayList();
    TripleMappingTable currentTripleMappingTable = (TripleMappingTable)this.tripleSimilarityService.getOntoKBTripleMappings().get(queryTriple);
    int maxAnswerSetSize = 0;
    String ontologyWithMaxAnswerSet = "";
    this.ignoredOntologies = new HashSet();
    this.ignoredOntologiesSynonyms = new HashSet();
    for (String ontology : currentTripleMappingTable.getMappingTable().keySet())
    {
        List<OntoTripleBean> ontoTripleBeans = (List)currentTripleMappingTable.getMappingTable().get(ontology);
        for (OntoTripleBean ontoTripleBean : ontoTripleBeans)
        {
            RDFEntityList answerList = ontoTripleBean.getAnswer_instances();
            // Track the ontology contributing the single largest answer set.
            if (answerList.size() > maxAnswerSetSize)
            {
                maxAnswerSetSize = answerList.size();
                ontologyWithMaxAnswerSet = ontology;
            }
            // Oversized sets are excluded from cross-ontology fusion (> 500)
            // and from WordNet synonym expansion (> 200).
            if (answerList.size() > 500) {
                this.ignoredOntologies.add(ontology);
            }
            if (answerList.size() > 200) {
                this.ignoredOntologiesSynonyms.add(ontology);
            }
            for (RDFEntity rdfEntity : answerList.getAllRDFEntities())
            {
                RDFEntityEntry entry = (RDFEntityEntry)entryMap.get(rdfEntity.getURI());
                if (entry == null)
                {
                    // First sighting of this URI: create its entry.
                    entry = new RDFEntityEntry(ontoTripleBean);
                    entry.setValue(rdfEntity);
                    entryMap.put(rdfEntity.getURI(), entry);
                    entryList.add(entry);
                    entry.setOntologyId(ontology);
                }
                else
                {
                    // Duplicate URI: accumulate provenance on the entry.
                    if (entry.getOntoTripleBean() != ontoTripleBean) {
                        entry.getOntoTripleBean().addBeans(ontoTripleBean);
                    }
                    if (rdfEntity.getRefers_to() != null) {
                        entry.addRefersToValue(rdfEntity.getRefers_to());
                    }
                }
            }
        }
    }
    if (maxAnswerSetSize > 100) {
        this.ignoredOntologies.add(ontologyWithMaxAnswerSet);
    }
    Collections.sort(entryList, new RDFEntityEntryComparator());
    Calendar calendarAfter = new GregorianCalendar();
    System.out.println("Before fusing similar");
    System.out.println("Time cost: " + (calendarAfter.getTimeInMillis() - calendarBefore.getTimeInMillis()));
    FusedAnswerBean answerBean = fuseSimilarClustersUnion(entryMap, entryList);
    this.answerBeans.put(queryTriple, answerBean);
    System.out.println("Number of clusters " + answerBean.getAnswers().size());
    calendarAfter = new GregorianCalendar();
    System.out.println("Time cost: " + (calendarAfter.getTimeInMillis() - calendarBefore.getTimeInMillis()));
}
/**
 * Fuses the answers of two query triples whose subjects overlap.
 * The smaller answer bean is iterated; for each of its clusters the other
 * bean is probed for equivalent entities (direct URI hit, index search on
 * local name/label, WordNet synonyms) and matching clusters are merged.
 * With {@code isOrQuery}=true (union) every cluster that found no match
 * is kept as an answer too; otherwise (intersection) only merged clusters
 * survive. The result is consolidated before returning.
 *
 * Fixed from the decompiled original, which declared locals twice and
 * used loop variables before their declarations (non-compiling); unused
 * dead locals were removed.
 *
 * @param firstQueryTriple  one of the two triples to fuse
 * @param secondQueryTriple the other triple
 * @param isOrQuery true = union semantics, false = intersection
 * @return the fused, consolidated answer bean
 */
private FusedAnswerBean fuseSimilarClustersIntersection(QueryTriple firstQueryTriple, QueryTriple secondQueryTriple, boolean isOrQuery)
{
    String keyword = "";
    List<EntityMappingTable> tablesSynonym = new ArrayList();
    List<String> syns = new ArrayList();
    Set<RDFEntityCluster> unmergedClusters = new HashSet();
    FusedAnswerBean answerBean = new FusedAnswerBean(this);
    // Iterate over the smaller bean and search within the larger one.
    FusedAnswerBean firstAnswerBean;
    FusedAnswerBean secondAnswerBean;
    if (((FusedAnswerBean)this.answerBeans.get(firstQueryTriple)).getAnswers().size() <= ((FusedAnswerBean)this.answerBeans.get(secondQueryTriple)).getAnswers().size())
    {
        firstAnswerBean = (FusedAnswerBean)this.answerBeans.get(firstQueryTriple);
        secondAnswerBean = (FusedAnswerBean)this.answerBeans.get(secondQueryTriple);
    }
    else
    {
        firstAnswerBean = (FusedAnswerBean)this.answerBeans.get(secondQueryTriple);
        secondAnswerBean = (FusedAnswerBean)this.answerBeans.get(firstQueryTriple);
    }
    unmergedClusters.addAll(firstAnswerBean.getAnswers());
    unmergedClusters.addAll(secondAnswerBean.getAnswers());
    Map<String, RDFEntityCluster> secondClusterMap = new HashMap();
    Set<RDFEntityCluster> clustersToMergeInto = new HashSet();
    Set<String> ontologyIdsMentioned = new HashSet();
    for (RDFEntityCluster currentCluster : firstAnswerBean.getAnswers()) {
        for (RDFEntityEntry entry : currentCluster.getEntries()) {
            ontologyIdsMentioned.add(entry.getOntologyId());
        }
    }
    try
    {
        Set<String> ontologyIdSet = new HashSet();
        ArrayList<SearchSemanticResult> searchResults = new ArrayList();
        // Index the second bean's entries by URI for direct hits.
        for (RDFEntityCluster currentCluster : secondAnswerBean.getAnswers())
        {
            for (RDFEntityEntry entry : currentCluster.getEntries())
            {
                secondClusterMap.put(entry.getValue().getURI(), currentCluster);
                ontologyIdsMentioned.add(entry.getOntologyId());
            }
        }
        for (RDFEntityCluster currentCluster : firstAnswerBean.getAnswers())
        {
            clustersToMergeInto.clear();
            for (RDFEntityEntry entry : currentCluster.getEntries())
            {
                ontologyIdSet.clear();
                tablesSynonym.clear();
                keyword = cleanString(entry.getValue().getLocalName());
                String uri = entry.getValue().getURI();
                // Direct URI match between the two beans.
                if (secondClusterMap.containsKey(uri)) {
                    clustersToMergeInto.add(secondClusterMap.get(uri));
                }
                if (ontologyIdsMentioned.size() > 1)
                {
                    System.out.println("Query: " + keyword);
                    EntityMappingTable tableByLocalName = getEntityMappingTableForKeyword(keyword);
                    System.out.println("Answers: " + tableByLocalName.getOntologyIDMappings().size());
                    keyword = cleanString(entry.getValue().getLabel());
                    EntityMappingTable tableByLabel = getEntityMappingTableForKeyword(keyword);
                    ontologyIdSet.addAll(tableByLocalName.getOntologyIDMappings());
                    ontologyIdSet.addAll(tableByLabel.getOntologyIDMappings());
                    syns.clear();
                    if (!this.ignoredOntologiesSynonyms.contains(entry.getOntologyId()))
                    {
                        try
                        {
                            boolean isWN = this.wordNet.Initialize(keyword.toLowerCase());
                            if ((isWN) && (this.wordNet.isIs_wordnetCompound())) {
                                syns = this.wordNet.getSynonyms(MAX_NUM_WNSYNONYMS);
                            }
                        }
                        catch (Exception e)
                        {
                            System.err.println("Keyword " + keyword + " caused a WordNet error");
                            e.printStackTrace();
                        }
                        // NOTE(review): leftover debugging trap from the
                        // original author; kept to preserve behavior.
                        if (keyword.toLowerCase().contains("mustang")) {
                            System.out.println("here");
                        }
                        for (String syn : syns)
                        {
                            EntityMappingTable tableSynonym = getEntityMappingTableForKeyword(syn);
                            tablesSynonym.add(tableSynonym);
                            ontologyIdSet.addAll(tableSynonym.getOntologyIDMappings());
                        }
                    }
                    for (String id : ontologyIdSet)
                    {
                        searchResults.clear();
                        if (tableByLocalName.getOntologyMappings(id) != null) {
                            searchResults.addAll(tableByLocalName.getOntologyMappings(id));
                        }
                        if (tableByLabel.getOntologyMappings(id) != null) {
                            searchResults.addAll(tableByLabel.getOntologyMappings(id));
                        }
                        for (EntityMappingTable tsyn : tablesSynonym) {
                            if (tsyn.getOntologyMappings(id) != null) {
                                searchResults.addAll(tsyn.getOntologyMappings(id));
                            }
                        }
                        for (SearchSemanticResult searchResult : searchResults)
                        {
                            String foundUri = searchResult.getEntity().getURI();
                            RDFEntityCluster foundCluster = (RDFEntityCluster)secondClusterMap.get(foundUri);
                            if (foundCluster != null)
                            {
                                if (uri.equals(foundUri))
                                {
                                    clustersToMergeInto.add(foundCluster);
                                    break;
                                }
                                // Never merge two entries from the same ontology.
                                boolean toCompare = true;
                                for (RDFEntityEntry secondEntry : foundCluster.getEntries()) {
                                    if (secondEntry.getOntologyId().equals(entry.getOntologyId()))
                                    {
                                        toCompare = false;
                                        break;
                                    }
                                }
                                if (toCompare) {
                                    for (RDFEntityEntry secondEntry : foundCluster.getEntries())
                                    {
                                        boolean areSimilar = areSimilar(entry.getValue(), secondEntry.getValue());
                                        System.out.println("keyword: " + keyword + " local name: " + secondEntry.getValue().getLocalName() + " label: " + secondEntry.getValue().getLabel() + " similarity: " + this.stringMetricsComparator.getSimilar1());
                                        if (areSimilar)
                                        {
                                            clustersToMergeInto.add(foundCluster);
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if (clustersToMergeInto.size() > 0)
            {
                RDFEntityCluster mergedCluster = currentCluster;
                unmergedClusters.remove(currentCluster);
                for (RDFEntityCluster toMerge : clustersToMergeInto)
                {
                    mergedCluster = mergedCluster.merge(toMerge);
                    unmergedClusters.remove(toMerge);
                }
                answerBean.addAnswer(mergedCluster);
            }
        }
        if (isOrQuery) {
            // Union semantics: also keep every cluster that found no match.
            for (RDFEntityCluster unmergedCluster : unmergedClusters) {
                answerBean.addAnswer(unmergedCluster);
            }
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
    answerBean.consolidate();
    return answerBean;
}
/**
 * Adds {@code cluster} to the set stored under {@code key}, creating the
 * set on first use. (The decompiled original declared {@code clusterSet}
 * twice, which does not compile.)
 *
 * @param map     URI → cluster-set index being built
 * @param key     entity URI
 * @param cluster cluster to register under the URI
 */
private void putToClusterSetByUriMap(Map<String, Set<RDFEntityCluster>> map, String key, RDFEntityCluster cluster)
{
    Set<RDFEntityCluster> clusterSet = (Set)map.get(key);
    if (clusterSet == null)
    {
        clusterSet = new HashSet();
        map.put(key, clusterSet);
    }
    clusterSet.add(cluster);
}
/**
 * Fuses a main triple's answers with a condition triple's answers: a main
 * cluster survives only when an entity it refers to matches a cluster of
 * the condition bean. The smaller bean is iterated and probed against a
 * URI index of the larger one; matching also uses index search on local
 * names/labels and WordNet synonyms. Per-entity look-ups are memoized in
 * found/not-found caches to avoid repeated searches.
 *
 * Fixed from the decompiled original, which declared try-scoped locals
 * inside the catch block, duplicated declarations, and used loop
 * variables before declaration (non-compiling); dead locals removed.
 *
 * @param mainQueryTriple      triple providing the candidate answers
 * @param conditionQueryTriple triple providing the filtering condition
 * @return the fused answer bean
 */
private FusedAnswerBean fuseSimilarClustersConditional(QueryTriple mainQueryTriple, QueryTriple conditionQueryTriple)
{
    String keyword = "";
    List<EntityMappingTable> tablesSynonym = new ArrayList();
    List<String> syns = new ArrayList();
    FusedAnswerBean answerBean = new FusedAnswerBean(this);
    FusedAnswerBean mainAnswerBean = (FusedAnswerBean)this.answerBeans.get(mainQueryTriple);
    FusedAnswerBean conditionAnswerBean = (FusedAnswerBean)this.answerBeans.get(conditionQueryTriple);
    Map<String, Set<RDFEntityCluster>> secondClusterMap = new HashMap();
    List<RDFEntityCluster> clustersToMergeInto = new ArrayList();
    Set<RDFEntity> alreadyLookedAndNotFound = new HashSet();
    Map<RDFEntity, Set<RDFEntityCluster>> alreadyLookedAndFound = new HashMap();
    try
    {
        Set<String> ontologyIdSet = new HashSet();
        ArrayList<SearchSemanticResult> searchResults = new ArrayList();
        Set<String> ontologyIdsMentioned = new HashSet();
        boolean useMainBean = true;
        // Iterate the smaller bean, index the larger one.
        FusedAnswerBean primaryAnswerBean;
        FusedAnswerBean auxiliaryAnswerBean;
        if (mainAnswerBean.getAnswers().size() <= conditionAnswerBean.getAnswers().size())
        {
            primaryAnswerBean = mainAnswerBean;
            auxiliaryAnswerBean = conditionAnswerBean;
        }
        else
        {
            primaryAnswerBean = conditionAnswerBean;
            auxiliaryAnswerBean = mainAnswerBean;
            useMainBean = false;
        }
        for (RDFEntityCluster currentCluster : mainAnswerBean.getAnswers()) {
            for (RDFEntityEntry entry : currentCluster.getEntries()) {
                ontologyIdsMentioned.add(entry.getOntologyId());
            }
        }
        // Index the auxiliary bean: by entry URI when the primary side is
        // the main bean, otherwise by the URIs the entries refer to.
        for (RDFEntityCluster currentCluster : auxiliaryAnswerBean.getAnswers())
        {
            for (RDFEntityEntry entry : currentCluster.getEntries())
            {
                ontologyIdsMentioned.add(entry.getOntologyId());
                if (useMainBean) {
                    putToClusterSetByUriMap(secondClusterMap, entry.getValue().getURI(), currentCluster);
                } else {
                    for (RDFEntity refTo : entry.getRefersToValues()) {
                        putToClusterSetByUriMap(secondClusterMap, refTo.getURI(), currentCluster);
                    }
                }
            }
        }
        for (RDFEntityCluster currentCluster : primaryAnswerBean.getAnswers())
        {
            clustersToMergeInto.clear();
            for (RDFEntityEntry entry : currentCluster.getEntries())
            {
                tablesSynonym.clear();
                ontologyIdSet.clear();
                if ((!useMainBean) || (!entry.getRefersToValues().isEmpty()))
                {
                    // Candidate entities: referred-to values on the main
                    // side, the entry's own value on the condition side.
                    List<RDFEntity> entitiesToCheck;
                    if (useMainBean)
                    {
                        entitiesToCheck = entry.getRefersToValues();
                    }
                    else
                    {
                        entitiesToCheck = new ArrayList();
                        entitiesToCheck.add(entry.getValue());
                    }
                    for (RDFEntity checkedEntity : entitiesToCheck)
                    {
                        String uri = checkedEntity.getURI();
                        if (!alreadyLookedAndNotFound.contains(checkedEntity))
                        {
                            // Reuse memoized earlier hits.
                            if (alreadyLookedAndFound.containsKey(checkedEntity))
                            {
                                for (RDFEntityCluster tmpCluster : alreadyLookedAndFound.get(checkedEntity)) {
                                    if (!clustersToMergeInto.contains(tmpCluster)) {
                                        clustersToMergeInto.add(tmpCluster);
                                    }
                                }
                            }
                            // Direct URI hits against the auxiliary index.
                            if (secondClusterMap.containsKey(uri)) {
                                for (RDFEntityCluster tmpCluster : secondClusterMap.get(uri)) {
                                    if (!clustersToMergeInto.contains(tmpCluster)) {
                                        clustersToMergeInto.add(tmpCluster);
                                    }
                                }
                            }
                            if (ontologyIdsMentioned.size() > 1)
                            {
                                keyword = null;
                                EntityMappingTable tableByLocalName = null;
                                EntityMappingTable tableByLabel = null;
                                try
                                {
                                    keyword = cleanString(checkedEntity.getLocalName());
                                }
                                catch (NullPointerException e)
                                {
                                    System.err.println(entry.getValue().getLabel());
                                    e.printStackTrace();
                                }
                                if (keyword != null)
                                {
                                    System.out.println("Query: " + keyword);
                                    tableByLocalName = getEntityMappingTableForKeyword(keyword);
                                    ontologyIdSet.addAll(tableByLocalName.getOntologyIDMappings());
                                    System.out.println("Answers: " + tableByLocalName.getOntologyIDMappings().size());
                                }
                                keyword = null;
                                try
                                {
                                    keyword = cleanString(checkedEntity.getLabel());
                                }
                                catch (NullPointerException e)
                                {
                                    System.err.println(entry.getValue().getLabel());
                                    e.printStackTrace();
                                }
                                if (keyword != null)
                                {
                                    System.out.println("Query: " + keyword);
                                    tableByLabel = getEntityMappingTableForKeyword(keyword);
                                    ontologyIdSet.addAll(tableByLabel.getOntologyIDMappings());
                                    syns.clear();
                                    if (!this.ignoredOntologiesSynonyms.contains(entry.getOntologyId()))
                                    {
                                        try
                                        {
                                            System.out.println("Looking for synonyms");
                                            boolean isWN = this.wordNet.Initialize(keyword.toLowerCase());
                                            if ((isWN) && (this.wordNet.isIs_wordnetCompound())) {
                                                syns = this.wordNet.getSynonyms(MAX_NUM_WNSYNONYMS);
                                            }
                                        }
                                        catch (Exception e)
                                        {
                                            System.err.println("Keyword " + keyword + " caused a WordNet error");
                                            e.printStackTrace();
                                        }
                                        for (String syn : syns)
                                        {
                                            System.out.println("\tsynonym query: " + syn);
                                            EntityMappingTable tableSynonym = getEntityMappingTableForKeyword(syn);
                                            tablesSynonym.add(tableSynonym);
                                            ontologyIdSet.addAll(tableSynonym.getOntologyIDMappings());
                                        }
                                    }
                                }
                                boolean lookedAndFound = false;
                                if (ontologyIdSet.size() == 0) {
                                    alreadyLookedAndNotFound.add(checkedEntity);
                                }
                                for (String id : ontologyIdSet)
                                {
                                    searchResults.clear();
                                    if ((tableByLocalName != null) &&
                                        (tableByLocalName.getOntologyMappings(id) != null)) {
                                        searchResults.addAll(tableByLocalName.getOntologyMappings(id));
                                    }
                                    if ((tableByLabel != null) &&
                                        (tableByLabel.getOntologyMappings(id) != null)) {
                                        searchResults.addAll(tableByLabel.getOntologyMappings(id));
                                    }
                                    for (EntityMappingTable tsyn : tablesSynonym) {
                                        if (tsyn.getOntologyMappings(id) != null) {
                                            searchResults.addAll(tsyn.getOntologyMappings(id));
                                        }
                                    }
                                    for (SearchSemanticResult searchResult : searchResults)
                                    {
                                        String foundUri = searchResult.getEntity().getURI();
                                        Set<RDFEntityCluster> foundClusters = (Set)secondClusterMap.get(foundUri);
                                        if ((foundClusters != null) &&
                                            (!foundClusters.isEmpty())) {
                                            for (RDFEntityCluster currentFoundCluster : foundClusters)
                                            {
                                                lookedAndFound = true;
                                                if (uri.equals(foundUri))
                                                {
                                                    if (clustersToMergeInto.contains(currentFoundCluster)) {
                                                        break;
                                                    }
                                                    clustersToMergeInto.add(currentFoundCluster); break;
                                                }
                                                // Never merge entries from the same ontology.
                                                boolean toCompare = true;
                                                for (RDFEntityEntry secondEntry : currentFoundCluster.getEntries()) {
                                                    if (secondEntry.getOntologyId().equals(entry.getOntologyId()))
                                                    {
                                                        toCompare = false;
                                                        break;
                                                    }
                                                }
                                                if (toCompare) {
                                                    for (RDFEntityEntry secondEntry : currentFoundCluster.getEntries())
                                                    {
                                                        boolean areSimilar;
                                                        if (useMainBean)
                                                        {
                                                            areSimilar = areSimilar(checkedEntity, secondEntry.getValue());
                                                            System.out.println("keyword: " + keyword + " local name: " + secondEntry.getValue().getLocalName() + " label: " + secondEntry.getValue().getLabel() + " similarity: " + this.stringMetricsComparator.getSimilar1());
                                                        }
                                                        else
                                                        {
                                                            areSimilar = false;
                                                            for (RDFEntity refTo : secondEntry.getRefersToValues())
                                                            {
                                                                areSimilar |= areSimilar(checkedEntity, refTo);
                                                                System.out.println("keyword: " + keyword + " local name: " + secondEntry.getValue().getLocalName() + " label: " + secondEntry.getValue().getLabel() + " similarity: " + this.stringMetricsComparator.getSimilar1());
                                                                if (areSimilar) {
                                                                    break;
                                                                }
                                                            }
                                                        }
                                                        if (areSimilar)
                                                        {
                                                            lookedAndFound = true;
                                                            if (clustersToMergeInto.contains(currentFoundCluster)) {
                                                                break;
                                                            }
                                                            clustersToMergeInto.add(currentFoundCluster); break;
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                                // Memoize the outcome for this entity.
                                if (!lookedAndFound)
                                {
                                    alreadyLookedAndNotFound.add(checkedEntity);
                                }
                                else
                                {
                                    System.out.println(checkedEntity.getLabel() + " added to the AlreadyFound table");
                                    if (clustersToMergeInto.size() > 0) {
                                        if (alreadyLookedAndFound.containsKey(checkedEntity))
                                        {
                                            Set<RDFEntityCluster> tmpCluster = alreadyLookedAndFound.get(checkedEntity);
                                            tmpCluster.addAll(clustersToMergeInto);
                                        }
                                        else
                                        {
                                            Set<RDFEntityCluster> tmpCluster = new HashSet(clustersToMergeInto);
                                            alreadyLookedAndFound.put(checkedEntity, tmpCluster);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            if (clustersToMergeInto.size() > 0) {
                if (useMainBean)
                {
                    // Keep the main cluster, enriched with the matching
                    // condition clusters' provenance.
                    for (RDFEntityCluster clusterToMergeInto : clustersToMergeInto) {
                        currentCluster.getOntoTripleBean().addBeans(clusterToMergeInto.getOntoTripleBean());
                    }
                    answerBean.addAnswer(currentCluster);
                }
                else
                {
                    // Primary side is the condition: the matched main-side
                    // clusters are the answers.
                    for (RDFEntityCluster clusterToMergeInto : clustersToMergeInto)
                    {
                        clusterToMergeInto.getOntoTripleBean().addBeans(currentCluster.getOntoTripleBean());
                        answerBean.addAnswer(clusterToMergeInto);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
    return answerBean;
}
/**
 * Looks up (and memoizes) the entity-mapping table for a keyword so each
 * distinct keyword hits the index at most once. Compound keywords use the
 * standard KB threshold, single words the approximate one.
 *
 * @param keyword cleaned keyword to search for
 * @return the (possibly cached) mapping table
 * @throws Exception propagated from the index search
 */
private EntityMappingTableForKeyword getEntityMappingTableForKeyword(String keyword)
    throws Exception
{
    if (this.entityMappingTablesByKeyword.containsKey(keyword)) {
        return (EntityMappingTable)this.entityMappingTablesByKeyword.get(keyword);
    }
    EntityMappingTable resultTable;
    if (StringUtils.isCompound(keyword)) {
        resultTable = this.session.getMultiIndexManager().searchEntityMappingsonKnowledgeBase(keyword, "equivalentMatching", SyntacticComponent.STANDARD_THRESH_KB, 2);
    } else {
        resultTable = this.session.getMultiIndexManager().searchEntityMappingsonKnowledgeBase(keyword, "equivalentMatching", getAPPROXIMATE_STANDARD_THRESH_KB(), 2);
    }
    this.numberOfLuceneSearchCalls += 1;
    this.entityMappingTablesByKeyword.put(keyword, resultTable);
    return resultTable;
}
/**
 * Fuses similar answer entries coming from different ontologies into
 * clusters (union semantics, single query triple). For each entry of a
 * non-ignored ontology, candidate equivalents in the other ontologies are
 * gathered via index search on local name/label and WordNet synonyms; the
 * closest match per ontology (ties broken by bag-of-words comparison of
 * full OCML instances) joins the entry's cluster. Every unclustered entry
 * becomes its own singleton cluster, and the result is sorted.
 *
 * Fixed from the decompiled original, in which the {@code cluster} loop
 * variable was used before its (post-loop) declaration and so did not
 * compile; two unused maps were removed.
 *
 * @param entryMap  URI → entry index for all answers of the triple
 * @param entryList sorted list of the same entries
 * @return answer bean containing the fused clusters
 */
private FusedAnswerBean fuseSimilarClustersUnion(Map<String, RDFEntityEntry> entryMap, List<RDFEntityEntry> entryList)
{
    String keyword = "";
    Map<String, EntityMappingTable> tablesSynonym = new HashMap();
    List<String> syns = new ArrayList();
    Map<RDFEntityEntry, RDFEntityCluster> clusteredEntryMap = new HashMap();
    List<RDFEntityEntry> similarEntries = new ArrayList();
    FusedAnswerBean answerBean = new FusedAnswerBean(this);
    try
    {
        Set<String> ontologyIdSet = new HashSet();
        ArrayList<SearchSemanticResult> searchResults = new ArrayList();
        Set<String> ontologyIdsMentioned = new HashSet();
        for (RDFEntityEntry entry : entryList) {
            ontologyIdsMentioned.add(entry.getOntologyId());
        }
        // Cross-ontology fusion only applies when answers come from more
        // than one ontology.
        if (ontologyIdsMentioned.size() > 1) {
            for (RDFEntityEntry entry : entryList)
            {
                OcmlInstance currentInstance = null;
                OcmlInstance foundInstance = null;
                OcmlInstance topInstance = null;
                // Cluster this entry will join (null until one is found).
                RDFEntityCluster cluster = null;
                if (!this.ignoredOntologies.contains(entry.getOntologyId()))
                {
                    ontologyIdSet.clear();
                    similarEntries.clear();
                    tablesSynonym.clear();
                    keyword = cleanString(entry.getValue().getLocalName());
                    String uri = entry.getValue().getURI();
                    System.out.println("Query: " + keyword);
                    EntityMappingTable tableByLocalName = getEntityMappingTableForKeyword(keyword);
                    System.out.println("Answers: " + tableByLocalName.getOntologyIDMappings().size());
                    keyword = cleanString(entry.getValue().getLabel());
                    EntityMappingTable tableByLabel = getEntityMappingTableForKeyword(keyword);
                    ontologyIdSet.addAll(tableByLocalName.getOntologyIDMappings());
                    ontologyIdSet.addAll(tableByLabel.getOntologyIDMappings());
                    syns.clear();
                    if (!this.ignoredOntologiesSynonyms.contains(entry.getOntologyId()))
                    {
                        try
                        {
                            // Skip WordNet for very long (5+ word) labels.
                            if (keyword.split(" ").length < 5)
                            {
                                boolean isWN = this.wordNet.Initialize(keyword.toLowerCase());
                                if ((isWN) && (this.wordNet.isIs_wordnetCompound())) {
                                    syns = this.wordNet.getSynonyms(MAX_NUM_WNSYNONYMS);
                                }
                            }
                        }
                        catch (Exception e)
                        {
                            System.err.println("Keyword " + keyword + " caused a WordNet error");
                            e.printStackTrace();
                        }
                        for (String syn : syns)
                        {
                            EntityMappingTable tableSynonym = getEntityMappingTableForKeyword(syn);
                            tablesSynonym.put(syn, tableSynonym);
                            ontologyIdSet.addAll(tableSynonym.getOntologyIDMappings());
                        }
                    }
                    for (String id : ontologyIdSet) {
                        if (!id.equals(entry.getOntologyId()))
                        {
                            searchResults.clear();
                            if (tableByLocalName.getOntologyMappings(id) != null) {
                                searchResults.addAll(tableByLocalName.getOntologyMappings(id));
                            }
                            if (tableByLabel.getOntologyMappings(id) != null) {
                                searchResults.addAll(tableByLabel.getOntologyMappings(id));
                            }
                            for (String syn : tablesSynonym.keySet())
                            {
                                EntityMappingTable tsyn = (EntityMappingTable)tablesSynonym.get(syn);
                                if (tsyn.getOntologyMappings(id) != null) {
                                    searchResults.addAll(tsyn.getOntologyMappings(id));
                                }
                            }
                            double maxscore = -1.0D;
                            RDFEntityEntry closestEntry = null;
                            for (SearchSemanticResult searchResult : searchResults)
                            {
                                String foundUri = searchResult.getEntity().getURI();
                                if (!uri.equals(foundUri))
                                {
                                    RDFEntityEntry foundEntry = (RDFEntityEntry)entryMap.get(foundUri);
                                    if (foundEntry != null)
                                    {
                                        boolean areSimilar = areSimilar(entry.getValue(), foundEntry.getValue());
                                        if (areSimilar) {
                                            if (this.stringMetricsComparator.getSimilar1() > maxscore)
                                            {
                                                maxscore = this.stringMetricsComparator.getSimilar1();
                                                closestEntry = foundEntry;
                                            }
                                            else if (this.stringMetricsComparator.getSimilar1() == maxscore)
                                            {
                                                if (!closestEntry.getValue().getURI().equals(foundEntry.getValue().getURI()))
                                                {
                                                    // Tie: break it by comparing the
                                                    // full property bags of the instances.
                                                    System.out.println("Compare using all properties");
                                                    if (currentInstance == null) {
                                                        currentInstance = getOcmlInstance(entry);
                                                    }
                                                    if (topInstance == null) {
                                                        topInstance = getOcmlInstance(closestEntry);
                                                    }
                                                    if (foundInstance == null) {
                                                        foundInstance = getOcmlInstance(foundEntry);
                                                    }
                                                    if (BagOfWordsEntityComparator.getSimilarity(currentInstance, foundInstance) > BagOfWordsEntityComparator.getSimilarity(currentInstance, topInstance))
                                                    {
                                                        topInstance = foundInstance;
                                                        closestEntry = foundEntry;
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            if (closestEntry != null)
                            {
                                similarEntries.add(closestEntry);
                                if (cluster == null) {
                                    cluster = (RDFEntityCluster)clusteredEntryMap.get(closestEntry);
                                }
                                System.out.println("Approved: keyword: " + keyword + " local name: " + closestEntry.getValue().getLocalName() + " label: " + closestEntry.getValue().getLabel());
                            }
                        }
                    }
                    if (similarEntries.size() > 0)
                    {
                        System.out.println("Merge them");
                        if (cluster == null)
                        {
                            cluster = new RDFEntityCluster(this);
                            answerBean.addAnswer(cluster);
                        }
                        if (cluster.addEntry(entry)) {
                            clusteredEntryMap.put(entry, cluster);
                        }
                        for (RDFEntityEntry similarEntry : similarEntries) {
                            if (cluster.addEntry(similarEntry)) {
                                clusteredEntryMap.put(similarEntry, cluster);
                            }
                        }
                    }
                }
            }
        }
        // Every unclustered entry becomes its own singleton cluster.
        System.out.println("Entries: " + entryList.size());
        entryList.removeAll(clusteredEntryMap.keySet());
        for (RDFEntityEntry entry : entryList)
        {
            RDFEntityCluster singleton = new RDFEntityCluster(this);
            singleton.addEntry(entry);
            answerBean.addAnswer(singleton);
        }
        System.out.println("Clusters: " + answerBean.getAnswers().size());
        answerBean.sortAnswers();
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
    return answerBean;
}
/**
 * Returns a new answer bean containing the same clusters as the final
 * answer bean, ordered by the given sort criterion. The internal
 * final answer bean itself is left unmodified.
 *
 * @param criterion the sort criterion understood by {@code FusedAnswerBean.sortAnswers(int)}
 * @return a sorted copy of the final answer bean
 */
public FusedAnswerBean getFinalAnswerBeanSortedBy(int criterion)
{
  FusedAnswerBean sortedCopy = new FusedAnswerBean(this);
  for (RDFEntityCluster answer : this.finalAnswerBean.getAnswers())
  {
    sortedCopy.addAnswer(answer);
  }
  sortedCopy.sortAnswers(criterion);
  return sortedCopy;
}
/**
 * Returns the final, fused answer bean produced by the clustering step.
 */
public FusedAnswerBean getFinalAnswerBean()
{
return this.finalAnswerBean;
}
/**
 * Returns the per-triple answer beans, keyed by the query triple that
 * produced each bean.
 */
public Map<QueryTriple, FusedAnswerBean> getAnswerBeanMap()
{
return this.answerBeans;
}
/**
 * Injects the service used for comparing query triples.
 *
 * @param service the triple similarity service to use
 */
public void setTripleSimilarityService(TripleSimilarityService service)
{
this.tripleSimilarityService = service;
}
/**
 * Decides whether two RDF entities likely denote the same thing by
 * cross-comparing their local names and labels, and finally by checking
 * WordNet synonyms of the first entity's label against the second label.
 *
 * <p>NOTE(review): the string comparator appears to be stateful — callers
 * elsewhere read {@code getSimilar1()} right after this method returns, so
 * the order of the comparisons below matters; do not reorder them.
 *
 * @param entity1 first entity to compare
 * @param entity2 second entity to compare
 * @return {@code true} as soon as any of the comparisons succeeds
 */
private boolean areSimilar(RDFEntity entity1, RDFEntity entity2)
{
// Count every invocation for statistics/diagnostics.
this.numberOfComparisons += 1;
if (this.stringMetricsComparator.stringSimilarity(entity1.getLocalName(), entity2.getLabel())) {
return true;
}
if (this.stringMetricsComparator.stringSimilarity(entity1.getLabel(), entity2.getLocalName())) {
return true;
}
if (this.stringMetricsComparator.stringSimilarity(entity1.getLabel(), entity2.getLabel())) {
return true;
}
try
{
// Fall back to WordNet: initialize with the cleaned, lower-cased label
// and test each synonym with the lighter-weight similarity check.
boolean isWN = this.wordNet.Initialize(cleanString(entity1.getLabel().toLowerCase()));
if (isWN) {
for (String syn : this.wordNet.getSynonyms()) {
if (this.stringMetricsComparator.stringSimilarityLight(syn, entity2.getLabel())) {
return true;
}
}
}
}
catch (Exception e)
{
// Best-effort: a WordNet failure must not abort the comparison.
e.printStackTrace();
}
return false;
}
/**
 * Resolves the OCML instance behind an entry by asking the plugin of the
 * ontology the entry belongs to.
 *
 * @param entry the entry whose backing instance should be loaded
 * @return the instance info, or {@code null} when the lookup fails
 */
private OcmlInstance getOcmlInstance(RDFEntityEntry entry)
{
  MultiOntologyManager manager = this.session.getMultiOntologyManager();
  try
  {
    OntologyPlugin ontologyPlugin = manager.getPlugin(entry.getOntologyId());
    return ontologyPlugin.getInstanceInfo(entry.getValue().getURI());
  }
  catch (Exception e)
  {
    e.printStackTrace();
    return null;
  }
}
/**
 * Builds a display title for a query triple in the form
 * {@code (term1,term2,...)-relation-secondTerm-thirdTerm}, where any
 * {@code null} part is rendered as the empty string.
 *
 * <p>Fixed: the original built the string with repeated {@code String}
 * concatenation in a loop (quadratic); a {@link StringBuilder} produces the
 * identical result in linear time.
 *
 * @param triple the triple to render
 * @return the formatted title
 */
private static String getQueryTripleTitle(QueryTriple triple)
{
  List<String> terms = triple.getQueryTerm();
  StringBuilder res = new StringBuilder("(");
  for (int i = 0; i < terms.size(); i++)
  {
    res.append(terms.get(i));
    // comma between terms, but not after the last one
    if (i < terms.size() - 1)
    {
      res.append(',');
    }
  }
  res.append(")-");
  if (triple.getRelation() != null)
  {
    res.append(triple.getRelation());
  }
  res.append('-');
  if (triple.getSecondTerm() != null)
  {
    res.append(triple.getSecondTerm());
  }
  res.append('-');
  if (triple.getThirdTerm() != null)
  {
    res.append(triple.getThirdTerm());
  }
  return res.toString();
}
/**
 * Normalizes a label for WordNet lookup: strips a trailing RDF datatype
 * suffix ("^^..."), replaces special characters with spaces, and splits
 * single-token labels on capital letters.
 *
 * @param val the raw label
 * @return the cleaned, space-separated label
 */
private static String cleanString(String val)
{
String res = val;
// Drop an RDF literal datatype suffix such as "value^^xsd:string".
if (res.indexOf("^^") != -1) {
res = res.substring(0, res.indexOf("^^"));
}
// Replace every occurrence of each special character with a space.
// NOTE(review): the "+ 1" assumes each specialChars entry is exactly one
// character wide — confirm against the field's declaration.
for (int i = 0; i < specialChars.length; i++) {
while (res.indexOf(specialChars[i]) != -1) {
res = res.substring(0, res.indexOf(specialChars[i])) + " " + res.substring(res.indexOf(specialChars[i]) + 1);
}
}
// A label that is still a single token (e.g. "NewOrleans") is split on caps.
if (!res.contains(" ")) {
res = LabelSplitter.splitOnCaps(res);
}
return res;
}
/**
 * Ad-hoc smoke test: cleans a sample label, then searches the knowledge
 * base for each keyword and prints every semantic search hit.
 *
 * <p>Fixed: removed the unused local {@code foundUri}, removed the dead
 * initializer of {@code keyword} ("French Republic" was immediately
 * overwritten), and replaced the deprecated boxing idiom
 * {@code new Float(0.1D).floatValue()} with the literal {@code 0.1f}.
 */
public static void main(String[] args)
  throws Exception
{
  System.out.println(cleanString("|NEWORLEANS@fig|"));
  MappingSession session = new MappingSession();
  String[] keywords = { "Wyoming" };
  for (String keyword : keywords)
  {
    EntityMappingTable table =
      session.getMultiIndexManager().searchEntityMappingsonKnowledgeBase(keyword, "equivalentMatching", 0.1f, 2);
    for (String id : table.getOntologyIDMappings())
    {
      ArrayList<SearchSemanticResult> searchResults = table.getOntologyMappings(id);
      for (SearchSemanticResult searchResult : searchResults)
      {
        System.out.println("keyword: " + keyword + " score: " + searchResult.getScore() + " local name: " + searchResult.getEntity().getLocalName() + " label: " + searchResult.getEntity().getLabel());
      }
    }
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.operations.utils;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.expressions.CallExpression;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.ExpressionDefaultVisitor;
import org.apache.flink.table.expressions.ResolvedExpression;
import org.apache.flink.table.expressions.ValueLiteralExpression;
import org.apache.flink.table.expressions.resolver.ExpressionResolver;
import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
import org.apache.flink.table.functions.FunctionDefinition;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.ValuesQueryOperation;
import org.apache.flink.table.types.CollectionDataType;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.FieldsDataType;
import org.apache.flink.table.types.KeyValueDataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging;
import org.apache.flink.table.types.utils.TypeConversions;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.MAP;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.NULL;
import static org.apache.flink.table.types.logical.LogicalTypeRoot.ROW;
import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast;
/** Utility class for creating valid {@link ValuesQueryOperation} operation. */
@Internal
class ValuesOperationFactory {

    /**
     * Creates a valid {@link ValuesQueryOperation} operation.
     *
     * <p>It derives a row type based on {@link LogicalTypeMerging}. It flattens any row
     * constructors. It does not flatten ROWs which are a result of e.g. a function call.
     *
     * <p>The resulting schema can be provided manually. If it is not, the schema will be
     * automatically derived from the types of the expressions.
     */
    QueryOperation create(
            @Nullable ResolvedSchema expectedSchema,
            List<ResolvedExpression> resolvedExpressions,
            ExpressionResolver.PostResolverFactory postResolverFactory) {
        List<List<ResolvedExpression>> resolvedRows = unwrapFromRowConstructor(resolvedExpressions);

        if (expectedSchema != null) {
            verifyAllSameSize(resolvedRows, expectedSchema.getColumnCount());
        }

        ResolvedSchema schema =
                Optional.ofNullable(expectedSchema).orElseGet(() -> extractSchema(resolvedRows));

        // Align every row with the derived (or user-provided) column types so that
        // all rows share a single row type.
        List<List<ResolvedExpression>> castedExpressions =
                resolvedRows.stream()
                        .map(
                                row ->
                                        convertTopLevelExpressionToExpectedRowType(
                                                postResolverFactory,
                                                schema.getColumnDataTypes(),
                                                row))
                        .collect(Collectors.toList());

        return new ValuesQueryOperation(castedExpressions, schema);
    }

    /** Derives a physical schema with generated field names (f0, f1, ...) from all rows. */
    private ResolvedSchema extractSchema(List<List<ResolvedExpression>> resolvedRows) {
        DataType[] dataTypes = findRowType(resolvedRows);
        String[] fieldNames =
                IntStream.range(0, dataTypes.length).mapToObj(i -> "f" + i).toArray(String[]::new);
        return ResolvedSchema.physical(fieldNames, dataTypes);
    }

    /**
     * Casts each expression of a row to the expected column type. Expressions whose logical
     * type already matches are kept as-is; a failing conversion raises a descriptive
     * {@link ValidationException}.
     */
    private List<ResolvedExpression> convertTopLevelExpressionToExpectedRowType(
            ExpressionResolver.PostResolverFactory postResolverFactory,
            List<DataType> dataTypes,
            List<ResolvedExpression> row) {
        return IntStream.range(0, row.size())
                .mapToObj(
                        i -> {
                            boolean typesMatch =
                                    row.get(i)
                                            .getOutputDataType()
                                            .getLogicalType()
                                            .equals(dataTypes.get(i).getLogicalType());
                            if (typesMatch) {
                                return row.get(i);
                            }

                            ResolvedExpression castedExpr = row.get(i);
                            DataType targetDataType = dataTypes.get(i);

                            return convertToExpectedType(
                                            castedExpr, targetDataType, postResolverFactory)
                                    .orElseThrow(
                                            () ->
                                                    new ValidationException(
                                                            String.format(
                                                                    "Could not cast the value of the %d column: [ %s ] of a row: %s to the requested type: %s",
                                                                    i,
                                                                    castedExpr.asSummaryString(),
                                                                    row.stream()
                                                                            .map(
                                                                                    ResolvedExpression
                                                                                            ::asSummaryString)
                                                                            .collect(
                                                                                    Collectors
                                                                                            .joining(
                                                                                                    ", ",
                                                                                                    "[ ",
                                                                                                    " ]")),
                                                                    targetDataType
                                                                            .getLogicalType()
                                                                            .asSummaryString())));
                        })
                .collect(Collectors.toList());
    }

    /**
     * Converts a single expression to the target type. Literals are converted in place when
     * possible; ROW/ARRAY/MAP constructors are converted element-wise; everything else falls
     * back to an explicit CAST. Returns {@link Optional#empty()} when no conversion exists.
     */
    private Optional<ResolvedExpression> convertToExpectedType(
            ResolvedExpression sourceExpression,
            DataType targetDataType,
            ExpressionResolver.PostResolverFactory postResolverFactory) {
        LogicalType sourceLogicalType = sourceExpression.getOutputDataType().getLogicalType();
        LogicalType targetLogicalType = targetDataType.getLogicalType();

        // if the expression is a literal try converting the literal in place instead of casting
        if (sourceExpression instanceof ValueLiteralExpression) {
            // Assign a type to a null literal
            if (sourceLogicalType.is(NULL)) {
                return Optional.of(valueLiteral(null, targetDataType));
            }

            // Check if the source value class is a valid input conversion class of the target type
            // It may happen that a user wanted to use a secondary input conversion class as a value
            // for
            // a different type than what we derived.
            //
            // Example: we interpreted 1L as BIGINT, but user wanted to interpret it as a TIMESTAMP
            // In this case long is a valid conversion class for TIMESTAMP, but a
            // cast from BIGINT to TIMESTAMP is an invalid operation.
            Optional<Object> value =
                    ((ValueLiteralExpression) sourceExpression).getValueAs(Object.class);
            if (value.isPresent()
                    && targetLogicalType.supportsInputConversion(value.get().getClass())) {
                ValueLiteralExpression convertedLiteral =
                        valueLiteral(
                                value.get(),
                                targetDataType.notNull().bridgedTo(value.get().getClass()));
                if (targetLogicalType.isNullable()) {
                    return Optional.of(postResolverFactory.cast(convertedLiteral, targetDataType));
                } else {
                    return Optional.of(convertedLiteral);
                }
            }
        }

        if (sourceExpression instanceof CallExpression) {
            FunctionDefinition functionDefinition =
                    ((CallExpression) sourceExpression).getFunctionDefinition();
            if (functionDefinition == BuiltInFunctionDefinitions.ROW && targetLogicalType.is(ROW)) {
                return convertRowToExpectedType(
                        sourceExpression, (FieldsDataType) targetDataType, postResolverFactory);
            } else if (functionDefinition == BuiltInFunctionDefinitions.ARRAY
                    && targetLogicalType.is(ARRAY)) {
                return convertArrayToExpectedType(
                        sourceExpression, (CollectionDataType) targetDataType, postResolverFactory);
            } else if (functionDefinition == BuiltInFunctionDefinitions.MAP
                    && targetLogicalType.is(MAP)) {
                return convertMapToExpectedType(
                        sourceExpression, (KeyValueDataType) targetDataType, postResolverFactory);
            }
        }

        // We might not be able to cast to the expected type if the expected type was provided by
        // the user
        // we ignore nullability constraints here, as we let users override what we expect there,
        // e.g. they
        // might know that a certain function will not produce nullable values for a given input
        if (supportsExplicitCast(sourceLogicalType.copy(true), targetLogicalType.copy(true))) {
            return Optional.of(postResolverFactory.cast(sourceExpression, targetDataType));
        } else {
            return Optional.empty();
        }
    }

    /** Converts the children of a ROW constructor field-by-field to the target row type. */
    private Optional<ResolvedExpression> convertRowToExpectedType(
            ResolvedExpression sourceExpression,
            FieldsDataType targetDataType,
            ExpressionResolver.PostResolverFactory postResolverFactory) {
        List<DataType> targetDataTypes = targetDataType.getChildren();
        List<ResolvedExpression> resolvedChildren = sourceExpression.getResolvedChildren();

        if (resolvedChildren.size() != targetDataTypes.size()) {
            return Optional.empty();
        }

        ResolvedExpression[] castedChildren = new ResolvedExpression[resolvedChildren.size()];
        for (int i = 0; i < resolvedChildren.size(); i++) {
            boolean typesMatch =
                    resolvedChildren
                            .get(i)
                            .getOutputDataType()
                            .getLogicalType()
                            .equals(targetDataTypes.get(i).getLogicalType());
            if (typesMatch) {
                // Fixed: without this 'continue' the already-matching child was
                // re-converted below, overwriting it with a redundant CAST.
                castedChildren[i] = resolvedChildren.get(i);
                continue;
            }

            ResolvedExpression child = resolvedChildren.get(i);
            DataType targetChildDataType = targetDataTypes.get(i);

            Optional<ResolvedExpression> castedChild =
                    convertToExpectedType(child, targetChildDataType, postResolverFactory);

            if (!castedChild.isPresent()) {
                return Optional.empty();
            } else {
                castedChildren[i] = castedChild.get();
            }
        }

        return Optional.of(postResolverFactory.row(targetDataType, castedChildren));
    }

    /** Converts the elements of an ARRAY constructor to the target element type. */
    private Optional<ResolvedExpression> convertArrayToExpectedType(
            ResolvedExpression sourceExpression,
            CollectionDataType targetDataType,
            ExpressionResolver.PostResolverFactory postResolverFactory) {
        DataType elementTargetDataType = targetDataType.getElementDataType();
        List<ResolvedExpression> resolvedChildren = sourceExpression.getResolvedChildren();
        ResolvedExpression[] castedChildren = new ResolvedExpression[resolvedChildren.size()];
        for (int i = 0; i < resolvedChildren.size(); i++) {
            Optional<ResolvedExpression> castedChild =
                    convertToExpectedType(
                            resolvedChildren.get(i), elementTargetDataType, postResolverFactory);
            if (castedChild.isPresent()) {
                castedChildren[i] = castedChild.get();
            } else {
                return Optional.empty();
            }
        }
        return Optional.of(postResolverFactory.array(targetDataType, castedChildren));
    }

    /**
     * Converts the children of a MAP constructor to the target key/value types. Children
     * alternate key, value, key, value, ... — even indices are keys, odd indices are values.
     */
    private Optional<ResolvedExpression> convertMapToExpectedType(
            ResolvedExpression sourceExpression,
            KeyValueDataType targetDataType,
            ExpressionResolver.PostResolverFactory postResolverFactory) {
        DataType keyTargetDataType = targetDataType.getKeyDataType();
        DataType valueTargetDataType = targetDataType.getValueDataType();
        List<ResolvedExpression> resolvedChildren = sourceExpression.getResolvedChildren();
        ResolvedExpression[] castedChildren = new ResolvedExpression[resolvedChildren.size()];
        for (int i = 0; i < resolvedChildren.size(); i++) {
            Optional<ResolvedExpression> castedChild =
                    convertToExpectedType(
                            resolvedChildren.get(i),
                            i % 2 == 0 ? keyTargetDataType : valueTargetDataType,
                            postResolverFactory);
            if (castedChild.isPresent()) {
                castedChildren[i] = castedChild.get();
            } else {
                return Optional.empty();
            }
        }
        return Optional.of(postResolverFactory.map(targetDataType, castedChildren));
    }

    /**
     * Flattens top-level ROW constructor calls into their children; any other expression
     * becomes a single-element row.
     */
    private List<List<ResolvedExpression>> unwrapFromRowConstructor(
            List<ResolvedExpression> resolvedExpressions) {
        return resolvedExpressions.stream()
                .map(
                        expr ->
                                expr.accept(
                                        new ExpressionDefaultVisitor<List<ResolvedExpression>>() {
                                            @Override
                                            public List<ResolvedExpression> visit(
                                                    CallExpression call) {
                                                if (call.getFunctionDefinition()
                                                        == BuiltInFunctionDefinitions.ROW) {
                                                    return call.getResolvedChildren();
                                                }
                                                return defaultMethod(call);
                                            }

                                            @Override
                                            protected List<ResolvedExpression> defaultMethod(
                                                    Expression expression) {
                                                if (!(expression instanceof ResolvedExpression)) {
                                                    throw new TableException(
                                                            "This visitor is applied to ResolvedExpressions. We should never end up here.");
                                                }
                                                return Collections.singletonList(
                                                        (ResolvedExpression) expression);
                                            }
                                        }))
                .collect(Collectors.toList());
    }

    /** Finds the common data type for every column position across all rows. */
    private DataType[] findRowType(List<List<ResolvedExpression>> resolvedRows) {
        int rowSize = findRowSize(resolvedRows);
        DataType[] dataTypes = new DataType[rowSize];
        IntStream.range(0, rowSize)
                .forEach(
                        i -> {
                            dataTypes[i] = findCommonTypeAtPosition(resolvedRows, i);
                        });
        return dataTypes;
    }

    /** Merges the logical types of column {@code i} of all rows into a common super type. */
    private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
        List<LogicalType> typesAtIPosition = extractLogicalTypesAtPosition(resolvedRows, i);
        LogicalType logicalType =
                LogicalTypeMerging.findCommonType(typesAtIPosition)
                        .orElseThrow(
                                () -> {
                                    Set<DataType> columnTypes =
                                            resolvedRows.stream()
                                                    .map(row -> row.get(i).getOutputDataType())
                                                    .collect(
                                                            Collectors.toCollection(
                                                                    LinkedHashSet::new));

                                    return new ValidationException(
                                            String.format(
                                                    "Types in fromValues(...) must have a common super type. Could not find a common type"
                                                            + " for all rows at column %d.\n"
                                                            + "Could not find a common super type for types: %s",
                                                    i, columnTypes));
                                });

        return TypeConversions.fromLogicalToDataType(logicalType);
    }

    /** Collects the logical type of column {@code rowPosition} from every row. */
    private List<LogicalType> extractLogicalTypesAtPosition(
            List<List<ResolvedExpression>> resolvedRows, int rowPosition) {
        List<LogicalType> typesAtIPosition = new ArrayList<>();
        for (List<ResolvedExpression> resolvedExpression : resolvedRows) {
            LogicalType outputLogicalType =
                    resolvedExpression.get(rowPosition).getOutputDataType().getLogicalType();
            typesAtIPosition.add(outputLogicalType);
        }
        return typesAtIPosition;
    }

    /** Returns the size of the first row after verifying that all rows agree on it. */
    private int findRowSize(List<List<ResolvedExpression>> resolvedRows) {
        List<ResolvedExpression> firstRow = resolvedRows.get(0);
        int potentialRowSize = firstRow.size();
        verifyAllSameSize(resolvedRows, potentialRowSize);
        return potentialRowSize;
    }

    /** Throws a {@link ValidationException} if any row deviates from the expected size. */
    private void verifyAllSameSize(
            List<List<ResolvedExpression>> resolvedRows, int potentialRowSize) {
        Optional<List<ResolvedExpression>> differentSizeRow =
                resolvedRows.stream().filter(row -> row.size() != potentialRowSize).findAny();
        if (differentSizeRow.isPresent()) {
            throw new ValidationException(
                    String.format(
                            "All rows in a fromValues(...) clause must have the same fields number. Row %s has a"
                                    + " different length than the expected size: %d.",
                            differentSizeRow.get(), potentialRowSize));
        }
    }
}
| |
/*
* ServerSideTableSearcher.java
*
* This file is part of SQL Workbench/J, http://www.sql-workbench.net
*
* Copyright 2002-2015, Thomas Kellerer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* To contact the author please send an email to: support@sql-workbench.net
*
*/
package workbench.db.search;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import workbench.WbManager;
import workbench.interfaces.TableSearchConsumer;
import workbench.log.LogMgr;
import workbench.db.DbMetadata;
import workbench.db.TableDefinition;
import workbench.db.TableIdentifier;
import workbench.db.TableSelectBuilder;
import workbench.db.WbConnection;
import workbench.storage.DataStore;
import workbench.storage.filter.ColumnExpression;
import workbench.storage.filter.ContainsComparator;
import workbench.util.ExceptionUtil;
import workbench.util.SqlUtil;
import workbench.util.StringUtil;
import workbench.util.WbThread;
/**
* An implementation of the TableSearcher interface that uses SELECT
* statements with a LIKE condition to search for data.
*
* @author Thomas Kellerer
*/
public class ServerSideTableSearcher
implements TableDataSearcher
{
private List<TableIdentifier> tablesToSearch;
private String columnFunction;
private TableSearchConsumer display;
private String criteria;
private WbConnection connection;
private boolean cancelSearch = false;
private boolean isRunning = false;
private Statement query = null;
private Thread searchThread;
private int maxRows = 0;
private boolean retrieveLobColumns = true;
private DataStore result = null;
@Override
public void startBackgroundSearch()
{
this.cancelSearch = false;
this.searchThread = new WbThread("TableSearcher Thread")
{
@Override
public void run()
{
search();
}
};
this.searchThread.start();
}
@Override
public void cancelSearch()
{
this.cancelSearch = true;
try
{
if (this.searchThread != null) this.searchThread.interrupt();
if (this.query != null)
{
this.query.cancel();
}
if (this.result != null)
{
result.cancelRetrieve();
}
}
catch (Throwable e)
{
LogMgr.logWarning("TableSearcher.cancelSearc()", "Error when cancelling", e);
}
}
private void setRunning(boolean aFlag)
{
this.isRunning = aFlag;
if (this.display != null)
{
if (aFlag) this.display.searchStarted();
else this.display.searchEnded();
}
if (!aFlag) this.cancelSearch = false;
}
@Override
public boolean isRunning()
{
return this.isRunning;
}
@Override
public void search()
{
if (this.tablesToSearch == null || this.tablesToSearch.isEmpty()) return;
this.setRunning(true);
try
{
this.connection.setBusy(true);
long total = tablesToSearch.size();
long current = 1;
for (TableIdentifier tbl : tablesToSearch)
{
this.searchTable(tbl, current, total);
if (this.cancelSearch) break;
current ++;
}
if (this.display != null) this.display.setStatusText("");
}
catch (Throwable th)
{
LogMgr.logError("TableSearcher.doSearch()", "Error searching database", th);
}
finally
{
this.setRunning(false);
this.connection.setBusy(false);
}
}
@Override
public void setRetrieveLobColumns(boolean flag)
{
this.retrieveLobColumns = flag;
}
private void searchTable(TableIdentifier table, long current, long total)
{
ResultSet rs = null;
Savepoint sp = null;
boolean useSavepoint = connection.getDbSettings().useSavePointForDML();
try
{
String sql = this.buildSqlForTable(table);
if (this.display != null) this.display.setCurrentTable(table.getTableExpression(), sql, current, total);
if (sql == null) return;
if (!connection.getAutoCommit() && useSavepoint)
{
try
{
sp = connection.setSavepoint();
}
catch (SQLException e)
{
LogMgr.logWarning("TableSearcher.searchTable()", "Could not create savepoint", e);
sp = null;
useSavepoint = false;
}
}
this.query = this.connection.createStatementForQuery();
this.query.setMaxRows(this.maxRows);
rs = this.query.executeQuery(sql);
result = new DataStore(rs, this.connection, true);
result.setGeneratingSql(sql);
result.setResultName(table.getTableName());
result.setUpdateTableToBeUsed(table);
if (this.display != null) this.display.tableSearched(table, result);
result = null;
if (sp != null)
{
connection.releaseSavepoint(sp);
sp = null;
}
}
catch (OutOfMemoryError mem)
{
WbManager.getInstance().showOutOfMemoryError();
}
catch (Exception e)
{
LogMgr.logError("TableSearcher.searchTable()", "Error retrieving data for " + table.getTableExpression(), e);
if (this.display != null) this.display.error(ExceptionUtil.getDisplay(e));
if (sp != null)
{
connection.rollback(sp);
}
}
finally
{
SqlUtil.closeAll(rs, query);
this.query = null;
if (sp != null)
{
connection.releaseSavepoint(sp);
}
}
}
private boolean isSearchable(int sqlType, String dbmsType)
{
if (sqlType == Types.VARCHAR || sqlType == Types.CHAR ||
sqlType == Types.NVARCHAR || sqlType == Types.NCHAR)
{
return true;
}
return connection.getDbSettings().isSearchable(dbmsType);
}
private String buildSqlForTable(TableIdentifier tbl)
throws SQLException
{
DbMetadata meta = this.connection.getMetadata();
TableDefinition def = meta.getTableDefinition(tbl, false);
return buildSqlForTable(def, "tablesearch");
}
public String buildSqlForTable(TableDefinition def, String sqlTemplateKey)
throws SQLException
{
int colCount = def.getColumnCount();
if (colCount == 0) return StringUtil.EMPTY_STRING;
StringBuilder sql = new StringBuilder(colCount * 120);
TableSelectBuilder builder = new TableSelectBuilder(this.connection, sqlTemplateKey);
builder.setIncludeBLOBColumns(this.retrieveLobColumns);
builder.setIncludeCLOBColumns(this.retrieveLobColumns);
sql.append(builder.getSelectForColumns(def.getTable(), def.getColumns(), -1));
sql.append("\n WHERE ");
boolean first = true;
int colcount = 0;
Pattern aliasPattern = Pattern.compile("\\s+AS\\s+", Pattern.CASE_INSENSITIVE);
for (int i=0; i < colCount; i++)
{
String colName = def.getColumns().get(i).getColumnName();
String dbmsType = def.getColumns().get(i).getDbmsType();
int sqlType = def.getColumns().get(i).getDataType();
String expr = builder.getColumnExpression(def.getColumns().get(i));
boolean isExpression = !colName.equalsIgnoreCase(expr);
if (isExpression || isSearchable(sqlType, dbmsType))
{
if (!isExpression)
{
expr = this.connection.getMetadata().quoteObjectname(colName);
}
else
{
// Check if the column expression was defined with a column alias
// in that case we have to remove the alias otherwise it cannot be
// used in a WHERE condition
Matcher m = aliasPattern.matcher(expr);
if (m.find())
{
int pos = m.start();
expr = expr.substring(0, pos);
}
}
colcount ++;
if (!first)
{
sql.append(" OR ");
}
if (this.columnFunction != null)
{
sql.append(StringUtil.replace(this.columnFunction, "$col$", expr));
}
else
{
sql.append(expr);
}
sql.append(" LIKE '");
sql.append(this.criteria);
sql.append('\'');
if (i < colCount - 1) sql.append('\n');
first = false;
}
}
if (colcount == 0)
{
LogMgr.logWarning("TableSearcher.buildSqlForTable()", "Table " + def.getTable().getTableExpression() + " not beeing searched because no character columns were found");
return null;
}
else
{
return sql.toString();
}
}
public boolean isCaseSensitive()
{
if (this.columnFunction == null) return false;
if (this.criteria == null) return false;
boolean sensitive = this.connection.getDbSettings().isStringComparisonCaseSensitive();
if (!sensitive) return true;
String func = this.columnFunction.toLowerCase();
// upper() lower() is for Oracle, Postgres, Firebird/Interbase and MS SQL Server
// lcase, ucase is for Access and HSQLDB
if (func.indexOf("upper") > -1 || func.indexOf("ucase") > -1)
{
return this.criteria.toUpperCase().equals(this.criteria);
}
if (func.indexOf("lower") > -1 || func.indexOf("lcase") > -1)
{
return this.criteria.toLowerCase().equals(this.criteria);
}
return false;
}
public boolean setColumnFunction(String aColFunc)
{
this.columnFunction = null;
boolean setResult = false;
if (StringUtil.isNonBlank(aColFunc))
{
if (aColFunc.equalsIgnoreCase("$col$"))
{
this.columnFunction = null;
setResult = true;
}
else if (aColFunc.indexOf("$col$") > -1)
{
this.columnFunction = aColFunc;
setResult = true;
}
else if (aColFunc.indexOf("$COL$") > -1)
{
this.columnFunction = StringUtil.replace(aColFunc, "$COL$", "$col$");
setResult = true;
}
}
return setResult;
}
@Override
public void setTableNames(List<TableIdentifier> tables)
{
if (tables == null)
{
this.tablesToSearch = new ArrayList<>(0);
}
else
{
this.tablesToSearch = new ArrayList<>(tables);
}
}
public TableSearchConsumer getDisplay()
{
return display;
}
@Override
public void setConsumer(TableSearchConsumer searchDisplay)
{
this.display = searchDisplay;
}
@Override
public String getCriteria()
{
return criteria;
}
@Override
public void setCriteria(String aText, boolean ignoreCase)
{
if (aText == null) return;
this.criteria = StringUtil.trimQuotes(aText);
}
@Override
public void setConnection(WbConnection conn)
{
this.connection = conn;
}
@Override
public void setMaxRows(int max)
{
this.maxRows = max;
}
@Override
public ColumnExpression getSearchExpression()
{
String expressionPattern = StringUtil.trimQuotes(criteria.replaceAll("[%_]", ""));
ColumnExpression searchPattern = new ColumnExpression("*", new ContainsComparator(), expressionPattern);
searchPattern.setIgnoreCase(isCaseSensitive());
return searchPattern;
}
}
| |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.platform;
import com.intellij.CommonBundle;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.projectImport.ProjectAttachProcessor;
import com.intellij.projectImport.ProjectOpenedCallback;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
/**
* @author yole
*/
public class ModuleAttachProcessor extends ProjectAttachProcessor {
private static final Logger LOG = Logger.getInstance(ModuleAttachProcessor.class);
/**
 * Attaches the project located in {@code projectDir} to the already open
 * {@code project} by loading its .iml module file.
 *
 * <p>If {@code projectDir} does not exist yet, a throw-away project is
 * created there first (to generate the project files), saved and disposed
 * again. Then the first *.iml file found in the directory is attached as a
 * module. If no .iml file exists, the user is asked whether to open the
 * directory in a new window instead.
 *
 * @return {@code true} if the directory was handled here (module attached,
 *         or the user declined a new window); {@code false} if the caller
 *         should open the project in a new window
 */
@Override
public boolean attachToProject(Project project, File projectDir, @Nullable ProjectOpenedCallback callback) {
if (!projectDir.exists()) {
// Create a temporary project at the target location so that the
// standard project files (.idea, .iml) get generated.
Project newProject = ((ProjectManagerEx)ProjectManager.getInstance())
.newProject(projectDir.getParentFile().getName(), projectDir.getParent(), true, false);
if (newProject == null) {
return false;
}
final VirtualFile baseDir = LocalFileSystem.getInstance().refreshAndFindFileByPath(projectDir.getParent());
PlatformProjectOpenProcessor.runDirectoryProjectConfigurators(baseDir, newProject);
newProject.save();
// Dispose the temporary project under the write lock; only its files
// on disk are needed from here on.
// NOTE(review): acquireWriteActionLock is a legacy API — confirm a
// migration to WriteAction.run when touching this code.
AccessToken token = ApplicationManager.getApplication().acquireWriteActionLock(null);
try {
Disposer.dispose(newProject);
}
finally {
token.finish();
}
}
// Attach the first module file (*.iml) found in the directory.
final String[] files = projectDir.list();
if (files != null) {
for (String file : files) {
if (FileUtilRt.extensionEquals(file, "iml")) {
VirtualFile imlFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(projectDir, file));
if (imlFile != null) {
attachModule(project, imlFile, callback);
return true;
}
}
}
}
// No module file: offer to open the project in a new window instead.
// Yes -> return false so the platform opens it separately.
int rc = Messages.showYesNoDialog(project, "The project at " +
FileUtil.toSystemDependentName(projectDir.getPath()) +
" uses a non-standard layout and cannot be attached to this project. Would you like to open it in a new window?",
"Open Project", Messages.getQuestionIcon());
return rc != Messages.YES;
}
/**
 * Loads the module described by {@code imlFile} into {@code project},
 * adds it as a dependency of the project's primary module, updates the
 * VCS mappings for the new content root and finally notifies the
 * callback. Errors are logged and shown to the user in a dialog.
 */
private static void attachModule(Project project, VirtualFile imlFile, @Nullable ProjectOpenedCallback callback) {
try {
final ModifiableModuleModel model = ModuleManager.getInstance(project).getModifiableModel();
final Module module = model.loadModule(imlFile.getPath());
// Committing the module model requires a write action.
AccessToken token = WriteAction.start();
try {
model.commit();
}
finally {
token.finish();
}
// Re-fetch the module by name: after commit the committed instance is
// the one registered with the ModuleManager.
final Module newModule = ModuleManager.getInstance(project).findModuleByName(module.getName());
assert newModule != null;
final Module primaryModule = addPrimaryModuleDependency(project, newModule);
if (primaryModule != null) {
// The .iml file's parent is treated as the module's content root holder;
// its parent directory is used for the VCS mapping.
VirtualFile dotIdeaDir = imlFile.getParent();
if (dotIdeaDir != null) {
updateVcsMapping(primaryModule, dotIdeaDir.getParent());
}
}
if (callback != null) {
callback.projectOpened(project, newModule);
}
}
catch (Exception ex) {
LOG.info(ex);
Messages.showErrorDialog(project, "Cannot attach project: " + ex.getMessage(), CommonBundle.getErrorTitle());
}
}
/**
 * Adjusts the VCS directory mappings after a module has been attached.
 * If the project previously had a single mapping pointing at the primary module's only
 * content root and the added root uses the same VCS, the mapping is collapsed to the
 * {@code <Project Root>} mapping (empty directory). Otherwise a mapping for the added
 * content root is appended, provided a VCS could be detected for it.
 *
 * @param primaryModule          the project's primary module
 * @param addedModuleContentRoot content root of the module that was just attached
 */
private static void updateVcsMapping(Module primaryModule, VirtualFile addedModuleContentRoot) {
  final Project project = primaryModule.getProject();
  final ProjectLevelVcsManager vcsManager = ProjectLevelVcsManager.getInstance(project);
  final List<VcsDirectoryMapping> existingMappings = vcsManager.getDirectoryMappings();
  if (existingMappings.size() == 1) {
    final VcsDirectoryMapping onlyMapping = existingMappings.get(0);
    final VirtualFile[] primaryRoots = ModuleRootManager.getInstance(primaryModule).getContentRoots();
    final boolean mappedToPrimaryRoot =
      primaryRoots.length == 1 &&
      FileUtil.filesEqual(new File(primaryRoots[0].getPath()), new File(onlyMapping.getDirectory()));
    if (mappedToPrimaryRoot) {
      final AbstractVcs candidate = vcsManager.findVersioningVcs(addedModuleContentRoot);
      // Same VCS on both roots: replace the single mapping with <Project Root>.
      if (candidate != null && candidate.getName().equals(onlyMapping.getVcs())) {
        vcsManager.setDirectoryMappings(Arrays.asList(new VcsDirectoryMapping("", candidate.getName())));
        return;
      }
    }
  }
  final AbstractVcs addedVcs = vcsManager.findVersioningVcs(addedModuleContentRoot);
  if (addedVcs == null) {
    // No VCS detected for the new root; leave the mappings untouched.
    return;
  }
  final ArrayList<VcsDirectoryMapping> updatedMappings = new ArrayList<VcsDirectoryMapping>(existingMappings);
  updatedMappings.add(new VcsDirectoryMapping(addedModuleContentRoot.getPath(), addedVcs.getName()));
  vcsManager.setDirectoryMappings(updatedMappings);
}
/**
 * Adds a dependency from the project's primary module to {@code newModule}.
 *
 * @return the primary module the dependency was added to, or null when the project has
 *         no primary module or the new module is itself the primary one
 */
@Nullable
private static Module addPrimaryModuleDependency(Project project, @NotNull Module newModule) {
  final Module primary = getPrimaryModule(project);
  if (primary == null || primary == newModule) {
    return null;
  }
  ModuleRootModificationUtil.addDependency(primary, newModule);
  return primary;
}
/**
 * Finds the project's primary module: the module having the project base directory
 * among its content roots.
 *
 * @return the primary module, or null when attaching is disabled or no module's
 *         content root matches the project base directory
 */
@Nullable
public static Module getPrimaryModule(Project project) {
  if (!canAttachToProject()) {
    return null;
  }
  for (Module candidate : ModuleManager.getInstance(project).getModules()) {
    for (VirtualFile contentRoot : ModuleRootManager.getInstance(candidate).getContentRoots()) {
      if (Comparing.equal(contentRoot, project.getBaseDir())) {
        return candidate;
      }
    }
  }
  return null;
}
/**
 * Returns the project's modules with the primary module first (when present) and the
 * remaining modules sorted alphabetically by name.
 */
public static List<Module> getSortedModules(Project project) {
  final Module primaryModule = getPrimaryModule(project);
  final List<Module> sorted = new ArrayList<Module>();
  // Collect every module except the primary one; it is re-inserted at the front below.
  for (Module each : ModuleManager.getInstance(project).getModules()) {
    if (each != primaryModule) {
      sorted.add(each);
    }
  }
  Collections.sort(sorted, new Comparator<Module>() {
    @Override
    public int compare(Module first, Module second) {
      return first.getName().compareTo(second.getName());
    }
  });
  if (primaryModule != null) {
    sorted.add(0, primaryModule);
  }
  return sorted;
}
/**
 * Builds a display name for a multi-module project: the primary module's name followed
 * by the first other module, with an ellipsis when more than two modules exist
 * (e.g. {@code "core, util..."}).
 *
 * @param project the project
 * @return null if either multi-projects are not enabled or the project has only one module
 */
@Nullable
public static String getMultiProjectDisplayName(@NotNull Project project) {
  if (!ProjectAttachProcessor.canAttachToProject()) {
    return null;
  }
  final Module[] modules = ModuleManager.getInstance(project).getModules();
  if (modules.length <= 1) {
    return null;
  }
  Module primaryModule = getPrimaryModule(project);
  if (primaryModule == null) {
    // No content root matches the project base dir; fall back to the first module.
    primaryModule = modules[0];
  }
  final StringBuilder name = new StringBuilder(primaryModule.getName()).append(", ");
  // Append the first module that is not the primary one.
  for (Module each : modules) {
    if (each != primaryModule) {
      name.append(each.getName());
      break;
    }
  }
  if (modules.length > 2) {
    name.append("...");
  }
  return name.toString();
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.gvt;
import java.awt.Shape;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.batik.gvt.event.GraphicsNodeChangeAdapter;
import org.apache.batik.gvt.event.GraphicsNodeChangeEvent;
import org.apache.batik.ext.awt.image.renderable.Filter;
/**
* This class tracks the changes on a GVT tree
*
* @author <a href="mailto:Thomas.DeWeeese@Kodak.com">Thomas DeWeese</a>
* @version $Id$
*/
public class UpdateTracker extends GraphicsNodeChangeAdapter {

    // Maps WeakReference(GraphicsNode) -> AffineTransform: the node's transform captured
    // when its first change was reported. Null until the first change arrives, so a null
    // map doubles as the "nothing changed" flag (see hasChanged()).
    Map dirtyNodes = null;

    // Maps WeakReference(GraphicsNode) -> Rectangle2D: the node's bounds before the
    // change. NULL_RECT marks "changed but no previous bounds".
    Map fromBounds = new HashMap();

    // Sentinel distinguishing "no bounds" from a missing map entry; compared by identity.
    protected static Rectangle2D NULL_RECT = new Rectangle();

    public UpdateTracker(){
    }

    /**
     * Tells whether the GVT tree has changed.
     */
    public boolean hasChanged() {
        return (dirtyNodes != null);
    }

    /**
     * Returns the list of dirty areas on GVT.
     *
     * For every tracked node, its old region (under the transforms in effect at the
     * last render) and new region (under the current transforms) are propagated up to
     * the root and collected as Shapes in root space. The dirty-node and from-bounds
     * maps are cleared as a side effect.
     */
    public List getDirtyAreas() {
        if (dirtyNodes == null)
            return null;
        List ret = new LinkedList();
        Set keys = dirtyNodes.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            WeakReference gnWRef = (WeakReference)i.next();
            GraphicsNode gn = (GraphicsNode)gnWRef.get();
            // If the weak ref has been cleared then this node is no
            // longer part of the GVT tree (and the change should be
            // reflected in some ancestor that should also be in the
            // dirty list).
            if (gn == null) continue;
            // oat: transform chain in effect at the last render (old area).
            AffineTransform oat;
            oat = (AffineTransform)dirtyNodes.get(gnWRef);
            if (oat != null){
                // Copy before mutating: the map entry must stay untouched.
                oat = new AffineTransform(oat);
            }
            Rectangle2D srcORgn = (Rectangle2D)fromBounds.remove(gnWRef);
            Rectangle2D srcNRgn = null;
            // nat: current transform chain (new area).
            AffineTransform nat = null;
            if (!(srcORgn instanceof ChngSrcRect)) {
                // For change srcs don't use the new bounds of parent node.
                srcNRgn = gn.getBounds();
                nat = gn.getTransform();
                if (nat != null)
                    nat = new AffineTransform(nat);
            }
            // Walk up to the root, accumulating old (oat) and new (nat)
            // transforms; a filter on an ancestor replaces the new region
            // with the filter's extent.
            do {
                gn = gn.getParent();
                if (gn == null)
                    break; // We reached the top of the tree
                Filter f= gn.getFilter();
                if ( f != null) {
                    // The filter output covers the visible area; restart the
                    // new-region accumulation from its bounds.
                    srcNRgn = f.getBounds2D();
                    nat = null;
                }
                // Get the parent's current Affine
                AffineTransform at = gn.getTransform();
                // Get the parent's Affine last time we rendered.
                gnWRef = gn.getWeakReference();
                AffineTransform poat = (AffineTransform)dirtyNodes.get(gnWRef);
                if (poat == null) poat = at;
                if (poat != null) {
                    if (oat != null)
                        oat.preConcatenate(poat);
                    else
                        oat = new AffineTransform(poat);
                }
                if (at != null){
                    if (nat != null)
                        nat.preConcatenate(at);
                    else
                        nat = new AffineTransform(at);
                }
            } while (true);
            if (gn == null) {
                // We made it to the root graphics node so add them.
                Shape oRgn = srcORgn;
                if ((oRgn != null) && (oRgn != NULL_RECT)) {
                    if (oat != null)
                        oRgn = oat.createTransformedShape(srcORgn);
                    ret.add(oRgn);
                }
                if (srcNRgn != null) {
                    Shape nRgn = srcNRgn;
                    if (nat != null)
                        nRgn = nat.createTransformedShape(srcNRgn);
                    if (nRgn != null)
                        ret.add(nRgn);
                }
            }
        }
        fromBounds.clear();
        dirtyNodes.clear();
        return ret;
    }

    /**
     * This returns the dirty region for gn in the coordinate system
     * given by <code>at</code>.
     * @param gn Node tree to return dirty region for.
     * @param at Affine transform to coordinate space to accumulate
     *           dirty regions in.
     */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn,
                                          AffineTransform at) {
        WeakReference gnWRef = gn.getWeakReference();
        // Prefer the transform recorded when the change started; fall back to
        // the node's current transform.
        AffineTransform nat = (AffineTransform)dirtyNodes.get(gnWRef);
        if (nat == null) nat = gn.getTransform();
        if (nat != null) {
            // Copy before concatenating so the caller's transform is untouched.
            at = new AffineTransform(at);
            at.concatenate(nat);
        }
        Filter f= gn.getFilter();
        Rectangle2D ret = null;
        if (gn instanceof CompositeGraphicsNode) {
            CompositeGraphicsNode cgn = (CompositeGraphicsNode)gn;
            Iterator iter = cgn.iterator();
            // Union the children's dirty regions.
            while (iter.hasNext()) {
                GraphicsNode childGN = (GraphicsNode)iter.next();
                Rectangle2D r2d = getNodeDirtyRegion(childGN, at);
                if (r2d != null) {
                    if (f != null) {
                        // If we have a filter and a change region
                        // Update our full filter extents.
                        Shape s = at.createTransformedShape(f.getBounds2D());
                        ret = s.getBounds2D();
                        break;
                    }
                    if ((ret == null) || (ret == NULL_RECT)) ret = r2d;
                    else ret.add(r2d);
                }
            }
        } else {
            // Leaf node: use the recorded pre-change bounds when available,
            // otherwise the filter extent or current bounds.
            ret = (Rectangle2D)fromBounds.remove(gnWRef);
            if (ret == null) {
                if (f != null) ret = f.getBounds2D();
                else           ret = gn.getBounds();
            } else if (ret == NULL_RECT)
                ret = null;
            if (ret != null)
                ret = at.createTransformedShape(ret).getBounds2D();
        }
        return ret;
    }

    /**
     * Returns the dirty region for gn in its own root coordinate system
     * (identity transform).
     */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn) {
        return getNodeDirtyRegion(gn, new AffineTransform());
    }

    /**
     * Receives notification of a change to a GraphicsNode.
     * @param gnce The event object describing the GraphicsNode change.
     */
    public void changeStarted(GraphicsNodeChangeEvent gnce) {
        GraphicsNode gn = gnce.getGraphicsNode();
        WeakReference gnWRef = gn.getWeakReference();
        // Record the node's pre-change transform once, on its first change.
        boolean doPut = false;
        if (dirtyNodes == null) {
            dirtyNodes = new HashMap();
            doPut = true;
        } else if (!dirtyNodes.containsKey(gnWRef))
            doPut = true;
        if (doPut) {
            AffineTransform at = gn.getTransform();
            if (at != null) at = (AffineTransform)at.clone();
            else            at = new AffineTransform();
            dirtyNodes.put(gnWRef, at);
        }
        GraphicsNode chngSrc = gnce.getChangeSrc();
        Rectangle2D rgn = null;
        if (chngSrc != null) {
            // A child node is moving in the tree so assign it's dirty
            // regions to this node before it moves.
            Rectangle2D drgn = getNodeDirtyRegion(chngSrc);
            if (drgn != null)
                rgn = new ChngSrcRect(drgn);
        } else {
            // Otherwise just use gn's current region.
            rgn = gn.getBounds();
        }
        // Add this dirty region to any existing dirty region.
        Rectangle2D r2d = (Rectangle2D)fromBounds.remove(gnWRef);
        if (rgn != null) {
            if ((r2d != null) && (r2d != NULL_RECT)) {
                r2d.add(rgn);
            }
            else r2d = rgn;
        }
        // Store the bounds for the future (NULL_RECT when no region is known).
        if (r2d == null)
            r2d = NULL_RECT;
        fromBounds.put(gnWRef, r2d);
    }

    // Marker subclass: tags a pre-change region that came from a moving child
    // (change source), so getDirtyAreas() skips recomputing new bounds for it.
    class ChngSrcRect extends Rectangle2D.Float {
        ChngSrcRect(Rectangle2D r2d) {
            super((float)r2d.getX(), (float)r2d.getY(),
                  (float)r2d.getWidth(), (float)r2d.getHeight());
        }
    }

    /**
     * Clears the tracker.
     */
    public void clear() {
        dirtyNodes = null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.parse;
import java.lang.reflect.Constructor;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.ExpressionType;
import org.apache.phoenix.expression.function.AvgAggregateFunction;
import org.apache.phoenix.expression.function.CountAggregateFunction;
import org.apache.phoenix.expression.function.CurrentDateFunction;
import org.apache.phoenix.expression.function.CurrentTimeFunction;
import org.apache.phoenix.expression.function.DistinctCountAggregateFunction;
import org.apache.phoenix.expression.function.FunctionExpression;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunctionInfo;
import org.apache.phoenix.parse.JoinTableNode.JoinType;
import org.apache.phoenix.parse.LikeParseNode.LikeType;
import org.apache.phoenix.schema.PIndexState;
import org.apache.phoenix.schema.PTable.IndexType;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.TypeMismatchException;
import org.apache.phoenix.schema.stats.StatisticsCollectionScope;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PTimestamp;
import org.apache.phoenix.util.SchemaUtil;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
/**
*
* Factory used by parser to construct object model while parsing a SQL statement
*
*
* @since 0.1
*/
public class ParseNodeFactory {
private static final String ARRAY_ELEM = "ARRAY_ELEM";
// TODO: Use Google's Reflection library instead to find aggregate functions
@SuppressWarnings("unchecked")
private static final List<Class<? extends FunctionExpression>> CLIENT_SIDE_BUILT_IN_FUNCTIONS = Arrays.<Class<? extends FunctionExpression>>asList(
CurrentDateFunction.class,
CurrentTimeFunction.class,
AvgAggregateFunction.class
);
private static final Map<BuiltInFunctionKey, BuiltInFunctionInfo> BUILT_IN_FUNCTION_MAP = Maps.newHashMap();
private static final Multimap<String, BuiltInFunctionInfo> BUILT_IN_FUNCTION_MULTIMAP = ArrayListMultimap.create();
private static final BigDecimal MAX_LONG = BigDecimal.valueOf(Long.MAX_VALUE);
/**
*
* Key used to look up a built-in function using the combination of
* the lowercase name and the number of arguments. This disambiguates
* the aggregate MAX(<col>) from the non aggregate MAX(<col1>,<col2>).
*
*
* @since 0.1
*/
public static class BuiltInFunctionKey {
    // Normalized (per SchemaUtil.normalizeIdentifier) function name.
    private final String upperName;
    // Number of arguments; disambiguates overloads sharing a name.
    private final int argCount;

    /**
     * @param lowerName normalized function name
     * @param argCount  number of arguments of the overload this key identifies
     */
    public BuiltInFunctionKey(String lowerName, int argCount) {
        this.upperName = lowerName;
        this.argCount = argCount;
    }

    @Override
    public String toString() {
        return upperName;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + argCount;
        result = prime * result + ((upperName == null) ? 0 : upperName.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        BuiltInFunctionKey other = (BuiltInFunctionKey)obj;
        if (argCount != other.argCount) return false;
        // Null-safe comparison: hashCode() already tolerates a null name, so
        // equals() must too, or a null-named key would throw NPE on lookup.
        if (upperName == null ? other.upperName != null : !upperName.equals(other.upperName)) return false;
        return true;
    }
}
/**
 * Registers a single built-in function class in the static lookup structures.
 * Classes without a {@code @BuiltInFunction} annotation are ignored. Non-abstract
 * functions are added to the by-name multimap; non-derived functions get one
 * name/arity map entry per callable arity (trailing arguments with default values
 * may be omitted by callers).
 *
 * @param f the annotated function expression class to register
 * @throws Exception if the function metadata cannot be constructed
 * @throws IllegalStateException on duplicate name/arity registrations or on a
 *         default value that is not in a trailing argument position
 */
private static void addBuiltInFunction(Class<? extends FunctionExpression> f) throws Exception {
    BuiltInFunction d = f.getAnnotation(BuiltInFunction.class);
    if (d == null) {
        return;
    }
    int nArgs = d.args().length;
    BuiltInFunctionInfo value = new BuiltInFunctionInfo(f, d);
    if (d.classType() != FunctionParseNode.FunctionClassType.ABSTRACT) {
        BUILT_IN_FUNCTION_MULTIMAP.put(value.getName(), value);
    }
    if (d.classType() != FunctionParseNode.FunctionClassType.DERIVED) {
        do {
            // Add function to function map, throwing if conflicts found
            // Add entry for each possible version of function based on arguments that are not required to be present (i.e. arg with default value)
            BuiltInFunctionKey key = new BuiltInFunctionKey(value.getName(), nArgs);
            if (BUILT_IN_FUNCTION_MAP.put(key, value) != null) {
                throw new IllegalStateException("Multiple " + value.getName() + " functions with " + nArgs + " arguments");
            }
        } while (--nArgs >= 0 && d.args()[nArgs].defaultValue().length() > 0);
        // Look for default values that aren't at the end and throw
        while (--nArgs >= 0) {
            if (d.args()[nArgs].defaultValue().length() > 0) {
                throw new IllegalStateException("Function " + value.getName() + " has non trailing default value of '" + d.args()[nArgs].defaultValue() + "'. Only trailing arguments may have default values");
            }
        }
    }
}
/**
 * Lazily populates the built-in function lookup structures via reflection.
 * Not done in a static initializer because of a circular dependency; the method
 * is synchronized and idempotent (a non-empty map means work is already done).
 */
private synchronized static void initBuiltInFunctionMap() {
    if (!BUILT_IN_FUNCTION_MAP.isEmpty()) {
        return;
    }
    // Tracks the class being registered so failures can name the culprit.
    Class<? extends FunctionExpression> current = null;
    try {
        // Client-side-only functions are listed explicitly ...
        for (Class<? extends FunctionExpression> clientFunction : CLIENT_SIDE_BUILT_IN_FUNCTIONS) {
            current = clientFunction;
            addBuiltInFunction(current);
        }
        // ... everything else is discovered through the ExpressionType enum.
        for (ExpressionType et : ExpressionType.values()) {
            Class<? extends Expression> expressionClass = et.getExpressionClass();
            if (FunctionExpression.class.isAssignableFrom(expressionClass)) {
                @SuppressWarnings("unchecked")
                Class<? extends FunctionExpression> functionClass =
                        (Class<? extends FunctionExpression>)expressionClass;
                current = functionClass;
                addBuiltInFunction(current);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException("Failed initialization of built-in functions at class '" + current + "'", e);
    }
}
/** Looks up built-in function metadata for a raw (not yet normalized) name. */
private static BuiltInFunctionInfo getInfo(String name, List<ParseNode> children) {
    final String normalized = SchemaUtil.normalizeIdentifier(name);
    return get(normalized, children);
}

/**
 * Looks up built-in function metadata by normalized name and argument count.
 *
 * @return the matching metadata, or null when no built-in matches (e.g. a UDF)
 */
public static BuiltInFunctionInfo get(String normalizedName, List<ParseNode> children) {
    initBuiltInFunctionMap();
    return BUILT_IN_FUNCTION_MAP.get(new BuiltInFunctionKey(normalizedName, children.size()));
}

/** Returns all registered built-in functions keyed by name. */
public static Multimap<String, BuiltInFunctionInfo> getBuiltInFunctionMultimap() {
    initBuiltInFunctionMap();
    return BUILT_IN_FUNCTION_MULTIMAP;
}
public ParseNodeFactory() {
}

// Monotonically increasing counter backing createTempAlias(); shared across
// all factory instances.
private static AtomicInteger tempAliasCounter = new AtomicInteger(0);

/** Returns a new process-wide-unique temporary alias of the form {@code $N}. */
public static String createTempAlias() {
    return "$" + tempAliasCounter.incrementAndGet();
}
/** Creates an EXPLAIN wrapper for the given statement. */
public ExplainStatement explain(BindableStatement statement) {
    return new ExplainStatement(statement);
}

/** Creates an aliased select-list entry. */
public AliasedNode aliasedNode(String alias, ParseNode expression) {
    return new AliasedNode(alias, expression);
}

/** Creates an addition (+) node over the given operands. */
public AddParseNode add(List<ParseNode> children) {
    return new AddParseNode(children);
}

/** Creates a subtraction (-) node over the given operands. */
public SubtractParseNode subtract(List<ParseNode> children) {
    return new SubtractParseNode(children);
}

/** Creates a multiplication (*) node over the given operands. */
public MultiplyParseNode multiply(List<ParseNode> children) {
    return new MultiplyParseNode(children);
}

/** Creates a modulus (%) node over the given operands. */
public ModulusParseNode modulus(List<ParseNode> children) {
    return new ModulusParseNode(children);
}

/** Creates a logical AND node over the given operands. */
public AndParseNode and(List<ParseNode> children) {
    return new AndParseNode(children);
}

/** Creates a column-family wildcard (family.*) reference. */
public FamilyWildcardParseNode family(String familyName){
    return new FamilyWildcardParseNode(familyName, false);
}

/** Creates a table wildcard (table.*) reference. */
public TableWildcardParseNode tableWildcard(TableName tableName) {
    return new TableWildcardParseNode(tableName, false);
}

/** Returns the shared bare wildcard (*) node. */
public WildcardParseNode wildcard() {
    return WildcardParseNode.INSTANCE;
}

/** Creates a (possibly negated) BETWEEN predicate: l BETWEEN r1 AND r2. */
public BetweenParseNode between(ParseNode l, ParseNode r1, ParseNode r2, boolean negate) {
    return new BetweenParseNode(l, r1, r2, negate);
}

/** Creates a bind-variable (?) placeholder node. */
public BindParseNode bind(String bind) {
    return new BindParseNode(bind);
}

/** Creates a string concatenation (||) node over the given operands. */
public StringConcatParseNode concat(List<ParseNode> children) {
    return new StringConcatParseNode(children);
}
/** Creates a (possibly table-qualified, possibly aliased) column reference. */
public ColumnParseNode column(TableName tableName, String columnName, String alias) {
    return new ColumnParseNode(tableName, columnName, alias);
}

/** Creates an unqualified column name. */
public ColumnName columnName(String columnName) {
    return new ColumnName(columnName);
}

/** Creates a family-qualified column name. */
public ColumnName columnName(String familyName, String columnName) {
    return new ColumnName(familyName, columnName);
}

/** Creates an unqualified property name (used in DDL property lists). */
public PropertyName propertyName(String propertyName) {
    return new PropertyName(propertyName);
}

/** Creates a family-qualified property name. */
public PropertyName propertyName(String familyName, String propertyName) {
    return new PropertyName(familyName, propertyName);
}

/** Creates a non-array column definition. */
public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName, boolean isNull, Integer maxLength, Integer scale, boolean isPK, SortOrder sortOrder, String expressionStr, boolean isRowTimestamp) {
    return new ColumnDef(columnDefName, sqlTypeName, isNull, maxLength, scale, isPK, sortOrder, expressionStr, isRowTimestamp);
}

/** Creates a column definition, optionally an array with the given size. */
public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName,
        boolean isArray, Integer arrSize, Boolean isNull,
        Integer maxLength, Integer scale, boolean isPK,
        SortOrder sortOrder, String expressionStr, boolean isRowTimestamp) {
    return new ColumnDef(columnDefName, sqlTypeName,
            isArray, arrSize, isNull,
            maxLength, scale, isPK,
            sortOrder, expressionStr, isRowTimestamp);
}

/** Creates a column definition without a column expression. */
public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName, boolean isArray, Integer arrSize, Boolean isNull, Integer maxLength, Integer scale, boolean isPK,
        SortOrder sortOrder, boolean isRowTimestamp) {
    return new ColumnDef(columnDefName, sqlTypeName, isArray, arrSize, isNull, maxLength, scale, isPK, sortOrder, null, isRowTimestamp);
}

/** Creates a primary-key constraint entry for a single column. */
public ColumnDefInPkConstraint columnDefInPkConstraint(ColumnName columnDefName, SortOrder sortOrder, boolean isRowTimestamp) {
    return new ColumnDefInPkConstraint(columnDefName, sortOrder, isRowTimestamp);
}

/** Creates a named primary-key constraint over the given columns. */
public PrimaryKeyConstraint primaryKey(String name, List<ColumnDefInPkConstraint> columnDefs) {
    return new PrimaryKeyConstraint(name, columnDefs);
}

/** Creates an index key constraint from (expression, sort order) pairs. */
public IndexKeyConstraint indexKey( List<Pair<ParseNode, SortOrder>> parseNodeAndSortOrder) {
    return new IndexKeyConstraint(parseNodeAndSortOrder);
}
/** Creates a CREATE TABLE / CREATE VIEW statement node. */
public CreateTableStatement createTable(TableName tableName, ListMultimap<String,Pair<String,Object>> props, List<ColumnDef> columns, PrimaryKeyConstraint pkConstraint, List<ParseNode> splits, PTableType tableType, boolean ifNotExists, TableName baseTableName, ParseNode tableTypeIdNode, int bindCount, Boolean immutableRows) {
    return new CreateTableStatement(tableName, props, columns, pkConstraint, splits, tableType, ifNotExists, baseTableName, tableTypeIdNode, bindCount, immutableRows);
}

/** Creates a CREATE SCHEMA statement node. */
public CreateSchemaStatement createSchema(String schemaName, boolean ifNotExists) {
    return new CreateSchemaStatement(schemaName, ifNotExists);
}

/** Creates a CREATE INDEX statement node. */
public CreateIndexStatement createIndex(NamedNode indexName, NamedTableNode dataTable, IndexKeyConstraint ikConstraint, List<ColumnName> includeColumns, List<ParseNode> splits, ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType,boolean async, int bindCount, Map<String, UDFParseNode> udfParseNodes) {
    return new CreateIndexStatement(indexName, dataTable, ikConstraint, includeColumns, splits, props, ifNotExists, indexType, async, bindCount, udfParseNodes);
}

/** Creates a CREATE SEQUENCE statement node. */
public CreateSequenceStatement createSequence(TableName tableName, ParseNode startsWith,
        ParseNode incrementBy, ParseNode cacheSize, ParseNode minValue, ParseNode maxValue,
        boolean cycle, boolean ifNotExits, int bindCount) {
    return new CreateSequenceStatement(tableName, startsWith, incrementBy, cacheSize, minValue,
            maxValue, cycle, ifNotExits, bindCount);
}

/** Creates a CREATE FUNCTION statement node. */
public CreateFunctionStatement createFunction(PFunction functionInfo, boolean temporary, boolean isReplace) {
    return new CreateFunctionStatement(functionInfo, temporary, isReplace);
}

/** Creates an ADD JARS statement node. */
public AddJarsStatement addJars(List<LiteralParseNode> jarPaths) {
    return new AddJarsStatement(jarPaths);
}

/** Creates a LIST JARS statement node. */
public ListJarsStatement listJars() {
    return new ListJarsStatement();
}

/** Creates a DELETE JAR statement node. */
public DeleteJarStatement deleteJar(LiteralParseNode jarPath) {
    return new DeleteJarStatement(jarPath);
}

/** Creates a DROP FUNCTION statement node. */
public DropFunctionStatement dropFunction(String functionName, boolean ifExists) {
    return new DropFunctionStatement(functionName, ifExists);
}

/** Creates a DROP SEQUENCE statement node. */
public DropSequenceStatement dropSequence(TableName tableName, boolean ifExits, int bindCount){
    return new DropSequenceStatement(tableName, ifExits, bindCount);
}

/** Creates a CURRENT VALUE FOR sequence expression node. */
public SequenceValueParseNode currentValueFor(TableName tableName) {
    return new SequenceValueParseNode(tableName, SequenceValueParseNode.Op.CURRENT_VALUE, null);
}

/** Creates a NEXT VALUE FOR sequence expression node. */
public SequenceValueParseNode nextValueFor(TableName tableName, ParseNode numToAllocateNode) {
    return new SequenceValueParseNode(tableName, SequenceValueParseNode.Op.NEXT_VALUE, numToAllocateNode);
}

/** Creates an ALTER TABLE ... ADD statement node. */
public AddColumnStatement addColumn(NamedTableNode table, PTableType tableType, List<ColumnDef> columnDefs, boolean ifNotExists, ListMultimap<String,Pair<String,Object>> props) {
    return new AddColumnStatement(table, tableType, columnDefs, ifNotExists, props);
}

/** Creates an ALTER TABLE ... DROP COLUMN statement node. */
public DropColumnStatement dropColumn(NamedTableNode table, PTableType tableType, List<ColumnName> columnNodes, boolean ifExists) {
    return new DropColumnStatement(table, tableType, columnNodes, ifExists);
}

/** Creates a DROP TABLE statement node. */
public DropTableStatement dropTable(TableName tableName, PTableType tableType, boolean ifExists, boolean cascade) {
    return new DropTableStatement(tableName, tableType, ifExists, cascade, false);
}

/** Creates a DROP INDEX statement node. */
public DropIndexStatement dropIndex(NamedNode indexName, TableName tableName, boolean ifExists) {
    return new DropIndexStatement(indexName, tableName, ifExists);
}

/** Creates an ALTER INDEX statement node with rebuild/async options and properties. */
public AlterIndexStatement alterIndex(NamedTableNode indexTableNode, String dataTableName, boolean ifExists, PIndexState state, boolean isRebuildAll, boolean async, ListMultimap<String,Pair<String,Object>> props) {
    return new AlterIndexStatement(indexTableNode, dataTableName, ifExists, state, isRebuildAll, async, props);
}

/** Creates a plain ALTER INDEX statement node (no rebuild, not async). */
public AlterIndexStatement alterIndex(NamedTableNode indexTableNode, String dataTableName, boolean ifExists, PIndexState state) {
    return new AlterIndexStatement(indexTableNode, dataTableName, ifExists, state, false, false);
}

/** Creates a TRACE ON/OFF statement node. */
public TraceStatement trace(boolean isTraceOn, double samplingRate) {
    return new TraceStatement(isTraceOn, samplingRate);
}

/** Creates an ALTER SESSION statement node. */
public AlterSessionStatement alterSession(Map<String,Object> props) {
    return new AlterSessionStatement(props);
}
/** Creates a normalized, schema-qualified table name. */
public TableName table(String schemaName, String tableName) {
    return TableName.createNormalized(schemaName,tableName);
}

/** Creates an index name node. */
public NamedNode indexName(String name) {
    return new NamedNode(name);
}

/** @deprecated use a variant that carries a table sampling rate. */
@Deprecated
public NamedTableNode namedTable(String alias, TableName name) {
    return new NamedTableNode(alias, name);
}

/** @deprecated use a variant that carries a table sampling rate. */
@Deprecated
public NamedTableNode namedTable(String alias, TableName name, List<ColumnDef> dyn_columns) {
    return new NamedTableNode(alias, name,dyn_columns);
}

/** Creates a named table reference with an explicit sampling rate. */
public NamedTableNode namedTable(String alias, TableName name, Double tableSamplingRate) {
    return new NamedTableNode(alias, name, tableSamplingRate);
}

/** Creates a named table reference with dynamic columns and a sampling rate. */
public NamedTableNode namedTable(String alias, TableName name, List<ColumnDef> dyn_columns, Double tableSamplingRate) {
    return new NamedTableNode(alias, name,dyn_columns, tableSamplingRate);
}
/**
 * Creates a named table reference, deriving the sampling rate from a parsed
 * TABLESAMPLE literal: absent/null literal falls back to the default rate,
 * an Integer literal is widened to double, any other literal is treated as
 * a BigDecimal.
 */
public NamedTableNode namedTable(String alias, TableName name, List<ColumnDef> dyn_columns, LiteralParseNode tableSampleNode) {
    final Object sampleValue = (tableSampleNode == null) ? null : tableSampleNode.getValue();
    final Double tableSamplingRate;
    if (sampleValue == null) {
        tableSamplingRate = ConcreteTableNode.DEFAULT_TABLE_SAMPLING_RATE;
    } else if (sampleValue instanceof Integer) {
        tableSamplingRate = (double) ((int) sampleValue);
    } else {
        tableSamplingRate = ((BigDecimal) sampleValue).doubleValue();
    }
    return new NamedTableNode(alias, name, dyn_columns, tableSamplingRate);
}
/** Creates a bind-variable table reference. */
public BindTableNode bindTable(String alias, TableName name) {
    return new BindTableNode(alias, name);
}

/** Creates a CASE WHEN expression node over the given children. */
public CaseParseNode caseWhen(List<ParseNode> children) {
    return new CaseParseNode(children);
}

/** Creates a division (/) node over the given operands. */
public DivideParseNode divide(List<ParseNode> children) {
    return new DivideParseNode(children);
}

/** Creates an UPDATE STATISTICS statement node. */
public UpdateStatisticsStatement updateStatistics(NamedTableNode table, StatisticsCollectionScope scope, Map<String,Object> props) {
    return new UpdateStatisticsStatement(table, scope, props);
}

/** Creates an EXECUTE UPGRADE statement node. */
public ExecuteUpgradeStatement executeUpgrade() {
    return new ExecuteUpgradeStatement();
}
/**
 * Creates a DISTINCT aggregate call. Only COUNT(DISTINCT ...) is supported;
 * it is rewritten to the DISTINCT_COUNT aggregate.
 *
 * @throws UnsupportedOperationException for any function other than COUNT
 */
public FunctionParseNode functionDistinct(String name, List<ParseNode> args) {
    if (CountAggregateFunction.NAME.equals(SchemaUtil.normalizeIdentifier(name))) {
        BuiltInFunctionInfo info = getInfo(
                SchemaUtil.normalizeIdentifier(DistinctCountAggregateFunction.NAME), args);
        return new DistinctCountParseNode(DistinctCountAggregateFunction.NAME, args, info);
    } else {
        throw new UnsupportedOperationException("DISTINCT not supported with " + name);
    }
}

/** Creates an array element access (arr[idx]) as the built-in ARRAY_ELEM call. */
public FunctionParseNode arrayElemRef(List<ParseNode> args) {
    return function(ARRAY_ELEM, args);
}

/**
 * Creates a function call node. Unknown names become UDF nodes; built-ins use
 * their registered node constructor when one exists, otherwise a generic
 * (aggregate or scalar) function node.
 */
public FunctionParseNode function(String name, List<ParseNode> args) {
    BuiltInFunctionInfo info = getInfo(name, args);
    if (info == null) {
        // Not a built-in: assume a user-defined function (info stays null).
        return new UDFParseNode(name, args, info);
    }
    Constructor<? extends FunctionParseNode> ctor = info.getNodeCtor();
    if (ctor == null) {
        return info.isAggregate()
                ? new AggregateFunctionParseNode(name, args, info)
                : new FunctionParseNode(name, args, info);
    } else {
        try {
            return ctor.newInstance(name, args, info);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}

/**
 * Creates a WITHIN GROUP aggregate call. The argument list is assembled as
 * [columnNodes..., isAscending literal, valueNodes...] before lookup.
 */
public FunctionParseNode function(String name, List<ParseNode> valueNodes,
        List<ParseNode> columnNodes, boolean isAscending) {
    List<ParseNode> args = Lists.newArrayListWithExpectedSize(columnNodes.size() + valueNodes.size() + 1);
    args.addAll(columnNodes);
    args.add(new LiteralParseNode(Boolean.valueOf(isAscending)));
    args.addAll(valueNodes);
    BuiltInFunctionInfo info = getInfo(name, args);
    if(info==null) {
        // Not a built-in: assume a user-defined function (info stays null).
        return new UDFParseNode(name,args,info);
    }
    Constructor<? extends FunctionParseNode> ctor = info.getNodeCtor();
    if (ctor == null) {
        return new AggregateFunctionWithinGroupParseNode(name, args, info);
    } else {
        try {
            return ctor.newInstance(name, args, info);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
/** Creates a query hint node from the raw hint text. */
public HintNode hint(String hint) {
    return new HintNode(hint);
}

/** Creates a (possibly negated) IN (...) list predicate. */
public InListParseNode inList(List<ParseNode> children, boolean negate) {
    return new InListParseNode(children, negate);
}

/** Creates a (possibly negated) EXISTS subquery predicate. */
public ExistsParseNode exists(ParseNode child, boolean negate) {
    return new ExistsParseNode(child, negate);
}

/** Creates a (possibly negated) IN subquery predicate. */
public InParseNode in(ParseNode l, ParseNode r, boolean negate, boolean isSubqueryDistinct) {
    return new InParseNode(l, r, negate, isSubqueryDistinct);
}

/** Creates an IS NULL / IS NOT NULL predicate. */
public IsNullParseNode isNull(ParseNode child, boolean negate) {
    return new IsNullParseNode(child, negate);
}

/** Creates a join node between two table references with an ON condition. */
public JoinTableNode join(JoinType type, TableNode lhs, TableNode rhs, ParseNode on, boolean singleValueOnly) {
    return new JoinTableNode(type, lhs, rhs, on, singleValueOnly);
}

/** Creates a derived-table (subselect in FROM) reference. */
public DerivedTableNode derivedTable (String alias, SelectStatement select) {
    return new DerivedTableNode(alias, select);
}

/** Creates a (possibly negated) LIKE / ILIKE predicate. */
public LikeParseNode like(ParseNode lhs, ParseNode rhs, boolean negate, LikeType likeType) {
    return new LikeParseNode(lhs, rhs, negate, likeType);
}
/** Creates a literal node wrapping the given value as-is. */
public LiteralParseNode literal(Object value) {
    return new LiteralParseNode(value);
}

/** Creates a decimal literal node from its textual form. */
public LiteralParseNode realNumber(String text) {
    return new LiteralParseNode(new BigDecimal(text, PDataType.DEFAULT_MATH_CONTEXT));
}

/**
 * Creates a numeric literal from digit text, using the narrowest fitting type:
 * int, then long, then BigDecimal.
 * NOTE(review): only the upper bound is checked when narrowing to int/long —
 * presumably the parser only feeds non-negative digit strings here; verify
 * against the grammar.
 */
public LiteralParseNode wholeNumber(String text) {
    int length = text.length();
    // We know it'll fit into long, might still fit into int
    if (length <= PDataType.LONG_PRECISION-1) {
        long l = Long.parseLong(text);
        if (l <= Integer.MAX_VALUE) {
            // Fits into int
            return new LiteralParseNode((int)l);
        }
        return new LiteralParseNode(l);
    }
    // Might still fit into long
    BigDecimal d = new BigDecimal(text, PDataType.DEFAULT_MATH_CONTEXT);
    if (d.compareTo(MAX_LONG) <= 0) {
        return new LiteralParseNode(d.longValueExact());
    }
    // Doesn't fit into long
    return new LiteralParseNode(d);
}

/**
 * Creates an int literal when the parsed value fits, otherwise a long literal.
 * NOTE(review): like wholeNumber, only the upper bound is checked — assumes
 * non-negative input text.
 */
public LiteralParseNode intOrLong(String text) {
    long l = Long.parseLong(text);
    if (l <= Integer.MAX_VALUE) {
        // Fits into int
        return new LiteralParseNode((int)l);
    }
    return new LiteralParseNode(l);
}
    /** Creates a non-array CAST node with the target type given by SQL type name. */
    public CastParseNode cast(ParseNode expression, String dataType, Integer maxLength, Integer scale) {
        return new CastParseNode(expression, dataType, maxLength, scale, false);
    }
    /** Creates a non-array CAST node with an already-resolved target type. */
    public CastParseNode cast(ParseNode expression, PDataType dataType, Integer maxLength, Integer scale) {
        return new CastParseNode(expression, dataType, maxLength, scale, false);
    }
    /** Creates a CAST node; {@code arr} selects the ARRAY variant of the target type. */
    public CastParseNode cast(ParseNode expression, PDataType dataType, Integer maxLength, Integer scale, boolean arr) {
        return new CastParseNode(expression, dataType, maxLength, scale, arr);
    }
    /** Creates a CAST node by SQL type name; {@code arr} selects the ARRAY variant. */
    public CastParseNode cast(ParseNode expression, String dataType, Integer maxLength, Integer scale, boolean arr) {
        return new CastParseNode(expression, dataType, maxLength, scale, arr);
    }
    /** Creates a row value constructor node: {@code (a, b, c)}. */
    public ParseNode rowValueConstructor(List<ParseNode> l) {
        return new RowValueConstructorParseNode(l);
    }
    /**
     * Verifies {@code expectedType} is coercible to {@code actualType}.
     *
     * @throws SQLException a TypeMismatchException when coercion is not possible
     */
    private void checkTypeMatch (PDataType expectedType, PDataType actualType) throws SQLException {
        if (!expectedType.isCoercibleTo(actualType)) {
            throw TypeMismatchException.newException(expectedType, actualType);
        }
    }
public LiteralParseNode literal(Object value, PDataType expectedType) throws SQLException {
PDataType actualType = PDataType.fromLiteral(value);
if (actualType != null && actualType != expectedType) {
checkTypeMatch(expectedType, actualType);
value = expectedType.toObject(value, actualType);
}
return new LiteralParseNode(value);
/*
Object typedValue = expectedType.toObject(value.toString());
return new LiteralParseNode(typedValue);
*/
}
    /**
     * Creates a temporal literal from its string form. Only SQL type names whose
     * type is coercible to TIMESTAMP (e.g. DATE, TIME, TIMESTAMP) are accepted.
     *
     * @throws SQLException a TypeMismatchException when the type name is null or
     *         names a non-temporal type
     */
    public LiteralParseNode literal(String value, String sqlTypeName) throws SQLException {
        PDataType expectedType = sqlTypeName == null ? null : PDataType.fromSqlTypeName(SchemaUtil.normalizeIdentifier(sqlTypeName));
        if (expectedType == null || !expectedType.isCoercibleTo(PTimestamp.INSTANCE)) {
            throw TypeMismatchException.newException(expectedType, PTimestamp.INSTANCE);
        }
        Object typedValue = expectedType.toObject(value);
        return new LiteralParseNode(typedValue);
    }
    /**
     * Coerces an existing literal node to {@code expectedType}, returning a new
     * node only when conversion actually produced a different value object.
     *
     * @throws SQLException a TypeMismatchException when the literal's type is not coercible
     */
    public LiteralParseNode coerce(LiteralParseNode literalNode, PDataType expectedType) throws SQLException {
        PDataType actualType = literalNode.getType();
        if (actualType != null) {
            Object before = literalNode.getValue();
            checkTypeMatch(expectedType, actualType);
            Object after = expectedType.toObject(before, actualType);
            // Identity (not equals) comparison: presumably toObject returns the same
            // instance when no conversion is needed — NOTE(review): confirm against PDataType.
            if (before != after) {
                literalNode = literal(after);
            }
        }
        return literalNode;
    }
public ComparisonParseNode comparison(CompareOp op, ParseNode lhs, ParseNode rhs) {
switch (op){
case LESS:
return lt(lhs,rhs);
case LESS_OR_EQUAL:
return lte(lhs,rhs);
case EQUAL:
return equal(lhs,rhs);
case NOT_EQUAL:
return notEqual(lhs,rhs);
case GREATER_OR_EQUAL:
return gte(lhs,rhs);
case GREATER:
return gt(lhs,rhs);
default:
throw new IllegalArgumentException("Unexpcted CompareOp of " + op);
}
}
    /** Creates an {@code expr op ANY(array)} comparison node. */
    public ArrayAnyComparisonNode arrayAny(ParseNode rhs, ComparisonParseNode compareNode) {
        return new ArrayAnyComparisonNode(rhs, compareNode);
    }
    /** Creates an {@code expr op ALL(array)} comparison node. */
    public ArrayAllComparisonNode arrayAll(ParseNode rhs, ComparisonParseNode compareNode) {
        return new ArrayAllComparisonNode(rhs, compareNode);
    }
    /** Wraps a scalar comparison as ANY over the array {@code rhs}, comparing against element 1. */
    public ArrayAnyComparisonNode wrapInAny(CompareOp op, ParseNode lhs, ParseNode rhs) {
        return new ArrayAnyComparisonNode(rhs, comparison(op, lhs, elementRef(Arrays.<ParseNode>asList(rhs, literal(1)))));
    }
    /** Wraps a scalar comparison as ALL over the array {@code rhs}, comparing against element 1. */
    public ArrayAllComparisonNode wrapInAll(CompareOp op, ParseNode lhs, ParseNode rhs) {
        return new ArrayAllComparisonNode(rhs, comparison(op, lhs, elementRef(Arrays.<ParseNode>asList(rhs, literal(1)))));
    }
    /** Creates an array element reference node (array plus index children). */
    public ArrayElemRefNode elementRef(List<ParseNode> parseNode) {
        return new ArrayElemRefNode(parseNode);
    }
    /** Creates a {@code >} node. */
    public GreaterThanParseNode gt(ParseNode lhs, ParseNode rhs) {
        return new GreaterThanParseNode(lhs, rhs);
    }
    /** Creates a {@code >=} node. */
    public GreaterThanOrEqualParseNode gte(ParseNode lhs, ParseNode rhs) {
        return new GreaterThanOrEqualParseNode(lhs, rhs);
    }
    /** Creates a {@code <} node. */
    public LessThanParseNode lt(ParseNode lhs, ParseNode rhs) {
        return new LessThanParseNode(lhs, rhs);
    }
    /** Creates a {@code <=} node. */
    public LessThanOrEqualParseNode lte(ParseNode lhs, ParseNode rhs) {
        return new LessThanOrEqualParseNode(lhs, rhs);
    }
    /** Creates an {@code =} node. */
    public EqualParseNode equal(ParseNode lhs, ParseNode rhs) {
        return new EqualParseNode(lhs, rhs);
    }
    /** Creates an ARRAY constructor node for UPSERT VALUES array literals. */
    public ArrayConstructorNode upsertStmtArrayNode(List<ParseNode> upsertStmtArray) {
        return new ArrayConstructorNode(upsertStmtArray);
    }
    /**
     * Negates a numeric parse node. Implemented as multiplication by -1, with two
     * literal special cases that keep re-parsing stable and restore Long.MIN_VALUE.
     */
    public ParseNode negate(ParseNode child) {
        // Prevents reparsing of -1 from becoming 1*-1 and 1*1*-1 with each re-parsing
        if (LiteralParseNode.ONE.equals(child) && ((LiteralParseNode)child).getType().isCoercibleTo(
                PLong.INSTANCE)) {
            return LiteralParseNode.MINUS_ONE;
        }
        // Special case to convert Long.MIN_VALUE back to a Long. We can't initially represent it
        // as a Long in the parser because we only represent positive values as constants in the
        // parser, and ABS(Long.MIN_VALUE) is too big to fit into a Long. So we convert it back here.
        if (LiteralParseNode.MIN_LONG_AS_BIG_DECIMAL.equals(child)) {
            return LiteralParseNode.MIN_LONG;
        }
        return new MultiplyParseNode(Arrays.asList(child,LiteralParseNode.MINUS_ONE));
    }
    /** Creates a {@code !=} node. */
    public NotEqualParseNode notEqual(ParseNode lhs, ParseNode rhs) {
        return new NotEqualParseNode(lhs, rhs);
    }
    /** Creates a NOT node; {@code NOT EXISTS} is folded into the EXISTS node's negate flag. */
    public ParseNode not(ParseNode child) {
        if (child instanceof ExistsParseNode) {
            return exists(child.getChildren().get(0), !((ExistsParseNode) child).isNegate());
        }
        return new NotParseNode(child);
    }
    /** Creates an OR node over the given children. */
    public OrParseNode or(List<ParseNode> children) {
        return new OrParseNode(children);
    }
    /** Creates an ORDER BY entry with its NULLS LAST and ASC/DESC flags. */
    public OrderByNode orderBy(ParseNode expression, boolean nullsLast, boolean orderAscending) {
        return new OrderByNode(expression, nullsLast, orderAscending);
    }
    /**
     * Primary SELECT factory; all other select(...) overloads delegate here.
     * Null groupBy/orderBy/selects collections are normalized to empty lists.
     */
    public SelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
            List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, OffsetNode offset, int bindCount, boolean isAggregate,
            boolean hasSequence, List<SelectStatement> selects, Map<String, UDFParseNode> udfParseNodes) {
        return new SelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy, having,
                orderBy == null ? Collections.<OrderByNode>emptyList() : orderBy, limit, offset, bindCount, isAggregate, hasSequence, selects == null ? Collections.<SelectStatement>emptyList() : selects, udfParseNodes);
    }
    /** Creates an UPSERT statement node, including any ON DUPLICATE KEY pairs. */
    public UpsertStatement upsert(NamedTableNode table, HintNode hint, List<ColumnName> columns, List<ParseNode> values,
            SelectStatement select, int bindCount,
            Map<String, UDFParseNode> udfParseNodes,
            List<Pair<ColumnName,ParseNode>> onDupKeyPairs) {
        return new UpsertStatement(table, hint, columns, values, select, bindCount, udfParseNodes, onDupKeyPairs);
    }
    /** Creates a cursor name reference. */
    public CursorName cursorName(String name){
        return new CursorName(name);
    }
    /** Creates a DECLARE CURSOR statement node. */
    public DeclareCursorStatement declareCursor(CursorName cursor, SelectStatement select){
        return new DeclareCursorStatement(cursor, select);
    }
    /** Creates a FETCH statement node ({@code isNext} distinguishes FETCH NEXT). */
    public FetchStatement fetch(CursorName cursor, boolean isNext, int fetchLimit){
        return new FetchStatement(cursor, isNext, fetchLimit);
    }
    /** Creates an OPEN cursor statement node. */
    public OpenStatement open(CursorName cursor){
        return new OpenStatement(cursor);
    }
    /** Creates a CLOSE cursor statement node. */
    public CloseStatement close(CursorName cursor){
        return new CloseStatement(cursor);
    }
    /** Creates a DELETE statement node. */
    public DeleteStatement delete(NamedTableNode table, HintNode hint, ParseNode node, List<OrderByNode> orderBy, LimitNode limit, int bindCount, Map<String, UDFParseNode> udfParseNodes) {
        return new DeleteStatement(table, hint, node, orderBy, limit, bindCount, udfParseNodes);
    }
    /** Copy of {@code statement} with the WHERE clause replaced. */
    public SelectStatement select(SelectStatement statement, ParseNode where) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(), statement.getHaving(),
                statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with WHERE and HAVING replaced. */
    public SelectStatement select(SelectStatement statement, ParseNode where, ParseNode having) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(), having,
                statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with select list, WHERE, GROUP BY, HAVING and ORDER BY replaced. */
    public SelectStatement select(SelectStatement statement, List<AliasedNode> select, ParseNode where, List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(),
                select, where, groupBy, having, orderBy, statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with the FROM clause replaced. */
    public SelectStatement select(SelectStatement statement, TableNode table) {
        return select(table, statement.getHint(), statement.isDistinct(), statement.getSelect(), statement.getWhere(), statement.getGroupBy(),
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(),
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with FROM and WHERE replaced. */
    public SelectStatement select(SelectStatement statement, TableNode table, ParseNode where) {
        return select(table, statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(),
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(),
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with the DISTINCT flag and select list replaced. */
    public SelectStatement select(SelectStatement statement, boolean isDistinct, List<AliasedNode> select) {
        return select(statement.getFrom(), statement.getHint(), isDistinct, select, statement.getWhere(), statement.getGroupBy(),
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(),
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with DISTINCT flag, select list and WHERE replaced. */
    public SelectStatement select(SelectStatement statement, boolean isDistinct, List<AliasedNode> select, ParseNode where) {
        return select(statement.getFrom(), statement.getHint(), isDistinct, select, where, statement.getGroupBy(),
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(),
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with DISTINCT, select list, WHERE, GROUP BY and aggregate flag replaced. */
    public SelectStatement select(SelectStatement statement, boolean isDistinct, List<AliasedNode> select, ParseNode where, List<ParseNode> groupBy, boolean isAggregate) {
        return select(statement.getFrom(), statement.getHint(), isDistinct, select, where, groupBy,
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), isAggregate,
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with the ORDER BY clause replaced. */
    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, statement.getLimit(),
                statement.getOffset(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with the hint replaced; a null/empty hint returns the original. */
    public SelectStatement select(SelectStatement statement, HintNode hint) {
        return hint == null || hint.isEmpty() ? statement : select(statement.getFrom(), hint, statement.isDistinct(), statement.getSelect(),
                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(),
                statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with hint and WHERE replaced. */
    public SelectStatement select(SelectStatement statement, HintNode hint, ParseNode where) {
        return select(statement.getFrom(), hint, statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(),
                statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getOffset(), statement.getBindCount(), statement.isAggregate(),
                statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with ORDER BY, LIMIT, OFFSET, bind count replaced; aggregate flag is OR-ed in. */
    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy, LimitNode limit, OffsetNode offset, int bindCount, boolean isAggregate) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, limit, offset,
                bindCount, isAggregate || statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with the LIMIT clause replaced. */
    public SelectStatement select(SelectStatement statement, LimitNode limit) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), limit,
                statement.getOffset(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(),
                statement.getSelects(), statement.getUdfParseNodes());
    }
    /** Copy of {@code statement} with ORDER BY, LIMIT and OFFSET replaced. */
    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy, LimitNode limit, OffsetNode offset) {
        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, limit,offset,
                statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects(), statement.getUdfParseNodes());
    }
    /**
     * Builds the outer SELECT of a UNION ALL query from its sub-selects. For a
     * single sub-select it degenerates to a plain copy; otherwise the outer query
     * projects either aliases borrowed from a non-wildcard sub-select or a wildcard.
     */
    public SelectStatement select(List<SelectStatement> statements, List<OrderByNode> orderBy, LimitNode limit,
            OffsetNode offset, int bindCount, boolean isAggregate) {
        if (statements.size() == 1) return select(statements.get(0), orderBy, limit, offset, bindCount, isAggregate);
        // Get a list of adjusted aliases from a non-wildcard sub-select if any.
        // We do not check the number of select nodes among all sub-selects, as
        // it will be done later at compile stage. Empty or different aliases
        // are ignored, since they cannot be referred by outer queries.
        List<String> aliases = Lists.<String> newArrayList();
        Map<String, UDFParseNode> udfParseNodes = new HashMap<String, UDFParseNode>(1);
        for (int i = 0; i < statements.size() && aliases.isEmpty(); i++) {
            SelectStatement subselect = statements.get(i);
            // UDFs from every sub-select are merged so the outer query can resolve them.
            udfParseNodes.putAll(subselect.getUdfParseNodes());
            if (!subselect.hasWildcard()) {
                for (AliasedNode aliasedNode : subselect.getSelect()) {
                    String alias = aliasedNode.getAlias();
                    if (alias == null) {
                        alias = SchemaUtil.normalizeIdentifier(aliasedNode.getNode().getAlias());
                    }
                    // Fall back to a generated temp alias when none can be derived.
                    aliases.add(alias == null ? createTempAlias() : alias);
                }
            }
        }
        List<AliasedNode> aliasedNodes;
        if (aliases.isEmpty()) {
            // Every sub-select used a wildcard; project * in the outer query too.
            aliasedNodes = Lists.newArrayList(aliasedNode(null, wildcard()));
        } else {
            aliasedNodes = Lists.newArrayListWithExpectedSize(aliases.size());
            for (String alias : aliases) {
                aliasedNodes.add(aliasedNode(alias, column(null, alias, alias)));
            }
        }
        return select(null, HintNode.EMPTY_HINT_NODE, false, aliasedNodes,
                null, null, null, orderBy, limit,offset, bindCount, false, false, statements, udfParseNodes);
    }
    /** Wraps a SELECT as a subquery expression; {@code expectSingleRow} enforces scalar semantics. */
    public SubqueryParseNode subquery(SelectStatement select, boolean expectSingleRow) {
        return new SubqueryParseNode(select, expectSingleRow);
    }
    /** Creates a LIMIT node from a bind parameter. */
    public LimitNode limit(BindParseNode b) {
        return new LimitNode(b);
    }
    /** Creates a LIMIT node from a literal. */
    public LimitNode limit(LiteralParseNode l) {
        return new LimitNode(l);
    }
    /** Creates an OFFSET node from a bind parameter. */
    public OffsetNode offset(BindParseNode b) {
        return new OffsetNode(b);
    }
    /** Creates an OFFSET node from a literal. */
    public OffsetNode offset(LiteralParseNode l) {
        return new OffsetNode(l);
    }
    /** Creates a DROP SCHEMA statement node. */
    public DropSchemaStatement dropSchema(String schemaName, boolean ifExists, boolean cascade) {
        return new DropSchemaStatement(schemaName, ifExists, cascade);
    }
    /** Creates a USE SCHEMA statement node. */
    public UseSchemaStatement useSchema(String schemaName) {
        return new UseSchemaStatement(schemaName);
    }
    /** Creates a GRANT/REVOKE permissions statement node. */
    public ChangePermsStatement changePermsStatement(String permsString, boolean isSchemaName, TableName tableName
            , String schemaName, boolean isGroupName, LiteralParseNode userOrGroup, boolean isGrantStatement) {
        return new ChangePermsStatement(permsString, isSchemaName, tableName, schemaName, isGroupName, userOrGroup, isGrantStatement);
    }
}
| |
package org.apereo.cas.audit.spi.config;
import org.apereo.cas.audit.AuditPrincipalIdProvider;
import org.apereo.cas.audit.AuditTrailConstants;
import org.apereo.cas.audit.AuditTrailExecutionPlan;
import org.apereo.cas.audit.AuditTrailExecutionPlanConfigurer;
import org.apereo.cas.audit.AuditTrailRecordResolutionPlan;
import org.apereo.cas.audit.AuditTrailRecordResolutionPlanConfigurer;
import org.apereo.cas.audit.spi.plan.DefaultAuditTrailExecutionPlan;
import org.apereo.cas.audit.spi.plan.DefaultAuditTrailRecordResolutionPlan;
import org.apereo.cas.audit.spi.principal.ChainingAuditPrincipalIdProvider;
import org.apereo.cas.audit.spi.principal.ThreadLocalPrincipalResolver;
import org.apereo.cas.audit.spi.resource.CredentialsAsFirstParameterResourceResolver;
import org.apereo.cas.audit.spi.resource.MessageBundleAwareResourceResolver;
import org.apereo.cas.audit.spi.resource.NullableReturnValueAuditResourceResolver;
import org.apereo.cas.audit.spi.resource.ServiceAccessEnforcementAuditResourceResolver;
import org.apereo.cas.audit.spi.resource.ServiceResourceResolver;
import org.apereo.cas.audit.spi.resource.ShortenedReturnValueAsStringResourceResolver;
import org.apereo.cas.audit.spi.resource.TicketAsFirstParameterResourceResolver;
import org.apereo.cas.audit.spi.resource.TicketValidationResourceResolver;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.util.CollectionUtils;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.apereo.inspektr.audit.AuditTrailManagementAspect;
import org.apereo.inspektr.audit.spi.AuditActionResolver;
import org.apereo.inspektr.audit.spi.AuditResourceResolver;
import org.apereo.inspektr.audit.spi.support.DefaultAuditActionResolver;
import org.apereo.inspektr.audit.support.AbstractStringAuditTrailManager;
import org.apereo.inspektr.audit.support.Slf4jLoggingAuditTrailManager;
import org.apereo.inspektr.common.spi.PrincipalResolver;
import org.apereo.inspektr.common.web.ClientInfoThreadLocalFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This is {@link CasCoreAuditConfiguration}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Configuration("casCoreAuditConfiguration")
@EnableAspectJAutoProxy
@EnableConfigurationProperties(CasConfigurationProperties.class)
@Slf4j
public class CasCoreAuditConfiguration implements AuditTrailExecutionPlanConfigurer, AuditTrailRecordResolutionPlanConfigurer {
    @Autowired
    private CasConfigurationProperties casProperties;
    @Autowired
    private ApplicationContext applicationContext;
    /**
     * Inspektr AOP aspect intercepting audit-annotated joinpoints and routing
     * records to the registered trail managers via the resolved action/resource resolvers.
     */
    @Bean
    public AuditTrailManagementAspect auditTrailManagementAspect(@Qualifier("auditTrailExecutionPlan") final AuditTrailExecutionPlan auditTrailExecutionPlan,
                                                                 @Qualifier("auditTrailRecordResolutionPlan") final AuditTrailRecordResolutionPlan auditTrailRecordResolutionPlan) {
        val aspect = new AuditTrailManagementAspect(
            casProperties.getAudit().getAppCode(),
            auditablePrincipalResolver(auditPrincipalIdProvider()),
            auditTrailExecutionPlan.getAuditTrailManagers(), auditTrailRecordResolutionPlan.getAuditActionResolvers(),
            auditTrailRecordResolutionPlan.getAuditResourceResolvers());
        // Deployer setting decides whether an audit failure aborts the audited operation.
        aspect.setFailOnAuditFailures(!casProperties.getAudit().isIgnoreAuditFailures());
        return aspect;
    }
    /** Aggregates all record-resolution plan configurers (including this class) into one plan. */
    @Autowired
    @ConditionalOnMissingBean(name = "auditTrailRecordResolutionPlan")
    @Bean
    public AuditTrailRecordResolutionPlan auditTrailRecordResolutionPlan(final List<AuditTrailRecordResolutionPlanConfigurer> configurers) {
        val plan = new DefaultAuditTrailRecordResolutionPlan();
        configurers.forEach(c -> {
            // Strip CGLIB/lambda suffixes (e.g. "$$EnhancerBySpringCGLIB") for readable log names.
            val name = StringUtils.removePattern(c.getClass().getSimpleName(), "\\$.+");
            LOGGER.debug("Registering audit trail manager [{}]", name);
            c.configureAuditTrailRecordResolutionPlan(plan);
        });
        return plan;
    }
    /** Aggregates all execution-plan configurers (including this class) into one plan. */
    @Autowired
    @ConditionalOnMissingBean(name = "auditTrailExecutionPlan")
    @Bean
    public AuditTrailExecutionPlan auditTrailExecutionPlan(final List<AuditTrailExecutionPlanConfigurer> configurers) {
        val plan = new DefaultAuditTrailExecutionPlan();
        configurers.forEach(c -> {
            val name = StringUtils.removePattern(c.getClass().getSimpleName(), "\\$.+");
            LOGGER.debug("Registering audit trail manager [{}]", name);
            c.configureAuditTrailExecutionPlan(plan);
        });
        return plan;
    }
    /**
     * Registers the Inspektr client-info servlet filter that captures client/server
     * addresses into a thread-local for audit records. Runs at highest precedence.
     * NOTE(review): raw FilterRegistrationBean; on Spring Boot 2+ this could be
     * FilterRegistrationBean&lt;ClientInfoThreadLocalFilter&gt; — confirm target version.
     */
    @Bean
    public FilterRegistrationBean casClientInfoLoggingFilter() {
        val audit = casProperties.getAudit();
        val bean = new FilterRegistrationBean();
        bean.setFilter(new ClientInfoThreadLocalFilter());
        bean.setUrlPatterns(CollectionUtils.wrap("/*"));
        bean.setName("CAS Client Info Logging Filter");
        bean.setAsyncSupported(true);
        bean.setOrder(Ordered.HIGHEST_PRECEDENCE);
        val initParams = new HashMap<String, String>();
        // Optional overrides: read client/server addresses from alternate headers when configured.
        if (StringUtils.isNotBlank(audit.getAlternateClientAddrHeaderName())) {
            initParams.put(ClientInfoThreadLocalFilter.CONST_IP_ADDRESS_HEADER, audit.getAlternateClientAddrHeaderName());
        }
        if (StringUtils.isNotBlank(audit.getAlternateServerAddrHeaderName())) {
            initParams.put(ClientInfoThreadLocalFilter.CONST_SERVER_IP_ADDRESS_HEADER, audit.getAlternateServerAddrHeaderName());
        }
        initParams.put(ClientInfoThreadLocalFilter.CONST_USE_SERVER_HOST_ADDRESS, String.valueOf(audit.isUseServerHostAddress()));
        bean.setInitParameters(initParams);
        return bean;
    }
    /** Action resolver appending _SUCCESS/_FAILED suffixes for authentication events. */
    @ConditionalOnMissingBean(name = "authenticationActionResolver")
    @Bean
    public AuditActionResolver authenticationActionResolver() {
        return new DefaultAuditActionResolver(AuditTrailConstants.AUDIT_ACTION_POSTFIX_SUCCESS,
            AuditTrailConstants.AUDIT_ACTION_POSTFIX_FAILED);
    }
    /** Action resolver appending _CREATED/_NOT_CREATED suffixes for ticket creation. */
    @ConditionalOnMissingBean(name = "ticketCreationActionResolver")
    @Bean
    public AuditActionResolver ticketCreationActionResolver() {
        return new DefaultAuditActionResolver(AuditTrailConstants.AUDIT_ACTION_POSTFIX_CREATED, "_NOT_CREATED");
    }
    /** Action resolver appending _SUCCESS/_FAILED suffixes for ticket validation. */
    @ConditionalOnMissingBean(name = "ticketValidationActionResolver")
    @Bean
    public AuditActionResolver ticketValidationActionResolver() {
        return new DefaultAuditActionResolver(AuditTrailConstants.AUDIT_ACTION_POSTFIX_SUCCESS, AuditTrailConstants.AUDIT_ACTION_POSTFIX_FAILED);
    }
    /** Resource resolver rendering the joinpoint return value as a shortened string. */
    @ConditionalOnMissingBean(name = "returnValueResourceResolver")
    @Bean
    public AuditResourceResolver returnValueResourceResolver() {
        return new ShortenedReturnValueAsStringResourceResolver();
    }
    /** Wrapper around {@link #returnValueResourceResolver()} that tolerates null return values. */
    @ConditionalOnMissingBean(name = "nullableReturnValueResourceResolver")
    @Bean
    public AuditResourceResolver nullableReturnValueResourceResolver() {
        return new NullableReturnValueAuditResourceResolver(returnValueResourceResolver());
    }
    /** Resource resolver for service access-enforcement audit events. */
    @ConditionalOnMissingBean(name = "serviceAccessEnforcementAuditResourceResolver")
    @Bean
    public ServiceAccessEnforcementAuditResourceResolver serviceAccessEnforcementAuditResourceResolver() {
        return new ServiceAccessEnforcementAuditResourceResolver();
    }
    /**
     * Extension point for deployers to define custom AuditActionResolvers to extend the stock resolvers.
     *
     * @return the map
     */
    @ConditionalOnMissingBean(name = "customAuditActionResolverMap")
    @Bean
    public Map<String, AuditActionResolver> customAuditActionResolverMap() {
        return new HashMap<>(0);
    }
    /**
     * Extension point for deployers to define custom AuditResourceResolvers to extend the stock resolvers.
     *
     * @return the map
     */
    @ConditionalOnMissingBean(name = "customAuditResourceResolverMap")
    @Bean
    public Map<String, AuditResourceResolver> customAuditResourceResolverMap() {
        return new HashMap<>(0);
    }
    /** Resolves the auditable principal from a thread-local, delegating id extraction to the provider. */
    @ConditionalOnMissingBean(name = "auditablePrincipalResolver")
    @Bean
    public PrincipalResolver auditablePrincipalResolver(@Qualifier("auditPrincipalIdProvider") final AuditPrincipalIdProvider auditPrincipalIdProvider) {
        return new ThreadLocalPrincipalResolver(auditPrincipalIdProvider);
    }
    /** Resource resolver treating the first joinpoint argument as a ticket id. */
    @ConditionalOnMissingBean(name = "ticketResourceResolver")
    @Bean
    public AuditResourceResolver ticketResourceResolver() {
        return new TicketAsFirstParameterResourceResolver();
    }
    /** Ticket-validation resource resolver; includes the validation assertion only when configured. */
    @ConditionalOnMissingBean(name = "ticketValidationResourceResolver")
    @Bean
    public AuditResourceResolver ticketValidationResourceResolver() {
        val audit = casProperties.getAudit();
        if (audit.isIncludeValidationAssertion()) {
            return new TicketValidationResourceResolver();
        }
        return ticketResourceResolver();
    }
    /** Resource resolver that expands message-bundle codes via the application context. */
    @ConditionalOnMissingBean(name = "messageBundleAwareResourceResolver")
    @Bean
    public AuditResourceResolver messageBundleAwareResourceResolver() {
        return new MessageBundleAwareResourceResolver(applicationContext);
    }
    /** Chains every AuditPrincipalIdProvider bean in the context, ordered by @Order semantics. */
    @ConditionalOnMissingBean(name = "auditPrincipalIdProvider")
    @Bean
    public AuditPrincipalIdProvider auditPrincipalIdProvider() {
        val resolvers = applicationContext.getBeansOfType(AuditPrincipalIdProvider.class, false, true);
        val providers = new ArrayList<>(resolvers.values());
        AnnotationAwareOrderComparator.sort(providers);
        return new ChainingAuditPrincipalIdProvider(providers);
    }
    /** Contributes the default SLF4J trail manager, configured from audit.slf4j properties. */
    @Override
    public void configureAuditTrailExecutionPlan(final AuditTrailExecutionPlan plan) {
        val audit = casProperties.getAudit().getSlf4j();
        val slf4j = new Slf4jLoggingAuditTrailManager();
        slf4j.setUseSingleLine(audit.isUseSingleLine());
        slf4j.setEntrySeparator(audit.getSinglelineSeparator());
        slf4j.setAuditFormat(AbstractStringAuditTrailManager.AuditFormats.valueOf(audit.getAuditFormat().toUpperCase()));
        plan.registerAuditTrailManager(slf4j);
    }
    /** Contributes the stock action/resource resolvers, then overlays deployer-provided custom maps. */
    @Override
    public void configureAuditTrailRecordResolutionPlan(final AuditTrailRecordResolutionPlan plan) {
        /*
        Add audit action resolvers here.
         */
        val resolver = authenticationActionResolver();
        plan.registerAuditActionResolver("AUTHENTICATION_RESOLVER", resolver);
        plan.registerAuditActionResolver("SAVE_SERVICE_ACTION_RESOLVER", resolver);
        val defResolver = new DefaultAuditActionResolver();
        plan.registerAuditActionResolver("DESTROY_TICKET_GRANTING_TICKET_RESOLVER", defResolver);
        plan.registerAuditActionResolver("DESTROY_PROXY_GRANTING_TICKET_RESOLVER", defResolver);
        val cResolver = ticketCreationActionResolver();
        plan.registerAuditActionResolver("CREATE_PROXY_GRANTING_TICKET_RESOLVER", cResolver);
        plan.registerAuditActionResolver("GRANT_SERVICE_TICKET_RESOLVER", cResolver);
        plan.registerAuditActionResolver("GRANT_PROXY_TICKET_RESOLVER", cResolver);
        plan.registerAuditActionResolver("CREATE_TICKET_GRANTING_TICKET_RESOLVER", cResolver);
        val authResolver = new DefaultAuditActionResolver(AuditTrailConstants.AUDIT_ACTION_POSTFIX_TRIGGERED, StringUtils.EMPTY);
        plan.registerAuditActionResolver("AUTHENTICATION_EVENT_ACTION_RESOLVER", authResolver);
        plan.registerAuditActionResolver("VALIDATE_SERVICE_TICKET_RESOLVER", ticketValidationActionResolver());
        val serviceAccessResolver = new DefaultAuditActionResolver(AuditTrailConstants.AUDIT_ACTION_POSTFIX_TRIGGERED, StringUtils.EMPTY);
        plan.registerAuditActionResolver("SERVICE_ACCESS_ENFORCEMENT_ACTION_RESOLVER", serviceAccessResolver);
        /*
        Add audit resource resolvers here.
         */
        plan.registerAuditResourceResolver("AUTHENTICATION_RESOURCE_RESOLVER", new CredentialsAsFirstParameterResourceResolver());
        val messageBundleAwareResourceResolver = messageBundleAwareResourceResolver();
        plan.registerAuditResourceResolver("CREATE_TICKET_GRANTING_TICKET_RESOURCE_RESOLVER", messageBundleAwareResourceResolver);
        plan.registerAuditResourceResolver("CREATE_PROXY_GRANTING_TICKET_RESOURCE_RESOLVER", messageBundleAwareResourceResolver);
        val ticketResourceResolver = ticketResourceResolver();
        plan.registerAuditResourceResolver("DESTROY_TICKET_GRANTING_TICKET_RESOURCE_RESOLVER", ticketResourceResolver);
        plan.registerAuditResourceResolver("DESTROY_PROXY_GRANTING_TICKET_RESOURCE_RESOLVER", ticketResourceResolver);
        plan.registerAuditResourceResolver("GRANT_SERVICE_TICKET_RESOURCE_RESOLVER", new ServiceResourceResolver());
        plan.registerAuditResourceResolver("GRANT_PROXY_TICKET_RESOURCE_RESOLVER", new ServiceResourceResolver());
        plan.registerAuditResourceResolver("VALIDATE_SERVICE_TICKET_RESOURCE_RESOLVER", ticketValidationResourceResolver());
        plan.registerAuditResourceResolver("SAVE_SERVICE_RESOURCE_RESOLVER", returnValueResourceResolver());
        plan.registerAuditResourceResolver("AUTHENTICATION_EVENT_RESOURCE_RESOLVER", nullableReturnValueResourceResolver());
        plan.registerAuditResourceResolver("SERVICE_ACCESS_ENFORCEMENT_RESOURCE_RESOLVER", serviceAccessEnforcementAuditResourceResolver());
        /*
        Add custom resolvers here.
         */
        plan.registerAuditActionResolvers(customAuditActionResolverMap());
        plan.registerAuditResourceResolvers(customAuditResourceResolverMap());
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/spanner/admin/database/v1/spanner_database_admin.proto
package com.google.spanner.admin.database.v1;
/**
*
*
* <pre>
* The response for [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl].
* </pre>
*
* Protobuf type {@code google.spanner.admin.database.v1.GetDatabaseDdlResponse}
*/
public final class GetDatabaseDdlResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.spanner.admin.database.v1.GetDatabaseDdlResponse)
GetDatabaseDdlResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetDatabaseDdlResponse.newBuilder() to construct.
  // Builder-based constructor used by newBuilder().build(). (Generated code —
  // comments here will be lost when the proto is regenerated.)
  private GetDatabaseDdlResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: initializes the repeated field to the shared empty list.
  private GetDatabaseDdlResponse() {
    statements_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetDatabaseDdlResponse();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor used by the generated PARSER: reads tags until EOF
  // (tag 0), accumulating repeated-string field 1 ("statements") and preserving
  // unrecognized fields. (Generated code — do not hand-edit logic.)
  private GetDatabaseDdlResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              // Lazily allocate the mutable list on first element; bit 0 tracks allocation.
              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
                statements_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              statements_.add(s);
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Freeze the repeated field and unknown-field set even on parse failure.
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        statements_ = statements_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.spanner.admin.database.v1.SpannerDatabaseAdminProto
.internal_static_google_spanner_admin_database_v1_GetDatabaseDdlResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.spanner.admin.database.v1.SpannerDatabaseAdminProto
.internal_static_google_spanner_admin_database_v1_GetDatabaseDdlResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.class,
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.Builder.class);
}
public static final int STATEMENTS_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList statements_;
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @return A list containing the statements.
*/
public com.google.protobuf.ProtocolStringList getStatementsList() {
return statements_;
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @return The count of statements.
*/
public int getStatementsCount() {
return statements_.size();
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param index The index of the element to return.
* @return The statements at the given index.
*/
public java.lang.String getStatements(int index) {
return statements_.get(index);
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the statements at the given index.
*/
public com.google.protobuf.ByteString getStatementsBytes(int index) {
return statements_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < statements_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, statements_.getRaw(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < statements_.size(); i++) {
dataSize += computeStringSizeNoTag(statements_.getRaw(i));
}
size += dataSize;
size += 1 * getStatementsList().size();
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.spanner.admin.database.v1.GetDatabaseDdlResponse)) {
return super.equals(obj);
}
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse other =
(com.google.spanner.admin.database.v1.GetDatabaseDdlResponse) obj;
if (!getStatementsList().equals(other.getStatementsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getStatementsCount() > 0) {
hash = (37 * hash) + STATEMENTS_FIELD_NUMBER;
hash = (53 * hash) + getStatementsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response for [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl].
* </pre>
*
* Protobuf type {@code google.spanner.admin.database.v1.GetDatabaseDdlResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.spanner.admin.database.v1.GetDatabaseDdlResponse)
com.google.spanner.admin.database.v1.GetDatabaseDdlResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.spanner.admin.database.v1.SpannerDatabaseAdminProto
.internal_static_google_spanner_admin_database_v1_GetDatabaseDdlResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.spanner.admin.database.v1.SpannerDatabaseAdminProto
.internal_static_google_spanner_admin_database_v1_GetDatabaseDdlResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.class,
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.Builder.class);
}
// Construct using com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
statements_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.spanner.admin.database.v1.SpannerDatabaseAdminProto
.internal_static_google_spanner_admin_database_v1_GetDatabaseDdlResponse_descriptor;
}
@java.lang.Override
public com.google.spanner.admin.database.v1.GetDatabaseDdlResponse getDefaultInstanceForType() {
return com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.spanner.admin.database.v1.GetDatabaseDdlResponse build() {
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.spanner.admin.database.v1.GetDatabaseDdlResponse buildPartial() {
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse result =
new com.google.spanner.admin.database.v1.GetDatabaseDdlResponse(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) != 0)) {
statements_ = statements_.getUnmodifiableView();
bitField0_ = (bitField0_ & ~0x00000001);
}
result.statements_ = statements_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.spanner.admin.database.v1.GetDatabaseDdlResponse) {
return mergeFrom((com.google.spanner.admin.database.v1.GetDatabaseDdlResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.spanner.admin.database.v1.GetDatabaseDdlResponse other) {
if (other == com.google.spanner.admin.database.v1.GetDatabaseDdlResponse.getDefaultInstance())
return this;
if (!other.statements_.isEmpty()) {
if (statements_.isEmpty()) {
statements_ = other.statements_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureStatementsIsMutable();
statements_.addAll(other.statements_);
}
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.spanner.admin.database.v1.GetDatabaseDdlResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.spanner.admin.database.v1.GetDatabaseDdlResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.google.protobuf.LazyStringList statements_ =
com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureStatementsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
statements_ = new com.google.protobuf.LazyStringArrayList(statements_);
bitField0_ |= 0x00000001;
}
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @return A list containing the statements.
*/
public com.google.protobuf.ProtocolStringList getStatementsList() {
return statements_.getUnmodifiableView();
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @return The count of statements.
*/
public int getStatementsCount() {
return statements_.size();
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param index The index of the element to return.
* @return The statements at the given index.
*/
public java.lang.String getStatements(int index) {
return statements_.get(index);
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the statements at the given index.
*/
public com.google.protobuf.ByteString getStatementsBytes(int index) {
return statements_.getByteString(index);
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param index The index to set the value at.
* @param value The statements to set.
* @return This builder for chaining.
*/
public Builder setStatements(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureStatementsIsMutable();
statements_.set(index, value);
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param value The statements to add.
* @return This builder for chaining.
*/
public Builder addStatements(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureStatementsIsMutable();
statements_.add(value);
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param values The statements to add.
* @return This builder for chaining.
*/
public Builder addAllStatements(java.lang.Iterable<java.lang.String> values) {
ensureStatementsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, statements_);
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearStatements() {
statements_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of formatted DDL statements defining the schema of the database
* specified in the request.
* </pre>
*
* <code>repeated string statements = 1;</code>
*
* @param value The bytes of the statements to add.
* @return This builder for chaining.
*/
public Builder addStatementsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureStatementsIsMutable();
statements_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.spanner.admin.database.v1.GetDatabaseDdlResponse)
}
// @@protoc_insertion_point(class_scope:google.spanner.admin.database.v1.GetDatabaseDdlResponse)
private static final com.google.spanner.admin.database.v1.GetDatabaseDdlResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.spanner.admin.database.v1.GetDatabaseDdlResponse();
}
public static com.google.spanner.admin.database.v1.GetDatabaseDdlResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<GetDatabaseDdlResponse> PARSER =
new com.google.protobuf.AbstractParser<GetDatabaseDdlResponse>() {
@java.lang.Override
public GetDatabaseDdlResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetDatabaseDdlResponse(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<GetDatabaseDdlResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetDatabaseDdlResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.spanner.admin.database.v1.GetDatabaseDdlResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Stepan M. Mishura
* @version $Revision$
*/
package org.apache.harmony.security.tests.asn1.der;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;
import junit.framework.TestCase;
import org.apache.harmony.security.asn1.ASN1BitString;
import org.apache.harmony.security.asn1.ASN1Exception;
import org.apache.harmony.security.asn1.BitString;
import org.apache.harmony.security.asn1.DerInputStream;
import org.apache.harmony.security.asn1.DerOutputStream;
import org.apache.harmony.security.asn1.ASN1BitString.ASN1NamedBitList;
/**
* ASN.1 DER test for Bitstring type
*
* @see http://asn1.elibel.tm.fr/en/standards/index.htm
*/
public class BitStringTest extends TestCase {

    public static void main(String[] args) {
        junit.textui.TestRunner.run(BitStringTest.class);
    }

    /** Valid DER encodings paired with the BitString value each one represents. */
    private static final Object[][] VALID_CASES = {
            { new BitString(new byte[] {}, 0),
                    new byte[] { 0x03, 0x01, 0x00 } },
            { new BitString(new byte[] { 0x05 }, 0),
                    new byte[] { 0x03, 0x02, 0x00, 0x05 } },
            { new BitString(new byte[] { (byte) 0x80 }, 7),
                    new byte[] { 0x03, 0x02, 0x07, (byte) 0x80 } } };

    /** Asserts that two BitString values carry the same unused-bit count and payload. */
    private static void assertSameBitString(String msg, BitString expected, BitString actual) {
        assertEquals(msg, expected.unusedBits, actual.unusedBits);
        assertTrue(msg, Arrays.equals(expected.bytes, actual.bytes));
    }

    /** Round-trips every valid case through decode (array and stream) and encode. */
    public void testDecode_Encode() throws IOException {
        ASN1BitString asn1 = ASN1BitString.getInstance();

        // decode straight from the byte array
        for (int tc = 0; tc < VALID_CASES.length; tc++) {
            BitString expected = (BitString) VALID_CASES[tc][0];
            BitString decoded = (BitString) asn1.decode(
                    new DerInputStream((byte[]) VALID_CASES[tc][1]));
            assertSameBitString("Testcase: " + tc, expected, decoded);
        }

        // decode via an InputStream wrapper
        for (int tc = 0; tc < VALID_CASES.length; tc++) {
            BitString expected = (BitString) VALID_CASES[tc][0];
            BitString decoded = (BitString) asn1.decode(new DerInputStream(
                    new ByteArrayInputStream((byte[]) VALID_CASES[tc][1])));
            assertSameBitString("Testcase: " + tc, expected, decoded);
        }

        // encode each value and compare against the reference bytes
        for (int tc = 0; tc < VALID_CASES.length; tc++) {
            DerOutputStream out = new DerOutputStream(asn1, VALID_CASES[tc][0]);
            assertTrue("Testcase: " + tc,
                    Arrays.equals((byte[]) VALID_CASES[tc][1], out.encoded));
        }
    }

    /** Verifies that malformed DER encodings are rejected with ASN1Exception. */
    public void testDecode_Invalid() throws IOException {
        byte[][] invalid = new byte[][] {
                // tag is not 0x03 (BIT STRING)
                new byte[] { 0x02, 0x01, 0x00 },
                // length of zero octets
                new byte[] { 0x03, 0x00 },
                // unused-bits value greater than 7
                new byte[] { 0x03, 0x03, 0x09, 0x0F, 0x0F },
                // empty string must declare 0 unused bits
                new byte[] { 0x03, 0x01, 0x01 },
                // unused bits of the final octet are not zero
                new byte[] { 0x03, 0x02, 0x01, 0x01 },
                // constructed encoding is not permitted
                new byte[] { 0x23, 0x03, 0x03, 0x01, 0x00 } };

        for (int tc = 0; tc < invalid.length; tc++) {
            try {
                ASN1BitString.getInstance().decode(new DerInputStream(invalid[tc]));
                fail("No expected ASN1Exception for: " + tc);
            } catch (ASN1Exception expected) {
                // the malformed encoding was correctly rejected
            }
        }
    }

    //
    //
    // Named Bit List
    //
    //

    /** Decodes each case with the given decoder and compares the boolean flags. */
    private static void checkNamedBitListDecode(ASN1NamedBitList decoder, Object[][] cases)
            throws IOException {
        for (int tc = 0; tc < cases.length; tc++) {
            DerInputStream in = new DerInputStream((byte[]) cases[tc][1]);
            assertTrue("Testcase: " + tc, Arrays.equals(
                    (boolean[]) cases[tc][0], (boolean[]) decoder.decode(in)));
        }
    }

    /** Decodes named-bit-list encodings with an unconstrained decoder. */
    public void testDecodeNamedBitList() throws IOException {
        Object[][] cases = {
                { new boolean[] {},
                        new byte[] { 0x03, 0x01, 0x00 } },
                { new boolean[] { true },
                        new byte[] { 0x03, 0x02, 0x07, (byte) 0x80 } },
                { new boolean[] { true, false, true },
                        new byte[] { 0x03, 0x02, 0x05, (byte) 0xA0 } },
                { new boolean[] { true, true, true, true, true, true, true, true },
                        new byte[] { 0x03, 0x02, 0x00, (byte) 0xFF } },
                { new boolean[] { false, false, false, false, false,
                        false, false, false, true },
                        new byte[] { 0x03, 0x03, 0x07, 0x00, (byte) 0x80 } } };
        checkNamedBitListDecode(new ASN1NamedBitList(), cases);
    }

    /** Decodes with a decoder carrying a minimum-size constraint of 8 bits. */
    public void testDecodeNamedBitList_SizeConstraints() throws IOException {
        Object[][] cases = {
                { new boolean[] { false, false, false, false, false,
                        false, false, false },
                        new byte[] { 0x03, 0x01, 0x00 } },
                { new boolean[] { true, false, false, false, false,
                        false, false, false },
                        new byte[] { 0x03, 0x02, 0x07, (byte) 0x80 } },
                { new boolean[] { true, false, true, false, false, false,
                        false, false },
                        new byte[] { 0x03, 0x02, 0x05, (byte) 0xA0 } },
                { new boolean[] { true, true, true, true, true, true, true, true },
                        new byte[] { 0x03, 0x02, 0x00, (byte) 0xFF } },
                { new boolean[] { false, false, false, false, false,
                        false, false, false, true },
                        new byte[] { 0x03, 0x03, 0x07, 0x00, (byte) 0x80 } } };
        checkNamedBitListDecode(new ASN1NamedBitList(8), cases);
    }

    /** Encodes boolean flag arrays and compares against the reference DER bytes. */
    public void testEncodeNamedBitList() throws IOException {
        Object[][] cases = {
                { new boolean[] {},
                        new byte[] { 0x03, 0x01, 0x00 } },
                { new boolean[] { false },
                        new byte[] { 0x03, 0x01, 0x00 } },
                { new boolean[] { true },
                        new byte[] { 0x03, 0x02, 0x07, (byte) 0x80 } },
                { new boolean[] { true, false, true },
                        new byte[] { 0x03, 0x02, 0x05, (byte) 0xA0 } },
                { new boolean[] { true, true, true, true, true, true, true, true },
                        new byte[] { 0x03, 0x02, 0x00, (byte) 0xFF } },
                { new boolean[] { false, false, false, false, false,
                        false, false, false, true },
                        new byte[] { 0x03, 0x03, 0x07, 0x00, (byte) 0x80 } } };

        ASN1NamedBitList encoder = new ASN1NamedBitList();
        for (int tc = 0; tc < cases.length; tc++) {
            DerOutputStream out = new DerOutputStream(encoder, cases[tc][0]);
            assertTrue("Testcase: " + tc,
                    Arrays.equals((byte[]) cases[tc][1], out.encoded));
        }
    }
}
| |
package org.hisp.dhis.query;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.query.operators.MatchMode;
import org.hisp.dhis.schema.Schema;
import org.hisp.dhis.schema.SchemaService;
import org.jfree.data.time.Year;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collection;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
public class CriteriaQueryEngineTest
extends DhisSpringTest
{
@Autowired
private SchemaService schemaService;
@Autowired
private QueryService queryService;
@Autowired
private CriteriaQueryEngine<? extends IdentifiableObject> queryEngine;
@Autowired
private IdentifiableObjectManager identifiableObjectManager;
@Before
public void createDataElements()
{
DataElement dataElementA = createDataElement( 'A' );
dataElementA.setValueType( ValueType.NUMBER );
dataElementA.setDisplayName( "dataElementA" );
dataElementA.setName( "dataElementA" );
DataElement dataElementB = createDataElement( 'B' );
dataElementB.setValueType( ValueType.BOOLEAN );
dataElementB.setDisplayName( "dataElementB" );
dataElementB.setName( "dataElementB" );
DataElement dataElementC = createDataElement( 'C' );
dataElementC.setValueType( ValueType.INTEGER );
dataElementC.setDisplayName( "dataElementC" );
dataElementC.setName( "dataElementC" );
DataElement dataElementD = createDataElement( 'D' );
dataElementD.setValueType( ValueType.NUMBER );
dataElementD.setDisplayName( "dataElementD" );
dataElementD.setName( "dataElementD" );
DataElement dataElementE = createDataElement( 'E' );
dataElementE.setValueType( ValueType.BOOLEAN );
dataElementE.setDisplayName( "dataElementE" );
dataElementE.setName( "dataElementE" );
DataElement dataElementF = createDataElement( 'F' );
dataElementF.setValueType( ValueType.INTEGER );
dataElementF.setDisplayName( "dataElementF" );
dataElementF.setName( "dataElementF" );
dataElementA.setCreated( Year.parseYear( "2001" ).getStart() );
dataElementB.setCreated( Year.parseYear( "2002" ).getStart() );
dataElementC.setCreated( Year.parseYear( "2003" ).getStart() );
dataElementD.setCreated( Year.parseYear( "2004" ).getStart() );
dataElementE.setCreated( Year.parseYear( "2005" ).getStart() );
dataElementF.setCreated( Year.parseYear( "2006" ).getStart() );
identifiableObjectManager.save( dataElementB );
identifiableObjectManager.save( dataElementE );
identifiableObjectManager.save( dataElementA );
identifiableObjectManager.save( dataElementC );
identifiableObjectManager.save( dataElementF );
identifiableObjectManager.save( dataElementD );
DataElementGroup dataElementGroupA = createDataElementGroup( 'A' );
dataElementGroupA.addDataElement( dataElementA );
dataElementGroupA.addDataElement( dataElementB );
dataElementGroupA.addDataElement( dataElementC );
dataElementGroupA.addDataElement( dataElementD );
DataElementGroup dataElementGroupB = createDataElementGroup( 'B' );
dataElementGroupB.addDataElement( dataElementE );
dataElementGroupB.addDataElement( dataElementF );
identifiableObjectManager.save( dataElementGroupA );
identifiableObjectManager.save( dataElementGroupB );
}
/**
 * Returns true if any object in the given collection carries the given UID.
 */
private boolean collectionContainsUid( Collection<? extends IdentifiableObject> collection, String uid )
{
    boolean found = false;

    for ( IdentifiableObject candidate : collection )
    {
        if ( candidate.getUid().equals( uid ) )
        {
            found = true;
            break;
        }
    }

    return found;
}
@Test
public void getAllQuery()
{
    // An unrestricted query returns every DataElement created in setup.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );

    assertEquals( 6, queryEngine.query( query ).size() );
}
@Test
public void getMinMaxQuery()
{
    Schema schema = schemaService.getDynamicSchema( DataElement.class );

    // Offset 2 with a page size larger than the remainder -> the last 4 of 6.
    Query query = Query.from( schema );
    query.setFirstResult( 2 );
    query.setMaxResults( 10 );
    assertEquals( 4, queryEngine.query( query ).size() );

    // Offset 2 with page size 2 -> exactly one full page.
    query = Query.from( schema );
    query.setFirstResult( 2 );
    query.setMaxResults( 2 );
    assertEquals( 2, queryEngine.query( query ).size() );
}
@Test
public void getEqQuery()
{
    // eq on "id" matches exactly the one element with that UID.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.eq( "id", "deabcdefghA" ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
    assertEquals( "deabcdefghA", result.get( 0 ).getUid() );
}
@Test
public void getNeQuery()
{
    // ne on "id" excludes exactly the one matching element.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.ne( "id", "deabcdefghA" ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 5, result.size() );
    assertFalse( collectionContainsUid( result, "deabcdefghA" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghE" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
public void getLikeQuery()
{
    // like on "name" with ANYWHERE matches only dataElementF.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.like( "name", "F", MatchMode.ANYWHERE ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
    assertEquals( "deabcdefghF", result.get( 0 ).getUid() );
}
@Test
public void getGtQuery()
{
    // created > 2003 -> D (2004), E (2005), F (2006).
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.gt( "created", Year.parseYear( "2003" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 3, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghE" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
public void getLtQuery()
{
    // created < 2003 -> A (2001), B (2002).
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.lt( "created", Year.parseYear( "2003" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghA" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
}
@Test
public void getGeQuery()
{
    // created >= 2003 -> C, D, E, F (bound is inclusive).
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.ge( "created", Year.parseYear( "2003" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 4, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghE" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
public void getLeQuery()
{
    // created <= 2003 -> A, B, C (bound is inclusive).
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.le( "created", Year.parseYear( "2003" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 3, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghA" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
}
@Test
public void getBetweenQuery()
{
    // between 2003 and 2005 is inclusive on both ends -> C, D, E.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.between( "created", Year.parseYear( "2003" ).getStart(), Year.parseYear( "2005" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 3, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghE" ) );
}
@Test
public void testDateRange()
{
    // Two restrictions on the root junction combine as AND: 2002 <= created <= 2004.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.ge( "created", Year.parseYear( "2002" ).getStart() ) );
    query.add( Restrictions.le( "created", Year.parseYear( "2004" ).getStart() ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 3, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
}
@Test
public void getInQuery()
{
    // in on "id" matches exactly the listed UIDs.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.in( "id", Lists.newArrayList( "deabcdefghD", "deabcdefghF" ) ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
public void sortNameDesc()
{
    // Descending name order -> F down to A.
    Schema dataElementSchema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( dataElementSchema );
    query.addOrder( new Order( dataElementSchema.getProperty( "name" ), Direction.DESCENDING ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 6, result.size() );
    assertEquals( "deabcdefghF", result.get( 0 ).getUid() );
    assertEquals( "deabcdefghE", result.get( 1 ).getUid() );
    assertEquals( "deabcdefghD", result.get( 2 ).getUid() );
    assertEquals( "deabcdefghC", result.get( 3 ).getUid() );
    assertEquals( "deabcdefghB", result.get( 4 ).getUid() );
    assertEquals( "deabcdefghA", result.get( 5 ).getUid() );
}
@Test
public void sortNameAsc()
{
    // Ascending name order -> A up to F.
    Schema dataElementSchema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( dataElementSchema );
    query.addOrder( new Order( dataElementSchema.getProperty( "name" ), Direction.ASCENDING ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 6, result.size() );
    assertEquals( "deabcdefghA", result.get( 0 ).getUid() );
    assertEquals( "deabcdefghB", result.get( 1 ).getUid() );
    assertEquals( "deabcdefghC", result.get( 2 ).getUid() );
    assertEquals( "deabcdefghD", result.get( 3 ).getUid() );
    assertEquals( "deabcdefghE", result.get( 4 ).getUid() );
    assertEquals( "deabcdefghF", result.get( 5 ).getUid() );
}
@Test
public void sortCreatedDesc()
{
    // Created dates run 2001 (A) .. 2006 (F); descending -> F down to A.
    Schema dataElementSchema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( dataElementSchema );
    query.addOrder( new Order( dataElementSchema.getProperty( "created" ), Direction.DESCENDING ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 6, result.size() );
    assertEquals( "deabcdefghF", result.get( 0 ).getUid() );
    assertEquals( "deabcdefghE", result.get( 1 ).getUid() );
    assertEquals( "deabcdefghD", result.get( 2 ).getUid() );
    assertEquals( "deabcdefghC", result.get( 3 ).getUid() );
    assertEquals( "deabcdefghB", result.get( 4 ).getUid() );
    assertEquals( "deabcdefghA", result.get( 5 ).getUid() );
}
@Test
public void sortCreatedAsc()
{
    // Created dates run 2001 (A) .. 2006 (F); ascending -> A up to F.
    Schema dataElementSchema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( dataElementSchema );
    query.addOrder( new Order( dataElementSchema.getProperty( "created" ), Direction.ASCENDING ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 6, result.size() );
    assertEquals( "deabcdefghA", result.get( 0 ).getUid() );
    assertEquals( "deabcdefghB", result.get( 1 ).getUid() );
    assertEquals( "deabcdefghC", result.get( 2 ).getUid() );
    assertEquals( "deabcdefghD", result.get( 3 ).getUid() );
    assertEquals( "deabcdefghE", result.get( 4 ).getUid() );
    assertEquals( "deabcdefghF", result.get( 5 ).getUid() );
}
@Test
public void testDoubleEqConjunction()
{
    // AND of two different "id" values can never match any element.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );

    Conjunction conjunction = query.conjunction();
    conjunction.add( Restrictions.eq( "id", "deabcdefghD" ) );
    conjunction.add( Restrictions.eq( "id", "deabcdefghF" ) );
    query.add( conjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 0, result.size() );
}
@Test
public void testDoubleEqDisjunction()
{
    // OR of two "id" values matches both elements.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );

    Disjunction disjunction = query.disjunction();
    disjunction.add( Restrictions.eq( "id", "deabcdefghD" ) );
    disjunction.add( Restrictions.eq( "id", "deabcdefghF" ) );
    query.add( disjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
public void testDateRangeWithConjunction()
{
    // Same range as testDateRange, expressed via an explicit conjunction.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );

    Conjunction conjunction = query.conjunction();
    conjunction.add( Restrictions.ge( "created", Year.parseYear( "2002" ).getStart() ) );
    conjunction.add( Restrictions.le( "created", Year.parseYear( "2004" ).getStart() ) );
    query.add( conjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 3, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
}
@Test
public void testIsNull()
{
    // Every data element in setup gets a category combo, so none match isNull.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.isNull( "categoryCombo" ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 0, result.size() );
}
@Test
public void testIsNotNull()
{
    // Complement of testIsNull: all six elements have a category combo.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema );
    query.add( Restrictions.isNotNull( "categoryCombo" ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 6, result.size() );
    assertTrue( collectionContainsUid( result, "deabcdefghA" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghB" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghC" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghD" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghE" ) );
    assertTrue( collectionContainsUid( result, "deabcdefghF" ) );
}
@Test
@Ignore
public void testCollectionEqSize4()
{
    // eq on a collection property compares its size; group A holds 4 elements.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema );
    query.add( Restrictions.eq( "dataElements", 4 ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
    assertEquals( "abcdefghijA", result.get( 0 ).getUid() );
}
@Test
@Ignore
public void testCollectionEqSize2()
{
    // eq on a collection property compares its size; group B holds 2 elements.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema );
    query.add( Restrictions.eq( "dataElements", 2 ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
    assertEquals( "abcdefghijB", result.get( 0 ).getUid() );
}
@Test
public void testIdentifiableSearch1()
{
    // Root junction of type OR: either group name may match.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema, Junction.Type.OR );
    query.add( Restrictions.eq( "name", "DataElementGroupA" ) );
    query.add( Restrictions.eq( "name", "DataElementGroupB" ) );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
}
@Test
public void testIdentifiableSearch2()
{
    // Explicit Disjunction nested under an OR root behaves like search1.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction disjunction = new Disjunction( schema );
    disjunction.add( Restrictions.eq( "name", "DataElementGroupA" ) );
    disjunction.add( Restrictions.eq( "name", "DataElementGroupB" ) );
    query.add( disjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
}
@Test
public void testIdentifiableSearch3()
{
    // Single like inside a Disjunction matches only group A.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema );

    Junction disjunction = new Disjunction( schema );
    disjunction.add( Restrictions.like( "name", "GroupA", MatchMode.ANYWHERE ) );
    query.add( disjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
}
@Test
public void testIdentifiableSearch4()
{
    // Duplicate like restrictions in a disjunction must not produce duplicates.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction disjunction = new Disjunction( schema );
    disjunction.add( Restrictions.like( "name", "GroupA", MatchMode.ANYWHERE ) );
    disjunction.add( Restrictions.like( "name", "GroupA", MatchMode.ANYWHERE ) );
    query.add( disjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
}
@Test
public void testIdentifiableSearch5()
{
    // Duplicate GroupA likes plus a GroupB like -> both groups, no duplicates.
    Schema schema = schemaService.getDynamicSchema( DataElementGroup.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction disjunction = new Disjunction( schema );
    disjunction.add( Restrictions.like( "name", "GroupA", MatchMode.ANYWHERE ) );
    disjunction.add( Restrictions.like( "name", "GroupA", MatchMode.ANYWHERE ) );
    disjunction.add( Restrictions.like( "name", "GroupB", MatchMode.ANYWHERE ) );
    query.add( disjunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 2, result.size() );
}
@Test
public void testIdentifiableSearch6()
{
    // Identifiable-style search: "deF" against name OR id OR code.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction identifiableJunction = new Disjunction( schema );
    identifiableJunction.add( Restrictions.like( "name", "deF", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "id", "deF", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "code", "deF", MatchMode.ANYWHERE ) );
    query.add( identifiableJunction );

    List<? extends IdentifiableObject> result = queryEngine.query( query );

    assertEquals( 1, result.size() );
}
@Test
@Ignore
public void testIdentifiableSearch7()
{
    // "dataElement" against name OR id OR code should match all six elements.
    // NOTE(review): this test goes through queryService rather than
    // queryEngine like its siblings — confirm that is intentional.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction identifiableJunction = new Disjunction( schema );
    identifiableJunction.add( Restrictions.like( "name", "dataElement", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "id", "dataElement", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "code", "dataElement", MatchMode.ANYWHERE ) );
    query.add( identifiableJunction );

    List<? extends IdentifiableObject> result = queryService.query( query );

    assertEquals( 6, result.size() );
}
@Test
@Ignore
public void testIdentifiableSearch8()
{
    // Same as search7 but matching on displayName instead of name.
    // NOTE(review): also routed through queryService — confirm intentional.
    Schema schema = schemaService.getDynamicSchema( DataElement.class );
    Query query = Query.from( schema, Junction.Type.OR );

    Junction identifiableJunction = new Disjunction( schema );
    identifiableJunction.add( Restrictions.like( "displayName", "dataElement", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "id", "dataElement", MatchMode.ANYWHERE ) );
    identifiableJunction.add( Restrictions.like( "code", "dataElement", MatchMode.ANYWHERE ) );
    query.add( identifiableJunction );

    List<? extends IdentifiableObject> result = queryService.query( query );

    assertEquals( 6, result.size() );
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.storagegateway.AWSStorageGateway#addWorkingStorage(AddWorkingStorageRequest) AddWorkingStorage operation}.
* <p>
* This operation configures one or more gateway local disks as working storage for a gateway. This operation is supported only for the gateway-stored
* volume architecture.
* </p>
* <p>
* <b>NOTE:</b> Working storage is also referred to as upload buffer. You can also use the AddUploadBuffer operation to add upload buffer to a
* stored-volume gateway.
* </p>
* <p>
* In the request, you specify the gateway Amazon Resource Name (ARN) to which you want to add working storage, and one or more disk IDs that you want to
* configure as working storage.
* </p>
*
* @see com.amazonaws.services.storagegateway.AWSStorageGateway#addWorkingStorage(AddWorkingStorageRequest)
*/
public class AddWorkingStorageRequest extends AmazonWebServiceRequest {

    /**
     * The Amazon Resource Name (ARN) of the gateway. Use the
     * <a>ListGateways</a> operation to return a list of gateways for your
     * account and region.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Length: </b>50 - 500<br/>
     */
    private String gatewayARN;

    /**
     * An array of strings that identify disks that are to be configured as
     * working storage. Each string must have a minimum length of 1 and a
     * maximum length of 300. You can get the disk IDs from the
     * <a>ListLocalDisks</a> API.
     */
    private java.util.List<String> diskIds;

    /**
     * Returns the Amazon Resource Name (ARN) of the gateway. Use the
     * <a>ListGateways</a> operation to return a list of gateways for your
     * account and region.
     *
     * @return The gateway ARN (length 50 - 500).
     */
    public String getGatewayARN() {
        return gatewayARN;
    }

    /**
     * Sets the Amazon Resource Name (ARN) of the gateway. Use the
     * <a>ListGateways</a> operation to return a list of gateways for your
     * account and region.
     *
     * @param gatewayARN The gateway ARN (length 50 - 500).
     */
    public void setGatewayARN(String gatewayARN) {
        this.gatewayARN = gatewayARN;
    }

    /**
     * Sets the gateway ARN.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param gatewayARN The gateway ARN (length 50 - 500).
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public AddWorkingStorageRequest withGatewayARN(String gatewayARN) {
        this.gatewayARN = gatewayARN;
        return this;
    }

    /**
     * Returns the disk IDs that are to be configured as working storage.
     * The backing list is lazily created, so this never returns null.
     *
     * @return The disk IDs (each 1 - 300 characters); never null.
     */
    public java.util.List<String> getDiskIds() {
        if (diskIds == null) {
            diskIds = new java.util.ArrayList<String>();
        }
        return diskIds;
    }

    /**
     * Sets the disk IDs that are to be configured as working storage.
     * A defensive copy of the given collection is stored; passing null clears
     * the list.
     *
     * @param diskIds The disk IDs (each 1 - 300 characters), or null.
     */
    public void setDiskIds(java.util.Collection<String> diskIds) {
        if (diskIds == null) {
            this.diskIds = null;
            return;
        }
        java.util.List<String> diskIdsCopy = new java.util.ArrayList<String>(diskIds.size());
        diskIdsCopy.addAll(diskIds);
        this.diskIds = diskIdsCopy;
    }

    /**
     * Appends disk IDs that are to be configured as working storage.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param diskIds The disk IDs (each 1 - 300 characters).
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public AddWorkingStorageRequest withDiskIds(String... diskIds) {
        // BUG FIX: the old guard checked getDiskIds() == null, which is never
        // true because that getter lazily initializes the field itself — the
        // presized list was dead code. Check the field directly so the
        // capacity hint is actually applied.
        if (this.diskIds == null) {
            this.diskIds = new java.util.ArrayList<String>(diskIds.length);
        }
        for (String value : diskIds) {
            this.diskIds.add(value);
        }
        return this;
    }

    /**
     * Sets the disk IDs that are to be configured as working storage.
     * A defensive copy of the given collection is stored; null clears the list.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param diskIds The disk IDs (each 1 - 300 characters), or null.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public AddWorkingStorageRequest withDiskIds(java.util.Collection<String> diskIds) {
        if (diskIds == null) {
            this.diskIds = null;
        } else {
            java.util.List<String> diskIdsCopy = new java.util.ArrayList<String>(diskIds.size());
            diskIdsCopy.addAll(diskIds);
            this.diskIds = diskIdsCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (gatewayARN != null) sb.append("GatewayARN: " + gatewayARN + ", ");
        if (diskIds != null) sb.append("DiskIds: " + diskIds + ", ");
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getGatewayARN() == null) ? 0 : getGatewayARN().hashCode());
        hashCode = prime * hashCode + ((getDiskIds() == null) ? 0 : getDiskIds().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (!(obj instanceof AddWorkingStorageRequest)) return false;
        AddWorkingStorageRequest other = (AddWorkingStorageRequest) obj;

        if (other.getGatewayARN() == null ^ this.getGatewayARN() == null) return false;
        if (other.getGatewayARN() != null && !other.getGatewayARN().equals(this.getGatewayARN())) return false;
        if (other.getDiskIds() == null ^ this.getDiskIds() == null) return false;
        if (other.getDiskIds() != null && !other.getDiskIds().equals(this.getDiskIds())) return false;
        return true;
    }
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.idea.ui.vcsimport;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import com.intellij.util.ui.JBUI;
import com.microsoft.alm.plugin.idea.resources.TfPluginBundle;
import com.microsoft.alm.plugin.idea.ui.common.ServerContextTableModel;
import com.microsoft.alm.plugin.idea.ui.common.SwingHelper;
import com.microsoft.alm.plugin.idea.ui.common.TableModelSelectionConverter;
import com.microsoft.alm.plugin.idea.ui.controls.BusySpinnerPanel;
import com.microsoft.alm.plugin.idea.ui.controls.HintTextFieldUI;
import com.microsoft.alm.plugin.idea.ui.controls.UserAccountPanel;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NonNls;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.RowSorter;
import javax.swing.Timer;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.event.ActionListener;
import java.util.ResourceBundle;
public class ImportForm {
private JPanel contentPanel;                 // root panel; created by generated $$$setupUI$$$()
private UserAccountPanel userAccountPanel;   // custom control; created in createUIComponents()
private JTextField teamProjectFilter;        // filter box above the team project table
private JTable teamProjectTable;             // table listing team projects
private JTextField repositoryName;           // name of the repository to import into
private JButton refreshButton;               // reloads the team project list
private BusySpinnerPanel busySpinner;        // shown while the project list is loading
private JScrollPane teamProjectScrollPane;   // scroll container for teamProjectTable
private boolean initialized = false;         // guards the one-time setup in ensureInitialized()
private Timer timer;                         // debounces filter keystrokes; created in createUIComponents()
// Action command fired when the (debounced) project filter text changes.
@NonNls
public static final String CMD_PROJECT_FILTER_CHANGED = "teamProjectFilterChanged";
// Action command fired by the refresh button.
@NonNls
public static final String CMD_REFRESH = "refresh";
public ImportForm(final boolean vsoSelected) {
// The following call is required to initialize the controls on the form
// DO NOT MOVE THIS CALL
$$$setupUI$$$();
userAccountPanel.setWindowsAccount(!vsoSelected);
}
/**
 * Returns the root panel, wiring up the controls on first access.
 */
public JPanel getContentPanel() {
    ensureInitialized();
    return this.contentPanel;
}
/**
 * One-time wiring of the designer-created controls: action commands, HiDPI
 * row height, tab traversal, hint text, button/spinner sizing, and the
 * debounced filter listener. Guarded by the initialized flag so repeated
 * calls from getContentPanel() are harmless.
 */
private void ensureInitialized() {
    if (!initialized) {
        // Ensure that the commands are set up correctly
        teamProjectFilter.setActionCommand(CMD_PROJECT_FILTER_CHANGED);
        refreshButton.setActionCommand(CMD_REFRESH);

        // Fix HiDPI scaling for table
        teamProjectTable.setRowHeight(JBUI.scale(teamProjectTable.getRowHeight()));
        // Fix tabbing in table
        SwingHelper.fixTabKeys(teamProjectTable);
        // Set hint text
        teamProjectFilter.setUI(new HintTextFieldUI(
                TfPluginBundle.message(TfPluginBundle.KEY_IMPORT_DIALOG_FILTER_HINT)));

        // Align the busy spinner and the refresh button with the height of the text box
        refreshButton.putClientProperty("JButton.buttonType", "square"); // This is a magical property that tells IntelliJ to draw the button like an image button
        final int textBoxHeight = (int) teamProjectFilter.getPreferredSize().getHeight();
        final Dimension size = new Dimension(textBoxHeight, textBoxHeight);
        refreshButton.setMinimumSize(size);
        refreshButton.setPreferredSize(size);
        busySpinner.setMinimumSize(size);
        busySpinner.setPreferredSize(size);

        // Setup document events for filter
        // Using a timer so that we don't respond to every character typed
        // The timer is created in the create components method
        teamProjectFilter.getDocument().addDocumentListener(new DocumentListener() {
            @Override
            public void insertUpdate(DocumentEvent e) {
                onFilterChanged();
            }

            @Override
            public void removeUpdate(DocumentEvent e) {
                onFilterChanged();
            }

            @Override
            public void changedUpdate(DocumentEvent e) {
                onFilterChanged();
            }

            // Restarting a running timer extends the debounce window while
            // the user is still typing.
            private void onFilterChanged() {
                if (timer.isRunning()) {
                    timer.restart();
                } else {
                    timer.start();
                }
            }
        });

        teamProjectScrollPane.setMinimumSize(new Dimension(JBUI.scale(200), JBUI.scale(70)));
        initialized = true;
    }
}
/**
 * Registers the listener with every action source on the form: the user
 * account panel, the debounce timer, and the refresh button.
 */
public void addActionListener(final ActionListener listener) {
    this.userAccountPanel.addActionListener(listener);
    this.timer.addActionListener(listener);
    this.refreshButton.addActionListener(listener);
}
/** Sets the text shown in the team project filter box. */
public void setTeamProjectFilter(final String filter) {
    this.teamProjectFilter.setText(filter);
}
/** Returns the current text of the team project filter box. */
public String getTeamProjectFilter() {
    return this.teamProjectFilter.getText();
}
/** Gives initial keyboard focus to the filter box. */
public void initFocus() {
    this.teamProjectFilter.requestFocus();
}
/**
 * Binds the table model and selection model to the team project table,
 * installs a row sorter, and attaches a converter so selections map back
 * to model indexes even after the user sorts the view.
 */
public void setTeamProjectTable(final ServerContextTableModel tableModel, final ListSelectionModel selectionModel) {
    teamProjectTable.setModel(tableModel);
    teamProjectTable.setSelectionModel(selectionModel);

    // Setup table sorter
    final TableRowSorter<TableModel> rowSorter = new TableRowSorter<TableModel>(tableModel);
    teamProjectTable.setRowSorter(rowSorter);

    // Attach an index converter to fix the indexes if the user sorts the list
    tableModel.setSelectionConverter(new TableModelSelectionConverter() {
        @Override
        public int convertRowIndexToModel(int viewRowIndex) {
            // Negative means "no selection"; pass it through untouched.
            return viewRowIndex < 0
                    ? viewRowIndex
                    : teamProjectTable.convertRowIndexToModel(viewRowIndex);
        }
    });
}
/** Sets the repository name text box. */
public void setRepositoryName(final String name) {
    this.repositoryName.setText(name);
}
/** Returns the repository name, trimmed of surrounding whitespace. */
public String getRepositoryName() {
    final String rawName = this.repositoryName.getText();
    return StringUtils.trim(rawName);
}
/** Returns the repository name control, e.g. for validation highlighting. */
public JComponent getRepositoryNameComponent() {
    return this.repositoryName;
}
/** Displays the given user name in the account panel. */
public void setUserName(final String name) {
    this.userAccountPanel.setUserName(name);
}
/** Displays the given server name in the account panel. */
public void setServerName(final String name) {
    this.userAccountPanel.setServerName(name);
}
/**
 * Toggles the loading state: while loading, the refresh button is hidden
 * and the busy spinner runs; otherwise the spinner stops and the button
 * reappears.
 */
public void setLoading(final boolean loading) {
    if (!loading) {
        busySpinner.stop(true);
        refreshButton.setVisible(true);
        return;
    }

    refreshButton.setVisible(false);
    busySpinner.start(true);
}
/**
 * Hand-created controls referenced by the designer, plus the debounce
 * timer used by the filter box.
 */
private void createUIComponents() {
    userAccountPanel = new UserAccountPanel();

    // One-shot 400 ms timer: fires the filter-changed command after the
    // user pauses typing.
    timer = new Timer(400, null);
    timer.setInitialDelay(400);
    timer.setRepeats(false);
    timer.setActionCommand(CMD_PROJECT_FILTER_CHANGED);
}
/**
 * Package-private accessor for unit tests.
 */
BusySpinnerPanel getBusySpinner() {
    return busySpinner;
}
/**
 * Package-private accessor for unit tests.
 */
UserAccountPanel getUserAccountPanel() {
    return userAccountPanel;
}
/**
 * Builds the form's component tree and grid layout from the .form file.
 * <p>
 * Method generated by IntelliJ IDEA GUI Designer
 * >>> IMPORTANT!! <<<
 * DO NOT edit this method OR call it in your code!
 *
 * @noinspection ALL
 */
private void $$$setupUI$$$() {
    createUIComponents();
    contentPanel = new JPanel();
    contentPanel.setLayout(new GridLayoutManager(7, 3, new Insets(0, 0, 0, 0), -1, -1));
    contentPanel.add(userAccountPanel, new GridConstraints(0, 0, 1, 3, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
    final Spacer spacer1 = new Spacer();
    contentPanel.add(spacer1, new GridConstraints(6, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
    final JLabel label1 = new JLabel();
    this.$$$loadLabelText$$$(label1, ResourceBundle.getBundle("com/microsoft/alm/plugin/idea/ui/tfplugin").getString("ImportForm.SelectTeamProject"));
    contentPanel.add(label1, new GridConstraints(1, 0, 1, 3, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    teamProjectFilter = new JTextField();
    contentPanel.add(teamProjectFilter, new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, -1), null, 0, false));
    teamProjectScrollPane = new JScrollPane();
    contentPanel.add(teamProjectScrollPane, new GridConstraints(3, 0, 1, 3, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
    teamProjectTable = new JTable();
    teamProjectTable.setFillsViewportHeight(true);
    teamProjectTable.setShowHorizontalLines(false);
    teamProjectTable.setShowVerticalLines(false);
    teamProjectScrollPane.setViewportView(teamProjectTable);
    final JLabel label2 = new JLabel();
    this.$$$loadLabelText$$$(label2, ResourceBundle.getBundle("com/microsoft/alm/plugin/idea/ui/tfplugin").getString("ImportForm.NewRepositoryName"));
    contentPanel.add(label2, new GridConstraints(4, 0, 1, 3, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    repositoryName = new JTextField();
    contentPanel.add(repositoryName, new GridConstraints(5, 0, 1, 3, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, -1), null, 0, false));
    refreshButton = new JButton();
    refreshButton.setIcon(new ImageIcon(getClass().getResource("/actions/refresh.png")));
    refreshButton.setText("");
    refreshButton.setToolTipText(ResourceBundle.getBundle("com/microsoft/alm/plugin/idea/ui/tfplugin").getString("ImportDialog.RefreshButton.ToolTip"));
    contentPanel.add(refreshButton, new GridConstraints(2, 2, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    busySpinner = new BusySpinnerPanel();
    contentPanel.add(busySpinner, new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_NONE, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null, null, 0, false));
}
/**
 * Parses the GUI designer's '&amp;' mnemonic markup out of {@code text} and
 * applies the resulting plain text and mnemonic to the label.
 *
 * @noinspection ALL
 */
private void $$$loadLabelText$$$(JLabel component, String text) {
    StringBuffer result = new StringBuffer();
    boolean haveMnemonic = false;
    char mnemonic = '\0';
    int mnemonicIndex = -1;
    for (int i = 0; i < text.length(); i++) {
        if (text.charAt(i) == '&') {
            i++;
            // A trailing '&' marks nothing; stop scanning.
            if (i == text.length()) break;
            // The first '&' followed by a non-'&' character selects the
            // mnemonic; "&&" falls through and appends a literal '&'.
            if (!haveMnemonic && text.charAt(i) != '&') {
                haveMnemonic = true;
                mnemonic = text.charAt(i);
                mnemonicIndex = result.length();
            }
        }
        result.append(text.charAt(i));
    }
    component.setText(result.toString());
    if (haveMnemonic) {
        component.setDisplayedMnemonic(mnemonic);
        component.setDisplayedMnemonicIndex(mnemonicIndex);
    }
}
/**
 * @noinspection ALL
 * @return the root panel assembled by the GUI designer
 */
public JComponent $$$getRootComponent$$$() {
    return contentPanel;
}
}
| |
package org.bpel4chor.splitprocess.pwdg.util;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.bpel4chor.splitprocess.exceptions.PWDGException;
import org.bpel4chor.splitprocess.exceptions.PartitionSpecificationException;
import org.bpel4chor.splitprocess.partition.model.Participant;
import org.bpel4chor.splitprocess.partition.model.PartitionSpecification;
import org.bpel4chor.splitprocess.pwdg.model.PWDG;
import org.bpel4chor.splitprocess.pwdg.model.PWDGNode;
import org.bpel4chor.splitprocess.pwdg.model.WDG;
import org.bpel4chor.splitprocess.pwdg.model.WDGNode;
import org.eclipse.bpel.model.Activity;
import org.eclipse.bpel.model.BPELFactory;
import org.eclipse.bpel.model.Process;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph.CycleFoundException;
import org.jgrapht.graph.DefaultEdge;
/**
 * PWDGFactory constructs PWDG upon an activity and a variable that it reads.
 * <p>
 * Construction proceeds in three steps: a temporary root node is inserted
 * into each participant's share of the WDG, the largest weakly connected
 * subgraphs per participant are collapsed into PWDG nodes, and the temporary
 * roots are removed again before the PWDG edges are wired.
 *
 * @since Feb 23, 2012
 * @author Daojun Cui
 */
public class PWDGFactory {

    /**
     * Construct PWDG based on the WDG and partition.
     *
     * @param wdg
     *            The WDG graph
     * @param process
     *            The non-split process
     * @param partitionSpec
     *            The partition specification
     * @return PWDG
     * @throws PWDGException
     *             if a cycle is found while wiring the PWDG, or the partition
     *             specification is inconsistent
     */
    public static PWDG createPWDG(WDG wdg, Process process, PartitionSpecification partitionSpec)
            throws PWDGException {
        try {
            if (wdg == null || partitionSpec == null)
                throw new NullPointerException();

            // participant to WDG node map
            Map<Participant, Set<WDGNode>> part2WDGNodeMap = new HashMap<Participant, Set<WDGNode>>();

            // temporary root nodes
            Map<Participant, WDGNode> part2RootMap = new HashMap<Participant, WDGNode>();

            // 1. place temporary root in each partition of the WDG, cache the data
            // into part2RootMap, and part2WDGNodeMap
            insertTempRootNode(wdg, part2RootMap, part2WDGNodeMap, process, partitionSpec);

            // 2. form largest weakly connected subgraphs i.e. PWDG Nodes
            Set<PWDGNode> pwdgNodes = formPWDGNodes(wdg, part2RootMap, part2WDGNodeMap);

            // 3. remove temporary root
            removeTempRootNode(wdg, part2RootMap, pwdgNodes);

            // new pwdg
            PWDG pwdg = new PWDG();
            for (PWDGNode v : pwdgNodes)
                pwdg.addVertex(v);

            // create edges
            createPwdgEdge(pwdg, wdg);

            return pwdg;
        } catch (CycleFoundException e) {
            throw new PWDGException("Cycle found in PWDG.", e);
        } catch (PartitionSpecificationException e) {
            throw new PWDGException("Something wrong in PartitionSpecification.", e);
        }
    }

    /**
     * Create edge for pwdg
     * <p>
     * If any wdgNode inside a pwdgNode has edge to other wdgNode in the other
     * pwdgNode, then between the two pwdgNodes there is an pwdgEdge.
     *
     * @param pwdg the PWDG whose edges are to be created
     * @param wdg the underlying WDG providing the fine-grained edges
     * @throws CycleFoundException if adding an edge would create a cycle
     */
    protected static void createPwdgEdge(PWDG pwdg, WDG wdg) throws CycleFoundException {
        PWDGNode[] pwdgNodes = pwdg.vertexSet().toArray(new PWDGNode[0]);
        // Examine each unordered pair of PWDG nodes exactly once.
        for (int i = 0; i < pwdgNodes.length - 1; i++) {
            for (int j = i + 1; j < pwdgNodes.length; j++) {
                PWDGNode pwNode1 = pwdgNodes[i];
                PWDGNode pwNode2 = pwdgNodes[j];
                if (hasWdgEdgeBetween(pwNode1, pwNode2, wdg)) {
                    pwdg.addDagEdge(pwNode1, pwNode2);
                } else if (hasWdgEdgeBetween(pwNode2, pwNode1, wdg)) {
                    pwdg.addDagEdge(pwNode2, pwNode1);
                }
            }
        }
    }

    /**
     * Test if there is wdg edge from n1 to n2
     *
     * @param n1 the candidate source PWDG node
     * @param n2 the candidate target PWDG node
     * @param wdg the WDG providing the edges
     * @return true if any contained WDG node of n1 has an edge to one of n2
     */
    protected static boolean hasWdgEdgeBetween(PWDGNode n1, PWDGNode n2, WDG wdg) {
        for (WDGNode wnode1 : n1.getWdgNodes()) {
            for (WDGNode wnode2 : n2.getWdgNodes()) {
                if (wdg.getEdge(wnode1, wnode2) != null)
                    return true;
            }
        }
        return false;
    }

    /**
     * Place a temporary root node in each participant where there are wdg
     * nodes.
     *
     * @param wdg
     *            WDG graph
     * @param part2RootMap
     *            participant to temporary root map
     * @param part2wdgNodeMap
     *            Participant to WDG Nodes Set Map
     * @param process
     *            The non-split process
     * @param partitionSpec
     *            PartitionSpecification
     * @throws CycleFoundException
     * @throws PartitionSpecificationException
     */
    protected static void insertTempRootNode(WDG wdg, Map<Participant, WDGNode> part2RootMap,
            Map<Participant, Set<WDGNode>> part2wdgNodeMap, Process process, PartitionSpecification partitionSpec)
            throws CycleFoundException, PartitionSpecificationException {

        if (wdg == null || part2RootMap == null || part2wdgNodeMap == null || process == null || partitionSpec == null)
            throw new NullPointerException();

        for (Participant participant : partitionSpec.getParticipants()) {

            // wdg nodes in participant
            Set<WDGNode> wdgNodesInPart = new HashSet<WDGNode>();

            // get all basic activities in this participant
            Set<Activity> actsInParticipant = participant.getActivities();

            // get all wdg nodes
            Set<WDGNode> allWdgNodes = wdg.vertexSet();

            // collect the wdg nodes that reside in this participant
            for (WDGNode node : allWdgNodes) {
                for (Activity act : actsInParticipant)
                    if (node.activity().equals(act))
                        wdgNodesInPart.add(node);
            }

            // if there are no nodes in this participant, move on
            if (wdgNodesInPart.isEmpty())
                continue;

            //
            // if there are wdgNodes in this participant,
            // try combine the tempRoot to the nodes that do not have
            // incoming link from the same partition
            //

            // create a temp root, add to the part2rootmap
            WDGNode tempRoot = new WDGNode(BPELFactory.eINSTANCE.createActivity());
            tempRoot.activity().setName(participant.getName().concat("RootNode"));
            part2RootMap.put(participant, tempRoot);

            // add to wdg
            wdg.addVertex(tempRoot);

            // combine tempRoot to node
            for (WDGNode node : wdgNodesInPart) {
                if (!hasIncomingEdgeFromSamePartition(node, wdgNodesInPart, wdg)) {
                    wdg.addDagEdge(tempRoot, node);
                }
            }

            // now collect tempRoot in wdgNodes of Participant too
            wdgNodesInPart.add(tempRoot);

            // save the pair participant to wdgNodeInPart in map
            part2wdgNodeMap.put(participant, wdgNodesInPart);
        }
    }

    /**
     * Test whether the nodeInPart has incoming edge from the same partition.
     *
     * @param nodeInPart
     *            The node in the current participant
     * @param nodesCurrPart
     *            The wdg nodes in current participant
     * @param wdg
     *            The WDG graph
     * @return true if it has incoming edge from same partition, otherwise
     *         false.
     */
    protected static boolean hasIncomingEdgeFromSamePartition(WDGNode nodeInPart, Set<WDGNode> nodesCurrPart, WDG wdg) {
        Set<DefaultEdge> incomingEdges = wdg.incomingEdgesOf(nodeInPart);
        for (DefaultEdge inEdge : incomingEdges) {
            WDGNode sourceNode = wdg.getEdgeSource(inEdge);
            if (nodesCurrPart.contains(sourceNode)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Form pwdg nodes
     *
     * @param wdg the WDG graph
     * @param part2RootMap participant to temporary root map
     * @param part2wdgNodeMap participant to WDG node set map
     * @return the set of PWDG nodes formed per participant
     * @throws PartitionSpecificationException
     */
    protected static Set<PWDGNode> formPWDGNodes(WDG wdg, Map<Participant, WDGNode> part2RootMap,
            Map<Participant, Set<WDGNode>> part2wdgNodeMap) throws PartitionSpecificationException {
        if (wdg == null || part2RootMap == null || part2wdgNodeMap == null)
            throw new NullPointerException();
        PWDGNodeConstructor helper = new PWDGNodeConstructor(wdg, part2RootMap, part2wdgNodeMap);
        Set<PWDGNode> pwdgNodes = helper.formNodes();
        return pwdgNodes;
    }

    /**
     * Remove the temporary root node, not only from PWDG, also from WDG
     *
     * @param wdg the WDG graph to clean up
     * @param part2RootMap participant to temporary root map
     * @param pwdgNodes the PWDG nodes to clean up
     */
    protected static void removeTempRootNode(WDG wdg, Map<Participant, WDGNode> part2RootMap, Set<PWDGNode> pwdgNodes) {
        // delete from pwdg
        for (WDGNode root : part2RootMap.values()) {
            for (PWDGNode pwnode : pwdgNodes) {
                pwnode.remove(root);
            }
        }
        // delete from wdg
        for (WDGNode root : part2RootMap.values()) {
            wdg.removeVertex(root);
        }
    }
}
| |
/**
* Copyright 2013 Expedia, Inc. All rights reserved.
* EXPEDIA PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
package com.expedia.echox3.basics.tools.misc;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Date;
import com.expedia.echox3.basics.file.BaseFileHandler;
import com.expedia.echox3.basics.file.BasicFileReader;
import com.expedia.echox3.basics.file.BasicFileWriter;
import com.expedia.echox3.basics.monitoring.event.BasicEvent;
import com.expedia.echox3.basics.monitoring.event.BasicException;
import com.expedia.echox3.basics.monitoring.event.BasicLogger;
import com.expedia.echox3.basics.monitoring.event.BasicRuntimeException;
/**
 * Maintains a table of prime numbers and answers primality queries by trial
 * division against it.
 * <p>
 * The table is lazily initialized on the first call to {@link #isPrime(long)}:
 * it is deserialized from a file under the "Prime" working folder; if that
 * fails, a small 16-bit table is generated in memory instead. Running
 * {@link #main(String[])} with no arguments generates and saves the table;
 * with arguments it tests the given values for primality.
 * <p>
 * NOTE(review): trial division only checks primes up to sqrt(n); for n larger
 * than the square of the largest table prime the answer may be unreliable —
 * confirm callers stay within range.
 */
public class PrimeNumbers
{
	private static final BasicLogger LOGGER = new BasicLogger(PrimeNumbers.class);

	private static final String PRIME_FOLDER = "Prime";
	private static final String PRIME16_FILENAME = "Prime16.dat";
	private static final String PRIME32_FILENAME = "Prime32.dat";
	// Number of primes below 2^16 and 2^31 respectively.
	private static final int PRIME16_COUNT = 6542;
	private static final int PRIME32_COUNT = 105097565;

	private static final PrimeNumbers INSTANCE = new PrimeNumbers();

	private int[] m_primeList;
	private int m_cPrime = 0;
	private String m_filename;

	private PrimeNumbers()
	{
		// Only attempt the large (32-bit) table when the heap can hold it (> 1 GB).
		boolean is32 = BasicTools.getHeapSizeMax() > (1024 * 1024 * 1024);
		setSize(is32);
	}

	/**
	 * Selects the table file and allocates the (still empty) prime array.
	 *
	 * @param is32 true for the full 32-bit table, false for the 16-bit table
	 */
	private void setSize(boolean is32)
	{
		int count;
		if (is32)
		{
			m_filename = PRIME32_FILENAME;
			count = PRIME32_COUNT;
		}
		else
		{
			m_filename = PRIME16_FILENAME;
			count = PRIME16_COUNT;
		}
		m_primeList = new int[count];
	}

	public static BasicLogger getLogger()
	{
		return LOGGER;
	}

	/**
	 * Tests n for primality, lazily loading (or generating) the prime table
	 * on first use. Values below 2 are never prime.
	 *
	 * @param n the value to test
	 * @return true if n is prime
	 */
	public static boolean isPrime(long n)
	{
		if (0 == INSTANCE.m_cPrime)
		{
			try
			{
				INSTANCE.read();
			}
			catch (Exception exception)
			{
				// Best effort: if the serialized table cannot be read, fall
				// back to generating the small 16-bit table in memory.
				INSTANCE.setSize(false);
				INSTANCE.generatePrimeListInternal();
			}
		}
		return INSTANCE.isPrimeInternal(n);
	}

	/**
	 * @param n starting value (inclusive)
	 * @return the smallest prime >= n
	 */
	public static int nextPrime(int n)
	{
		while (!isPrime(n))
		{
			n++;
		}
		return n;
	}

	/**
	 * @param n starting value (inclusive)
	 * @return the smallest prime >= n
	 */
	public static long nextPrime(long n)
	{
		while (!isPrime(n))
		{
			n++;
		}
		return n;
	}

	/**
	 * With arguments: tests the table plus the given values. Without
	 * arguments: generates, saves and validates the prime table.
	 */
	public static void main(String[] args) throws BasicException
	{
		if (0 != args.length)
		{
			testPrimeList(args);
		}
		else
		{
			generatePrimeList();
		}
	}

	private static void testPrimeList(String[] args)
	{
		timeStamp("Loading the prime file");
		isPrime(7);

		showPrimeList(2, 100);

		timeStamp("Testing the fixed list");
		long[] fixedList = {
				5, 47, 97, 101, 103, 107, 109,
				Integer.MAX_VALUE,
				Integer.MAX_VALUE + 2L,
				Long.MAX_VALUE - 2,
				Long.MAX_VALUE,
		};
		testPrimeList(fixedList);

		timeStamp("Testing the parameter list");
		long[] valueList = new long[args.length];
		for (int i = 0; i < args.length; i++)
		{
			long n = Long.parseLong(args[i]);
			valueList[i] = n;
		}
		testPrimeList(valueList);
	}

	private static void testPrimeList(long[] valueList)
	{
		for (long value : valueList)
		{
			boolean isPrime = isPrime(value);
			timeStamp(String.format("isPrime(%,26d) = %s", value, isPrime));
		}
	}

	private static void showPrimeList(long min, long max)
	{
		for (long value = min; value < max; value++)
		{
			boolean isPrime = isPrime(value);
			if (isPrime)
			{
				timeStamp(String.format("isPrime(%,26d) = %s", value, isPrime));
			}
		}
	}

	/**
	 * Generates the full table, saves it, re-reads it and validates the
	 * round trip.
	 */
	private static void generatePrimeList() throws BasicException
	{
		PrimeNumbers generator = new PrimeNumbers();

		timeStamp("Starting");
		generator.generatePrimeListInternal();
		timeStamp("... saving ...");
		generator.save(generator.m_primeList);
		timeStamp("... reading ...");
		PrimeNumbers validator = new PrimeNumbers();
		validator.read();
		timeStamp("... validating ...");
		if (validator.m_cPrime != generator.m_cPrime)
		{
			errorStamp("Length validation failed", null);
		}
		if (0 == validator.m_primeList[validator.m_primeList.length - 1])
		{
			errorStamp("Last element validation failed", null);
		}
		timeStamp("Done.");
	}

	/**
	 * Fills m_primeList by trial division, seeding with the primes below 21
	 * and then testing successive odd numbers until the array is full or the
	 * int counter rolls over past Integer.MAX_VALUE.
	 */
	private void generatePrimeListInternal()
	{
		addPrime( 2);
		addPrime( 3);
		addPrime( 5);
		addPrime( 7);
		addPrime(11);
		addPrime(13);
		addPrime(17);
		addPrime(19);

		int n;
		for (n = 21; n > 0; n += 2)	// Until rollover into negative, to include Integer.MAX_VALUE
		{
			// Progress log every 100 million candidates.
			if (1 == n % (100 * 1000 * 1000))
			{
				getLogger().info(BasicEvent.EVENT_PRIME_NUMBERS_FOUND,
						"@ %,14d: Found %,14d primes. Highest prime is %,14d.",
						n, m_cPrime, m_primeList[m_cPrime - 1]);
			}
			if (isPrimeInternal(n))
			{
				addPrime(n);
			}
			if (m_primeList.length == m_cPrime)
			{
				getLogger().debug(BasicEvent.EVENT_DEBUG, "Prime list is full!");
				break;
			}
		}
		getLogger().info(BasicEvent.EVENT_PRIME_NUMBERS_GENERATED,
				"@ %,14d: Generated %,14d primes. Highest prime is %,14d.",
				n, m_cPrime, m_primeList[m_cPrime - 1]);
	}

	/**
	 * Trial division against the cached prime table.
	 *
	 * @param n the value to test
	 * @return true if no cached prime <= sqrt(n) divides n
	 */
	private boolean isPrimeInternal(long n) throws BasicRuntimeException
	{
		// 0, 1 and negative numbers are not prime. Without this guard the
		// loop below finds no divisor and wrongly reports them as prime.
		if (n < 2)
		{
			return false;
		}
		long primeMax = Double.valueOf(Math.sqrt(n)).longValue();
		for (int i = 0; i < m_cPrime; i++)
		{
			int prime = m_primeList[i];
			if (prime > primeMax)
			{
				break;
			}
			if (0 == (n % prime))
			{
				return false;
			}
		}
		return true;
	}

	private void addPrime(int n)
	{
		m_primeList[m_cPrime++] = n;
	}

	/**
	 * Serializes the prime table to the working folder.
	 *
	 * @param primeList the table to write
	 * @throws BasicException if the file cannot be written
	 */
	private void save(int[] primeList) throws BasicException
	{
		String filename = BaseFileHandler.getWorkingFilename(PRIME_FOLDER, m_filename);
		BasicFileWriter writer = new BasicFileWriter();
		ObjectOutputStream objectStream = null;
		try
		{
			writer.openStream(filename);
			objectStream = new ObjectOutputStream(writer.getOutputStream());
			objectStream.writeObject(primeList);
		}
		catch (Exception exception)
		{
			throw new BasicException(BasicEvent.EVENT_PRIME_WRITE_FAILED,
					exception, "Failed to write primes to file " + filename);
		}
		finally
		{
			BaseFileHandler.closeSafe(objectStream);
			writer.close();
		}
	}

	/**
	 * Deserializes the prime table from the working folder.
	 *
	 * @throws BasicRuntimeException if the file cannot be read
	 */
	private void read()
	{
		String filename = BaseFileHandler.getWorkingFilename(PRIME_FOLDER, m_filename);
		BasicFileReader reader = new BasicFileReader();
		ObjectInputStream objectStream = null;
		try
		{
			reader.openStream(filename);
			objectStream = new ObjectInputStream(reader.getInputStream());
			Object object = objectStream.readObject();
			m_primeList = (int[]) object;
			m_cPrime = m_primeList.length;
		}
		catch (Exception exception)
		{
			throw new BasicRuntimeException(BasicEvent.EVENT_PRIME_READ_FAILED,
					exception, "Failed to read primes from file " + filename);
		}
		finally
		{
			BaseFileHandler.closeSafe(objectStream);
			reader.close();
		}
		getLogger().info(BasicEvent.EVENT_PRIME_NUMBERS_READ,
				"Read %,14d primes. Highest prime is %,14d.",
				m_cPrime, m_primeList[m_cPrime - 1]);
	}

	@SuppressWarnings("PMD.SystemPrintln")		// This IS a command line application!
	public static void timeStamp(String message)
	{
		Date date = new Date();
		String fullMessage = String.format("%1$tY/%1$tm/%1$td %1$tH:%1$tM:%1$tS.%1$tL %2$s", date, message);
		System.out.println(fullMessage);
	}

	@SuppressWarnings("PMD.SystemPrintln")		// This IS a command line application!
	public static void errorStamp(String operation, Throwable throwable)
	{
		String message = operation;
		if (null != throwable)
		{
			message = String.format("Exception in '%s' -> %s: %s",
					operation, throwable.getClass().getSimpleName(), throwable.getMessage());
		}
		Date date = new Date();
		String fullMessage = String.format("%1$tY/%1$tm/%1$td %1$tH:%1$tM:%1$tS.%1$tL %2$s", date, message);
		System.err.println(fullMessage);
		if (null != throwable)
		{
			throwable.printStackTrace(System.err);
		}
	}
}
| |
package io.dropwizard.jersey.validation;
import io.dropwizard.jersey.errors.ErrorMessage;
import org.junit.jupiter.api.Test;
import javax.annotation.Nullable;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ParamConverter;
import java.lang.annotation.Annotation;
import static java.util.Objects.requireNonNull;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
/**
 * Tests for {@code FuzzyEnumParamConverterProvider}: fuzzy matching of enum
 * constant names, and the various shapes a user-supplied {@code fromString}
 * factory can take (working, throwing, non-static, private). The enum
 * fixtures below deliberately vary method modifiers, since the provider
 * discovers {@code fromString} reflectively.
 */
class FuzzyEnumParamConverterProviderTest {
    private final FuzzyEnumParamConverterProvider paramConverterProvider = new FuzzyEnumParamConverterProvider();

    // Plain enum: names should match even with '.', '-' or padding instead of '_'.
    private enum Fuzzy {
        A_1,
        A_2
    }

    // Enum with a custom toString(); the converter should use it for output.
    private enum WithToString {
        A_1,
        A_2;

        @Override
        public String toString() {
            return "<" + this.name() + ">";
        }
    }

    // Enum with a valid public static fromString(String) factory.
    private enum ExplicitFromString {
        A("1"),
        B("2");

        private final String code;

        ExplicitFromString(String code) {
            this.code = code;
        }

        @Nullable
        public static ExplicitFromString fromString(String str) {
            for (ExplicitFromString e : ExplicitFromString.values()) {
                if (str.equals(e.code)) {
                    return e;
                }
            }
            return null;
        }
    }

    // fromString throws a WebApplicationException, which must propagate as-is.
    private enum ExplicitFromStringThrowsWebApplicationException {
        A("1"),
        B("2");

        private final String code;

        ExplicitFromStringThrowsWebApplicationException(String code) {
            this.code = code;
        }

        @SuppressWarnings("unused")
        public String getCode() {
            return this.code;
        }

        @SuppressWarnings("unused")
        public static ExplicitFromStringThrowsWebApplicationException fromString(String str) {
            throw new WebApplicationException(Response.status(new Response.StatusType() {
                @Override
                public int getStatusCode() {
                    return 418;
                }

                @Override
                public Response.Status.Family getFamily() {
                    return Response.Status.Family.CLIENT_ERROR;
                }

                @Override
                public String getReasonPhrase() {
                    return "I am a teapot";
                }
            }).build());
        }
    }

    // fromString throws an arbitrary RuntimeException; the converter must wrap it.
    private enum ExplicitFromStringThrowsOtherException {
        A("1"),
        B("2");

        private final String code;

        ExplicitFromStringThrowsOtherException(String code) {
            this.code = code;
        }

        @SuppressWarnings("unused")
        public String getCode() {
            return this.code;
        }

        @SuppressWarnings("unused")
        public static ExplicitFromStringThrowsOtherException fromString(String str) {
            throw new RuntimeException("Boo!");
        }
    }

    // Non-static fromString: must be ignored, falling back to fuzzy name matching.
    private enum ExplicitFromStringNonStatic {
        A("1"),
        B("2");

        private final String code;

        ExplicitFromStringNonStatic(String code) {
            this.code = code;
        }

        @Nullable
        public ExplicitFromStringNonStatic fromString(String str) {
            for (ExplicitFromStringNonStatic e : ExplicitFromStringNonStatic.values()) {
                if (str.equals(e.code)) {
                    return e;
                }
            }
            return null;
        }
    }

    // Private fromString: the converter cannot invoke it and must report that.
    private enum ExplicitFromStringPrivate {
        A("1"),
        B("2");

        private final String code;

        ExplicitFromStringPrivate(String code) {
            this.code = code;
        }

        @Nullable
        private static ExplicitFromStringPrivate fromString(String str) {
            for (ExplicitFromStringPrivate e : ExplicitFromStringPrivate.values()) {
                if (str.equals(e.code)) {
                    return e;
                }
            }
            return null;
        }
    }

    // Non-enum type: the provider should decline to supply a converter.
    static class Klass {
    }

    // Helper: looks up a converter for the raw type, failing fast if absent.
    private <T> ParamConverter<T> getConverter(Class<T> rawType) {
        return requireNonNull(paramConverterProvider.getConverter(rawType, null, new Annotation[] {}));
    }

    @Test
    void testFuzzyEnum() {
        final ParamConverter<Fuzzy> converter = getConverter(Fuzzy.class);
        assertThat(converter.fromString(null)).isNull();
        assertThat(converter.fromString("A.1")).isSameAs(Fuzzy.A_1);
        assertThat(converter.fromString("A-1")).isSameAs(Fuzzy.A_1);
        assertThat(converter.fromString("A_1")).isSameAs(Fuzzy.A_1);
        assertThat(converter.fromString(" A_1")).isSameAs(Fuzzy.A_1);
        assertThat(converter.fromString("A_1 ")).isSameAs(Fuzzy.A_1);
        assertThat(converter.fromString("A_2")).isSameAs(Fuzzy.A_2);
        // Unknown constant: a 400 listing the valid constants is expected.
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("B"))
            .extracting(e -> (ErrorMessage)e.getResponse().getEntity())
            .matches(e -> e.getCode() == 400)
            .matches(e -> e.getMessage().contains("A_1"))
            .matches(e -> e.getMessage().contains("A_2"));
    }

    @Test
    void testToString() {
        final ParamConverter<WithToString> converter = getConverter(WithToString.class);
        assertThat(converter.toString(WithToString.A_1)).isEqualTo("<A_1>");
    }

    @Test
    void testNonEnum() {
        assertThat(paramConverterProvider.getConverter(Klass.class, null, new Annotation[] {})).isNull();
    }

    @Test
    void testEnumViaExplicitFromString() {
        final ParamConverter<ExplicitFromString> converter = getConverter(ExplicitFromString.class);
        assertThat(converter.fromString("1")).isSameAs(ExplicitFromString.A);
        assertThat(converter.fromString("2")).isSameAs(ExplicitFromString.B);
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("3"))
            .extracting(e -> (ErrorMessage)e.getResponse().getEntity())
            .matches(e -> e.getCode() == 400)
            .matches(e -> e.getMessage().contains("is not a valid ExplicitFromString"));
    }

    @Test
    void testEnumViaExplicitFromStringThatThrowsWebApplicationException() {
        final ParamConverter<ExplicitFromStringThrowsWebApplicationException> converter =
            getConverter(ExplicitFromStringThrowsWebApplicationException.class);
        // The custom status (teapot) must pass through untouched.
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("3"))
            .extracting(e -> e.getResponse().getStatusInfo())
            .matches(e -> e.getStatusCode() == 418)
            .matches(e -> e.getReasonPhrase().contains("I am a teapot"));
    }

    @Test
    void testEnumViaExplicitFromStringThatThrowsOtherException() {
        final ParamConverter<ExplicitFromStringThrowsOtherException> converter =
            getConverter(ExplicitFromStringThrowsOtherException.class);
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("1"))
            .extracting(e -> (ErrorMessage)e.getResponse().getEntity())
            .matches(e -> e.getCode() == 400)
            .matches(e -> e.getMessage().contains("Failed to convert"));
    }

    @Test
    void testEnumViaExplicitFromStringNonStatic() {
        final ParamConverter<ExplicitFromStringNonStatic> converter = getConverter(ExplicitFromStringNonStatic.class);
        // The instance fromString is ignored, so "1" is not a valid constant name...
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("1"))
            .extracting(e -> (ErrorMessage)e.getResponse().getEntity())
            .matches(e -> e.getCode() == 400)
            .matches(e -> e.getMessage().contains("A"))
            .matches(e -> e.getMessage().contains("B"));
        // ...while the constant name itself still resolves.
        assertThat(converter.fromString("A")).isSameAs(ExplicitFromStringNonStatic.A);
    }

    @Test
    void testEnumViaExplicitFromStringPrivate() {
        final ParamConverter<ExplicitFromStringPrivate> converter = getConverter(ExplicitFromStringPrivate.class);
        assertThatExceptionOfType(WebApplicationException.class)
            .isThrownBy(() -> converter.fromString("1"))
            .extracting(e -> (ErrorMessage)e.getResponse().getEntity())
            .matches(e -> e.getCode() == 400)
            .matches(e -> e.getMessage().contains("Not permitted to call"));
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/resources/ad.proto
package com.google.ads.googleads.v9.resources;
public interface AdOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v9.resources.Ad)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Immutable. The resource name of the ad.
* Ad resource names have the form:
* `customers/{customer_id}/ads/{ad_id}`
* </pre>
*
* <code>string resource_name = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* <pre>
* Immutable. The resource name of the ad.
* Ad resource names have the form:
* `customers/{customer_id}/ads/{ad_id}`
* </pre>
*
* <code>string resource_name = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
com.google.protobuf.ByteString
getResourceNameBytes();
/**
* <pre>
* Output only. The ID of the ad.
* </pre>
*
* <code>optional int64 id = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the id field is set.
*/
boolean hasId();
/**
* <pre>
* Output only. The ID of the ad.
* </pre>
*
* <code>optional int64 id = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The id.
*/
long getId();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @return A list containing the finalUrls.
*/
java.util.List<java.lang.String>
getFinalUrlsList();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @return The count of finalUrls.
*/
int getFinalUrlsCount();
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @param index The index of the element to return.
* @return The finalUrls at the given index.
*/
java.lang.String getFinalUrls(int index);
/**
* <pre>
* The list of possible final URLs after all cross-domain redirects for the
* ad.
* </pre>
*
* <code>repeated string final_urls = 41;</code>
* @param index The index of the value to return.
* @return The bytes of the finalUrls at the given index.
*/
com.google.protobuf.ByteString
getFinalUrlsBytes(int index);
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.FinalAppUrl final_app_urls = 35;</code>
*/
java.util.List<com.google.ads.googleads.v9.common.FinalAppUrl>
getFinalAppUrlsList();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.FinalAppUrl final_app_urls = 35;</code>
*/
com.google.ads.googleads.v9.common.FinalAppUrl getFinalAppUrls(int index);
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.FinalAppUrl final_app_urls = 35;</code>
*/
int getFinalAppUrlsCount();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.FinalAppUrl final_app_urls = 35;</code>
*/
java.util.List<? extends com.google.ads.googleads.v9.common.FinalAppUrlOrBuilder>
getFinalAppUrlsOrBuilderList();
/**
* <pre>
* A list of final app URLs that will be used on mobile if the user has the
* specific app installed.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.FinalAppUrl final_app_urls = 35;</code>
*/
com.google.ads.googleads.v9.common.FinalAppUrlOrBuilder getFinalAppUrlsOrBuilder(
int index);
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @return A list containing the finalMobileUrls.
*/
java.util.List<java.lang.String>
getFinalMobileUrlsList();
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @return The count of finalMobileUrls.
*/
int getFinalMobileUrlsCount();
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @param index The index of the element to return.
* @return The finalMobileUrls at the given index.
*/
java.lang.String getFinalMobileUrls(int index);
/**
* <pre>
* The list of possible final mobile URLs after all cross-domain redirects
* for the ad.
* </pre>
*
* <code>repeated string final_mobile_urls = 42;</code>
* @param index The index of the value to return.
* @return The bytes of the finalMobileUrls at the given index.
*/
com.google.protobuf.ByteString
getFinalMobileUrlsBytes(int index);
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return Whether the trackingUrlTemplate field is set.
*/
boolean hasTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return The trackingUrlTemplate.
*/
java.lang.String getTrackingUrlTemplate();
/**
* <pre>
* The URL template for constructing a tracking URL.
* </pre>
*
* <code>optional string tracking_url_template = 43;</code>
* @return The bytes for trackingUrlTemplate.
*/
com.google.protobuf.ByteString
getTrackingUrlTemplateBytes();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return Whether the finalUrlSuffix field is set.
*/
boolean hasFinalUrlSuffix();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return The finalUrlSuffix.
*/
java.lang.String getFinalUrlSuffix();
/**
* <pre>
* The suffix to use when constructing a final URL.
* </pre>
*
* <code>optional string final_url_suffix = 44;</code>
* @return The bytes for finalUrlSuffix.
*/
com.google.protobuf.ByteString
getFinalUrlSuffixBytes();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.CustomParameter url_custom_parameters = 10;</code>
*/
java.util.List<com.google.ads.googleads.v9.common.CustomParameter>
getUrlCustomParametersList();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.CustomParameter url_custom_parameters = 10;</code>
*/
com.google.ads.googleads.v9.common.CustomParameter getUrlCustomParameters(int index);
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.CustomParameter url_custom_parameters = 10;</code>
*/
int getUrlCustomParametersCount();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.CustomParameter url_custom_parameters = 10;</code>
*/
java.util.List<? extends com.google.ads.googleads.v9.common.CustomParameterOrBuilder>
getUrlCustomParametersOrBuilderList();
/**
* <pre>
* The list of mappings that can be used to substitute custom parameter tags
* in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`.
* For mutates, please use url custom parameter operations.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.CustomParameter url_custom_parameters = 10;</code>
*/
com.google.ads.googleads.v9.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder(
int index);
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return Whether the displayUrl field is set.
*/
boolean hasDisplayUrl();
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return The displayUrl.
*/
java.lang.String getDisplayUrl();
/**
* <pre>
* The URL that appears in the ad description for some ad formats.
* </pre>
*
* <code>optional string display_url = 45;</code>
* @return The bytes for displayUrl.
*/
com.google.protobuf.ByteString
getDisplayUrlBytes();
/**
* <pre>
* Output only. The type of ad.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.AdTypeEnum.AdType type = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for type.
*/
int getTypeValue();
/**
* <pre>
* Output only. The type of ad.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.AdTypeEnum.AdType type = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The type.
*/
com.google.ads.googleads.v9.enums.AdTypeEnum.AdType getType();
/**
* <pre>
* Output only. Indicates if this ad was automatically added by Google Ads and not by a
* user. For example, this could happen when ads are automatically created as
* suggestions for new ads based on knowledge of how existing ads are
* performing.
* </pre>
*
* <code>optional bool added_by_google_ads = 46 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the addedByGoogleAds field is set.
*/
boolean hasAddedByGoogleAds();
/**
* <pre>
* Output only. Indicates if this ad was automatically added by Google Ads and not by a
* user. For example, this could happen when ads are automatically created as
* suggestions for new ads based on knowledge of how existing ads are
* performing.
* </pre>
*
* <code>optional bool added_by_google_ads = 46 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The addedByGoogleAds.
*/
boolean getAddedByGoogleAds();
/**
* <pre>
* The device preference for the ad. You can only specify a preference for
* mobile devices. When this preference is set the ad will be preferred over
* other ads when being displayed on a mobile device. The ad can still be
* displayed on other device types, e.g. if no other ads are available.
* If unspecified (no device preference), all devices are targeted.
* This is only supported by some ad types.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.DeviceEnum.Device device_preference = 20;</code>
* @return The enum numeric value on the wire for devicePreference.
*/
int getDevicePreferenceValue();
/**
* <pre>
* The device preference for the ad. You can only specify a preference for
* mobile devices. When this preference is set the ad will be preferred over
* other ads when being displayed on a mobile device. The ad can still be
* displayed on other device types, e.g. if no other ads are available.
* If unspecified (no device preference), all devices are targeted.
* This is only supported by some ad types.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.DeviceEnum.Device device_preference = 20;</code>
* @return The devicePreference.
*/
com.google.ads.googleads.v9.enums.DeviceEnum.Device getDevicePreference();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.UrlCollection url_collections = 26;</code>
*/
java.util.List<com.google.ads.googleads.v9.common.UrlCollection>
getUrlCollectionsList();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.UrlCollection url_collections = 26;</code>
*/
com.google.ads.googleads.v9.common.UrlCollection getUrlCollections(int index);
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.UrlCollection url_collections = 26;</code>
*/
int getUrlCollectionsCount();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.UrlCollection url_collections = 26;</code>
*/
java.util.List<? extends com.google.ads.googleads.v9.common.UrlCollectionOrBuilder>
getUrlCollectionsOrBuilderList();
/**
* <pre>
* Additional URLs for the ad that are tagged with a unique identifier that
* can be referenced from other fields in the ad.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.common.UrlCollection url_collections = 26;</code>
*/
com.google.ads.googleads.v9.common.UrlCollectionOrBuilder getUrlCollectionsOrBuilder(
int index);
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the name field is set.
*/
boolean hasName();
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The name.
*/
java.lang.String getName();
/**
* <pre>
* Immutable. The name of the ad. This is only used to be able to identify the ad. It
* does not need to be unique and does not affect the served ad. The name
* field is currently only supported for DisplayUploadAd, ImageAd,
* ShoppingComparisonListingAd and VideoAd.
* </pre>
*
* <code>optional string name = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The bytes for name.
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <pre>
* Output only. If this ad is system managed, then this field will indicate the source.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource system_managed_resource_source = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for systemManagedResourceSource.
*/
int getSystemManagedResourceSourceValue();
/**
* <pre>
* Output only. If this ad is system managed, then this field will indicate the source.
* This field is read-only.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource system_managed_resource_source = 27 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The systemManagedResourceSource.
*/
com.google.ads.googleads.v9.enums.SystemManagedResourceSourceEnum.SystemManagedResourceSource getSystemManagedResourceSource();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the textAd field is set.
*/
boolean hasTextAd();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The textAd.
*/
com.google.ads.googleads.v9.common.TextAdInfo getTextAd();
/**
* <pre>
* Immutable. Details pertaining to a text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.TextAdInfo text_ad = 6 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v9.common.TextAdInfoOrBuilder getTextAdOrBuilder();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
* @return Whether the expandedTextAd field is set.
*/
boolean hasExpandedTextAd();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
* @return The expandedTextAd.
*/
com.google.ads.googleads.v9.common.ExpandedTextAdInfo getExpandedTextAd();
/**
* <pre>
* Details pertaining to an expanded text ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedTextAdInfo expanded_text_ad = 7;</code>
*/
com.google.ads.googleads.v9.common.ExpandedTextAdInfoOrBuilder getExpandedTextAdOrBuilder();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.CallAdInfo call_ad = 49;</code>
* @return Whether the callAd field is set.
*/
boolean hasCallAd();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.CallAdInfo call_ad = 49;</code>
* @return The callAd.
*/
com.google.ads.googleads.v9.common.CallAdInfo getCallAd();
/**
* <pre>
* Details pertaining to a call ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.CallAdInfo call_ad = 49;</code>
*/
com.google.ads.googleads.v9.common.CallAdInfoOrBuilder getCallAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the expandedDynamicSearchAd field is set.
*/
boolean hasExpandedDynamicSearchAd();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The expandedDynamicSearchAd.
*/
com.google.ads.googleads.v9.common.ExpandedDynamicSearchAdInfo getExpandedDynamicSearchAd();
/**
* <pre>
* Immutable. Details pertaining to an Expanded Dynamic Search Ad.
* This type of ad has its headline, final URLs, and display URL
* auto-generated at serving time according to domain name specific
* information provided by `dynamic_search_ads_setting` linked at the
* campaign level.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ExpandedDynamicSearchAdInfo expanded_dynamic_search_ad = 14 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v9.common.ExpandedDynamicSearchAdInfoOrBuilder getExpandedDynamicSearchAdOrBuilder();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.HotelAdInfo hotel_ad = 15;</code>
* @return Whether the hotelAd field is set.
*/
boolean hasHotelAd();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.HotelAdInfo hotel_ad = 15;</code>
* @return The hotelAd.
*/
com.google.ads.googleads.v9.common.HotelAdInfo getHotelAd();
/**
* <pre>
* Details pertaining to a hotel ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.HotelAdInfo hotel_ad = 15;</code>
*/
com.google.ads.googleads.v9.common.HotelAdInfoOrBuilder getHotelAdOrBuilder();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
* @return Whether the shoppingSmartAd field is set.
*/
boolean hasShoppingSmartAd();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
* @return The shoppingSmartAd.
*/
com.google.ads.googleads.v9.common.ShoppingSmartAdInfo getShoppingSmartAd();
/**
* <pre>
* Details pertaining to a Smart Shopping ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingSmartAdInfo shopping_smart_ad = 17;</code>
*/
com.google.ads.googleads.v9.common.ShoppingSmartAdInfoOrBuilder getShoppingSmartAdOrBuilder();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
* @return Whether the shoppingProductAd field is set.
*/
boolean hasShoppingProductAd();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
* @return The shoppingProductAd.
*/
com.google.ads.googleads.v9.common.ShoppingProductAdInfo getShoppingProductAd();
/**
* <pre>
* Details pertaining to a Shopping product ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingProductAdInfo shopping_product_ad = 18;</code>
*/
com.google.ads.googleads.v9.common.ShoppingProductAdInfoOrBuilder getShoppingProductAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the gmailAd field is set.
*/
boolean hasGmailAd();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The gmailAd.
*/
com.google.ads.googleads.v9.common.GmailAdInfo getGmailAd();
/**
* <pre>
* Immutable. Details pertaining to a Gmail ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.GmailAdInfo gmail_ad = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v9.common.GmailAdInfoOrBuilder getGmailAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the imageAd field is set.
*/
boolean hasImageAd();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The imageAd.
*/
com.google.ads.googleads.v9.common.ImageAdInfo getImageAd();
/**
* <pre>
* Immutable. Details pertaining to an Image ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ImageAdInfo image_ad = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v9.common.ImageAdInfoOrBuilder getImageAdOrBuilder();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoAdInfo video_ad = 24;</code>
* @return Whether the videoAd field is set.
*/
boolean hasVideoAd();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoAdInfo video_ad = 24;</code>
* @return The videoAd.
*/
com.google.ads.googleads.v9.common.VideoAdInfo getVideoAd();
/**
* <pre>
* Details pertaining to a Video ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoAdInfo video_ad = 24;</code>
*/
com.google.ads.googleads.v9.common.VideoAdInfoOrBuilder getVideoAdOrBuilder();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
* @return Whether the videoResponsiveAd field is set.
*/
boolean hasVideoResponsiveAd();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
* @return The videoResponsiveAd.
*/
com.google.ads.googleads.v9.common.VideoResponsiveAdInfo getVideoResponsiveAd();
/**
* <pre>
* Details pertaining to a Video responsive ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.VideoResponsiveAdInfo video_responsive_ad = 39;</code>
*/
com.google.ads.googleads.v9.common.VideoResponsiveAdInfoOrBuilder getVideoResponsiveAdOrBuilder();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
* @return Whether the responsiveSearchAd field is set.
*/
boolean hasResponsiveSearchAd();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
* @return The responsiveSearchAd.
*/
com.google.ads.googleads.v9.common.ResponsiveSearchAdInfo getResponsiveSearchAd();
/**
* <pre>
* Details pertaining to a responsive search ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveSearchAdInfo responsive_search_ad = 25;</code>
*/
com.google.ads.googleads.v9.common.ResponsiveSearchAdInfoOrBuilder getResponsiveSearchAdOrBuilder();
/**
* <pre>
* Details pertaining to a legacy responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
* @return Whether the legacyResponsiveDisplayAd field is set.
*/
boolean hasLegacyResponsiveDisplayAd();
/**
* <pre>
* Details pertaining to a legacy responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
* @return The legacyResponsiveDisplayAd.
*/
com.google.ads.googleads.v9.common.LegacyResponsiveDisplayAdInfo getLegacyResponsiveDisplayAd();
/**
* <pre>
* Details pertaining to a legacy responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyResponsiveDisplayAdInfo legacy_responsive_display_ad = 28;</code>
*/
com.google.ads.googleads.v9.common.LegacyResponsiveDisplayAdInfoOrBuilder getLegacyResponsiveDisplayAdOrBuilder();
/**
* <pre>
* Details pertaining to an app ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppAdInfo app_ad = 29;</code>
* @return Whether the appAd field is set.
*/
boolean hasAppAd();
/**
* <pre>
* Details pertaining to an app ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppAdInfo app_ad = 29;</code>
* @return The appAd.
*/
com.google.ads.googleads.v9.common.AppAdInfo getAppAd();
/**
* <pre>
* Details pertaining to an app ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppAdInfo app_ad = 29;</code>
*/
com.google.ads.googleads.v9.common.AppAdInfoOrBuilder getAppAdOrBuilder();
/**
* <pre>
* Immutable. Details pertaining to a legacy app install ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the legacyAppInstallAd field is set.
*/
boolean hasLegacyAppInstallAd();
/**
* <pre>
* Immutable. Details pertaining to a legacy app install ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The legacyAppInstallAd.
*/
com.google.ads.googleads.v9.common.LegacyAppInstallAdInfo getLegacyAppInstallAd();
/**
* <pre>
* Immutable. Details pertaining to a legacy app install ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LegacyAppInstallAdInfo legacy_app_install_ad = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v9.common.LegacyAppInstallAdInfoOrBuilder getLegacyAppInstallAdOrBuilder();
/**
* <pre>
* Details pertaining to a responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
* @return Whether the responsiveDisplayAd field is set.
*/
boolean hasResponsiveDisplayAd();
/**
* <pre>
* Details pertaining to a responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
* @return The responsiveDisplayAd.
*/
com.google.ads.googleads.v9.common.ResponsiveDisplayAdInfo getResponsiveDisplayAd();
/**
* <pre>
* Details pertaining to a responsive display ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ResponsiveDisplayAdInfo responsive_display_ad = 31;</code>
*/
com.google.ads.googleads.v9.common.ResponsiveDisplayAdInfoOrBuilder getResponsiveDisplayAdOrBuilder();
/**
* <pre>
* Details pertaining to a local ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LocalAdInfo local_ad = 32;</code>
* @return Whether the localAd field is set.
*/
boolean hasLocalAd();
/**
* <pre>
* Details pertaining to a local ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LocalAdInfo local_ad = 32;</code>
* @return The localAd.
*/
com.google.ads.googleads.v9.common.LocalAdInfo getLocalAd();
/**
* <pre>
* Details pertaining to a local ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.LocalAdInfo local_ad = 32;</code>
*/
com.google.ads.googleads.v9.common.LocalAdInfoOrBuilder getLocalAdOrBuilder();
/**
* <pre>
* Details pertaining to a display upload ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
* @return Whether the displayUploadAd field is set.
*/
boolean hasDisplayUploadAd();
/**
* <pre>
* Details pertaining to a display upload ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
* @return The displayUploadAd.
*/
com.google.ads.googleads.v9.common.DisplayUploadAdInfo getDisplayUploadAd();
/**
* <pre>
* Details pertaining to a display upload ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.DisplayUploadAdInfo display_upload_ad = 33;</code>
*/
com.google.ads.googleads.v9.common.DisplayUploadAdInfoOrBuilder getDisplayUploadAdOrBuilder();
/**
* <pre>
* Details pertaining to an app engagement ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
* @return Whether the appEngagementAd field is set.
*/
boolean hasAppEngagementAd();
/**
* <pre>
* Details pertaining to an app engagement ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
* @return The appEngagementAd.
*/
com.google.ads.googleads.v9.common.AppEngagementAdInfo getAppEngagementAd();
/**
* <pre>
* Details pertaining to an app engagement ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppEngagementAdInfo app_engagement_ad = 34;</code>
*/
com.google.ads.googleads.v9.common.AppEngagementAdInfoOrBuilder getAppEngagementAdOrBuilder();
/**
* <pre>
* Details pertaining to a Shopping Comparison Listing ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
* @return Whether the shoppingComparisonListingAd field is set.
*/
boolean hasShoppingComparisonListingAd();
/**
* <pre>
* Details pertaining to a Shopping Comparison Listing ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
* @return The shoppingComparisonListingAd.
*/
com.google.ads.googleads.v9.common.ShoppingComparisonListingAdInfo getShoppingComparisonListingAd();
/**
* <pre>
* Details pertaining to a Shopping Comparison Listing ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.ShoppingComparisonListingAdInfo shopping_comparison_listing_ad = 36;</code>
*/
com.google.ads.googleads.v9.common.ShoppingComparisonListingAdInfoOrBuilder getShoppingComparisonListingAdOrBuilder();
/**
* <pre>
* Details pertaining to a Smart campaign ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
* @return Whether the smartCampaignAd field is set.
*/
boolean hasSmartCampaignAd();
/**
* <pre>
* Details pertaining to a Smart campaign ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
* @return The smartCampaignAd.
*/
com.google.ads.googleads.v9.common.SmartCampaignAdInfo getSmartCampaignAd();
/**
* <pre>
* Details pertaining to a Smart campaign ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.SmartCampaignAdInfo smart_campaign_ad = 48;</code>
*/
com.google.ads.googleads.v9.common.SmartCampaignAdInfoOrBuilder getSmartCampaignAdOrBuilder();
/**
* <pre>
* Details pertaining to an app pre-registration ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
* @return Whether the appPreRegistrationAd field is set.
*/
boolean hasAppPreRegistrationAd();
/**
* <pre>
* Details pertaining to an app pre-registration ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
* @return The appPreRegistrationAd.
*/
com.google.ads.googleads.v9.common.AppPreRegistrationAdInfo getAppPreRegistrationAd();
/**
* <pre>
* Details pertaining to an app pre-registration ad.
* </pre>
*
* <code>.google.ads.googleads.v9.common.AppPreRegistrationAdInfo app_pre_registration_ad = 50;</code>
*/
com.google.ads.googleads.v9.common.AppPreRegistrationAdInfoOrBuilder getAppPreRegistrationAdOrBuilder();
public com.google.ads.googleads.v9.resources.Ad.AdDataCase getAdDataCase();
}
| |
/*
* Copyright (C) 2008 ZXing authors
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.barcodeeye.scan;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.StrictMode;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.github.barcodeeye.BaseGlassActivity;
import com.github.barcodeeye.R;
import com.github.barcodeeye.image.ImageManager;
import com.github.barcodeeye.migrated.AmbientLightManager;
import com.github.barcodeeye.migrated.BeepManager;
import com.github.barcodeeye.migrated.FinishListener;
import com.github.barcodeeye.migrated.InactivityTimer;
import com.github.barcodeeye.scan.result.ResultProcessor;
import com.github.barcodeeye.scan.result.ResultProcessorFactory;
import com.github.barcodeeye.scan.ui.ViewGraphView;
import com.github.barcodeeye.scan.ui.ViewfinderView;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.DecodeHintType;
import com.google.zxing.Result;
import com.google.zxing.ResultMetadataType;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.CameraManager;
/**
* This activity opens the camera and does the actual scanning on a background
* thread. It draws a
* viewfinder to help the user place the barcode correctly, shows feedback as
* the image processing
* is happening, and then overlays the results when a scan is successful.
*
* @author dswitkin@google.com (Daniel Switkin)
* @author Sean Owen
*/
public final class CaptureActivity extends BaseGlassActivity implements
SurfaceHolder.Callback {
// Google Chart API URL template; presumably rendered as a results graph
// somewhere past this view -- TODO confirm against the rest of the file.
private static final String GRAPH_URL = "https://chart.googleapis.com/chart?chxt=x,y&chxl=0:%7CJan%7CFeb%7CMarch%7CApril%7CMay%7C1:%7CMin%7CMid%7CMax&cht=lc&chd=s:cEAELFJHHHKUju9uuXUc&chco=76A4FB&chls=2.0&chs=5&chf=c,s,000000|bg,s,000000&chs=470x270&&chco=ffffff";
// Filename prefix for images saved via mImageManager -- assumed; verify at call sites.
private static final String IMAGE_PREFIX = "BarcodeEye_";
// Log tag derived from the class name.
private static final String TAG = CaptureActivity.class.getSimpleName();
// Barcode result metadata types that are worth surfacing to the user.
private static final Collection<ResultMetadataType> DISPLAYABLE_METADATA_TYPES = EnumSet
.of(ResultMetadataType.ISSUE_NUMBER,
ResultMetadataType.SUGGESTED_PRICE,
ResultMetadataType.ERROR_CORRECTION_LEVEL,
ResultMetadataType.POSSIBLE_COUNTRY);
// Camera lifecycle owner; created fresh in onResume(), closed in onPause().
private CameraManager mCameraManager;
// Background decode handler; null while the camera/surface is not ready.
private CaptureActivityHandler mHandler;
// Result received before mHandler existed, replayed once the handler is up.
private Result mSavedResultToShow;
// Overlay that draws the framing rectangle and scan feedback.
private ViewfinderView mViewfinderView;
// True once surfaceCreated() has fired and the preview surface is alive.
private boolean mHasSurface;
// Optional hints passed to the ZXing decoder.
private Map<DecodeHintType, ?> mDecodeHints;
// Finishes the activity after a period of inactivity to save power.
private InactivityTimer mInactivityTimer;
// Plays the beep/vibrate feedback on a successful scan.
private BeepManager mBeepManager;
// Adjusts torch/exposure based on ambient light.
private AmbientLightManager mAmbientLightManager;
// Persists captured images to storage.
private ImageManager mImageManager;
// Flag for post-scan processing state -- unused in the visible code; TODO confirm.
private boolean postProcessing = false;
/**
 * Builds a launch {@link Intent} targeting this activity.
 *
 * @param context the context used to construct the intent
 * @return an intent that starts {@link CaptureActivity}
 */
public static Intent newIntent(Context context) {
    return new Intent(context, CaptureActivity.class);
}
/**
 * @return the overlay view used to render the framing rectangle and
 *         scan feedback
 */
public ViewfinderView getViewfinderView() {
    return this.mViewfinderView;
}
/**
 * @return the background decode handler, or null if the camera is not
 *         yet initialized
 */
public Handler getHandler() {
    return this.mHandler;
}
/**
 * @return the camera manager owning the preview driver; package-private
 *         so collaborating scan classes can reach it
 */
CameraManager getCameraManager() {
    return this.mCameraManager;
}
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    // Inflate the layout first so findViewById() below can succeed.
    setContentView(R.layout.activity_capture);
    mImageManager = new ImageManager(this);
    // No preview surface exists yet; surfaceCreated() will flip this.
    mHasSurface = false;
    mInactivityTimer = new InactivityTimer(this);
    mBeepManager = new BeepManager(this);
    mAmbientLightManager = new AmbientLightManager(this);
    mViewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
    // Seed default preference values without overwriting user choices.
    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
}
@Override
protected void onResume() {
    super.onResume();
    // CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
    // want to open the camera driver and measure the screen size if we're going to show the help on
    // first launch. That led to bugs where the scanning rectangle was the wrong size and partially
    // off screen.
    mCameraManager = new CameraManager(getApplication());
    mViewfinderView.setCameraManager(mCameraManager);
    // Handler is recreated by initCamera(); clear any stale one from a prior resume.
    mHandler = null;
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    if (mHasSurface) {
        // The activity was paused but not stopped, so the surface still exists. Therefore
        // surfaceCreated() won't be called, so init the camera here.
        initCamera(surfaceHolder);
    } else {
        // Install the callback and wait for surfaceCreated() to init the camera.
        surfaceHolder.addCallback(this);
    }
    // Restart the auxiliary managers after the camera is (or will be) live.
    mBeepManager.updatePrefs();
    mAmbientLightManager.start(mCameraManager);
    mInactivityTimer.onResume();
}
@Override
protected void onPause() {
    // Stop the decode thread before releasing the camera it reads from.
    if (mHandler != null) {
        mHandler.quitSynchronously();
        mHandler = null;
    }
    mInactivityTimer.onPause();
    mAmbientLightManager.stop();
    mCameraManager.closeDriver();
    if (!mHasSurface) {
        // Surface was never created (or already destroyed); drop our callback
        // so a stale listener doesn't fire after this pause.
        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
        SurfaceHolder surfaceHolder = surfaceView.getHolder();
        surfaceHolder.removeCallback(this);
    }
    super.onPause();
}
/**
 * Opens the options menu when the user taps.
 * NOTE(review): onTap() is not a stock Activity method — presumably inherited
 * from a Glass/base activity class; confirm against the superclass.
 */
@Override
protected boolean onTap() {
    openOptionsMenu();
    return super.onTap();
}
@Override
protected void onDestroy() {
    // Stop the inactivity timer's background work before the activity dies.
    mInactivityTimer.shutdown();
    super.onDestroy();
}
/**
 * Forwards a decode result to the handler, or buffers it in
 * {@code mSavedResultToShow} until the handler exists (it is created only
 * after the camera driver is open; see initCamera()).
 */
private void decodeOrStoreSavedBitmap(Bitmap bitmap, Result result) {
    // Bitmap isn't used yet -- will be used soon
    if (mHandler == null) {
        // No handler yet: remember the result and replay it later.
        mSavedResultToShow = result;
    } else {
        if (result != null) {
            mSavedResultToShow = result;
        }
        if (mSavedResultToShow != null) {
            Message message = Message.obtain(mHandler,
                    R.id.decode_succeeded, mSavedResultToShow);
            mHandler.sendMessage(message);
        }
        mSavedResultToShow = null;
    }
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
    if (holder == null) {
        Log.e(TAG,
                "*** WARNING *** surfaceCreated() gave us a null surface!");
    }
    // Guard against duplicate callbacks: only init the camera once per surface.
    if (!mHasSurface) {
        mHasSurface = true;
        initCamera(holder);
    }
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // Marks the surface gone so onPause()/onResume() know to reinstall the callback.
    mHasSurface = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
        int height) {
    // Intentionally empty: no action is taken on surface geometry changes.
}
/**
 * A valid barcode has been found, so give an indication of success and show
 * the results.
 *
 * @param rawResult
 *            The contents of the barcode.
 * @param scaleFactor
 *            amount by which thumbnail was scaled
 * @param barcode
 *            A greyscale bitmap of the camera data which was decoded.
 */
public void handleDecode(Result rawResult, Bitmap barcode, float scaleFactor) {
    mInactivityTimer.onActivity();
    boolean fromLiveScan = barcode != null;
    if (fromLiveScan) {
        mBeepManager.playBeepSoundAndVibrate();
        drawResultPoints(barcode, scaleFactor, rawResult, getResources()
                .getColor(R.color.result_points));
        mViewfinderView.processed = true;
        // Fix: use the shared TAG (the previous ad-hoc "i fo" tag could not be
        // filtered with the rest of this activity's log output) and guard
        // against a null decoded text, which would previously have thrown a
        // NullPointerException both in Log.d() and in equals("7").
        String text = rawResult.getText();
        Log.d(TAG, "Decoded barcode text: " + text);
        mViewfinderView.showText = "7".equals(text);
    }
    //handleDecodeInternally(rawResult, barcode);
}
/**
 * Superimpose a line for 1D or dots for 2D to highlight the key features of
 * the barcode.
 *
 * @param barcode
 *            A bitmap of the captured image.
 * @param scaleFactor
 *            amount by which thumbnail was scaled
 * @param rawResult
 *            The decoded results which contains the points to draw.
 */
private static void drawResultPoints(Bitmap barcode, float scaleFactor,
        Result rawResult, int color) {
    ResultPoint[] resultPoints = rawResult.getResultPoints();
    if (resultPoints == null || resultPoints.length == 0) {
        // Nothing to highlight.
        return;
    }
    Canvas overlay = new Canvas(barcode);
    Paint highlight = new Paint();
    highlight.setColor(color);
    if (resultPoints.length == 2) {
        // 1D barcode: a single scan line across the symbol.
        highlight.setStrokeWidth(4.0f);
        drawLine(overlay, highlight, resultPoints[0], resultPoints[1], scaleFactor);
    } else if (resultPoints.length == 4
            && (rawResult.getBarcodeFormat() == BarcodeFormat.UPC_A
                    || rawResult.getBarcodeFormat() == BarcodeFormat.EAN_13)) {
        // Hacky special case -- draw two lines, for the barcode and metadata
        drawLine(overlay, highlight, resultPoints[0], resultPoints[1], scaleFactor);
        drawLine(overlay, highlight, resultPoints[2], resultPoints[3], scaleFactor);
    } else {
        // 2D barcode: mark each detected point with a fat dot.
        highlight.setStrokeWidth(10.0f);
        for (ResultPoint resultPoint : resultPoints) {
            if (resultPoint != null) {
                overlay.drawPoint(scaleFactor * resultPoint.getX(),
                        scaleFactor * resultPoint.getY(), highlight);
            }
        }
    }
}
/**
 * Paints the bitmap fetched from {@code GRAPH_URL} over the captured image.
 *
 * @param barcode the captured bitmap to draw onto
 * @param scaleFactor unused; kept for signature parity with drawResultPoints
 * @param rawResult unused (the result-point lookup in the original was dead code)
 * @param color unused
 */
private static void drawUrlImage(Bitmap barcode, float scaleFactor, Result rawResult, int color) {
    Bitmap urlBitmap = getBitmapFromURL(GRAPH_URL);
    if (urlBitmap == null) {
        // Download or decode failed (getBitmapFromURL() returns null and
        // reports the error); drawing a null bitmap would throw an NPE.
        return;
    }
    Canvas canvas = new Canvas(barcode);
    Paint paint = new Paint();
    canvas.drawBitmap(urlBitmap, 0, 0, paint);
}
/**
 * Downloads an image over HTTP and decodes it into a bitmap.
 *
 * @param src the image URL
 * @return the decoded bitmap, or null if the download or decode failed
 */
public static Bitmap getBitmapFromURL(String src) {
    // FIXME: permitAll() disables StrictMode so network I/O can run on the
    // calling (likely UI) thread; this should move to a background thread.
    StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
    StrictMode.setThreadPolicy(policy);
    HttpURLConnection connection = null;
    InputStream input = null;
    try {
        URL url = new URL(src);
        connection = (HttpURLConnection) url.openConnection();
        connection.setDoInput(true);
        connection.connect();
        input = connection.getInputStream();
        return BitmapFactory.decodeStream(input);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } finally {
        // The original version leaked both the stream and the connection.
        if (input != null) {
            try {
                input.close();
            } catch (IOException ignored) {
                // Best-effort close; nothing more can be done here.
            }
        }
        if (connection != null) {
            connection.disconnect();
        }
    }
}
/** Draws a line between two result points, scaled to the thumbnail; no-op if either end is null. */
private static void drawLine(Canvas canvas, Paint paint, ResultPoint a,
        ResultPoint b, float scaleFactor) {
    if (a == null || b == null) {
        return;
    }
    float startX = scaleFactor * a.getX();
    float startY = scaleFactor * a.getY();
    float stopX = scaleFactor * b.getX();
    float stopY = scaleFactor * b.getY();
    canvas.drawLine(startX, startY, stopX, stopY, paint);
}
// Put up our own UI for how to handle the decoded contents.
// Currently unreferenced from handleDecode() (call is commented out there).
private void handleDecodeInternally(Result rawResult, Bitmap barcode) {
    Uri imageUri = null;
    String imageName = IMAGE_PREFIX + System.currentTimeMillis() + ".png";
    Log.v(TAG, "Saving image as: " + imageName);
    try {
        imageUri = mImageManager.saveImage(imageName, barcode);
    } catch (IOException e) {
        // Best-effort: proceed with a null imageUri if saving fails.
        Log.e(TAG, "Failed to save image!", e);
    }
    ResultProcessor<?> processor = ResultProcessorFactory.makeResultProcessor(this, rawResult, imageUri);
    startActivity(GraphActivity.newIntent(this));
    //startActivity(ResultsActivity.newIntent(this, processor.getCardResults()));
}
/**
 * Opens the camera driver on the given surface and creates the decode
 * handler (which starts the preview). Ignores the call when the driver is
 * already open, which happens on a late SurfaceView callback.
 */
private void initCamera(SurfaceHolder surfaceHolder) {
    if (surfaceHolder == null) {
        throw new IllegalStateException("No SurfaceHolder provided");
    }
    if (mCameraManager.isOpen()) {
        Log.w(TAG,
                "initCamera() while already open -- late SurfaceView callback?");
        return;
    }
    try {
        mCameraManager.openDriver(surfaceHolder);
        // Creating the handler starts the preview, which can also throw a RuntimeException.
        if (mHandler == null) {
            mHandler = new CaptureActivityHandler(this, null, mDecodeHints,
                    null, mCameraManager);
        }
        // Replay any decode result buffered while the handler did not exist.
        decodeOrStoreSavedBitmap(null, null);
    } catch (IOException e) {
        Log.w(TAG, e);
        displayFrameworkBugMessageAndExit();
    } catch (InterruptedException e) {
        Log.w(TAG, e);
        displayFrameworkBugMessageAndExit();
    }
}
/**
 * Shows a dialog reporting a camera framework problem; dismissing it
 * finishes the activity (via FinishListener).
 * FIXME: This should be a glass compatible view (Card)
 */
private void displayFrameworkBugMessageAndExit() {
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    builder.setTitle(getString(R.string.app_name));
    builder.setMessage(getString(R.string.msg_camera_framework_bug));
    builder.setPositiveButton(R.string.button_ok, new FinishListener(this));
    builder.setOnCancelListener(new FinishListener(this));
    builder.show();
}
/** Asks the handler to restart the preview after {@code delayMS} ms; no-op when paused (handler null). */
public void restartPreviewAfterDelay(long delayMS) {
    if (mHandler != null) {
        mHandler.sendEmptyMessageDelayed(R.id.restart_preview, delayMS);
    }
}
/** Requests a redraw of the viewfinder overlay. */
public void drawViewfinder() {
    mViewfinderView.drawViewfinder();
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.swt.examples.controlexample;
import org.eclipse.swt.*;
import org.eclipse.swt.widgets.*;
import org.eclipse.swt.layout.*;
import org.eclipse.swt.events.*;
/**
 * Example tab demonstrating SWT {@link Menu} and {@link MenuItem} styles.
 * The user selects menu and menu-item style bits, then creates example
 * shells whose menus are built from those selections.
 */
class MenuTab extends Tab {
	/* Widgets added to the "Menu Style", "MenuItem Style" and "Other" groups */
	Button barButton, dropDownButton, popUpButton, noRadioGroupButton, leftToRightButton, rightToLeftButton;
	Button checkButton, cascadeButton, pushButton, radioButton, separatorButton;
	Button imagesButton, acceleratorsButton, mnemonicsButton, subMenuButton, subSubMenuButton;
	Button createButton, closeAllButton;
	Group menuItemStyleGroup;

	/* Variables used to track the open shells */
	int shellCount = 0;
	Shell [] shells = new Shell [4];

	/**
	 * Creates the Tab within a given instance of ControlExample.
	 */
	MenuTab(ControlExample instance) {
		super(instance);
	}

	/**
	 * Close all the example shells.
	 */
	void closeAllShells() {
		for (int i = 0; i<shellCount; i++) {
			// Bug fix: the original used the non-short-circuit '&' operator,
			// which evaluated isDisposed() even when shells[i] was null and
			// could throw a NullPointerException. '&&' short-circuits.
			if (shells[i] != null && !shells [i].isDisposed ()) {
				shells [i].dispose();
			}
		}
		shellCount = 0;
	}

	/**
	 * Handle the Create button selection event.
	 *
	 * @param event org.eclipse.swt.events.SelectionEvent
	 */
	public void createButtonSelected(SelectionEvent event) {
		/*
		 * Remember the example shells so they
		 * can be disposed by the user.
		 */
		if (shellCount >= shells.length) {
			// Grow the tracking array in increments of 4.
			Shell [] newShells = new Shell [shells.length + 4];
			System.arraycopy (shells, 0, newShells, 0, shells.length);
			shells = newShells;
		}

		/* Compute the style bits from the user's selections. */
		int orientation = 0;
		if (leftToRightButton.getSelection()) orientation |= SWT.LEFT_TO_RIGHT;
		if (rightToLeftButton.getSelection()) orientation |= SWT.RIGHT_TO_LEFT;
		int radioBehavior = 0;
		if (noRadioGroupButton.getSelection()) radioBehavior |= SWT.NO_RADIO_GROUP;

		/* Create the shell and menu(s) */
		Shell shell = new Shell (SWT.SHELL_TRIM | orientation);
		shells [shellCount] = shell;
		if (barButton.getSelection ()) {
			/* Create menu bar. */
			Menu menuBar = new Menu(shell, SWT.BAR | radioBehavior);
			shell.setMenuBar(menuBar);
			hookListeners(menuBar);

			if (dropDownButton.getSelection() && cascadeButton.getSelection()) {
				/* Create cascade button and drop-down menu in menu bar. */
				MenuItem item = new MenuItem(menuBar, SWT.CASCADE);
				item.setText(getMenuItemText("Cascade"));
				if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciOpenFolder]);
				hookListeners(item);
				Menu dropDownMenu = new Menu(shell, SWT.DROP_DOWN | radioBehavior);
				item.setMenu(dropDownMenu);
				hookListeners(dropDownMenu);

				/* Create various menu items, depending on selections. */
				createMenuItems(dropDownMenu, subMenuButton.getSelection(), subSubMenuButton.getSelection());
			}
		}

		if (popUpButton.getSelection()) {
			/* Create pop-up menu. */
			Menu popUpMenu = new Menu(shell, SWT.POP_UP | radioBehavior);
			shell.setMenu(popUpMenu);
			hookListeners(popUpMenu);

			/* Create various menu items, depending on selections. */
			createMenuItems(popUpMenu, subMenuButton.getSelection(), subSubMenuButton.getSelection());
		}

		/* Set the size, title and open the shell. */
		shell.setSize (300, 100);
		shell.setText (ControlExample.getResourceString("Title") + shellCount);
		shell.addPaintListener(new PaintListener() {
			public void paintControl(PaintEvent e) {
				e.gc.drawString(ControlExample.getResourceString("PopupMenuHere"), 20, 20);
			}
		});
		shell.open ();
		shellCount++;
	}

	/**
	 * Creates the "Control" group.
	 */
	@Override
	void createControlGroup () {
		/*
		 * Create the "Control" group. This is the group on the
		 * right half of each example tab. For MenuTab, it consists of
		 * the Menu style group, the MenuItem style group and the 'other' group.
		 */
		controlGroup = new Group (tabFolderPage, SWT.NONE);
		controlGroup.setLayout (new GridLayout (2, true));
		controlGroup.setLayoutData (new GridData (GridData.HORIZONTAL_ALIGN_FILL | GridData.VERTICAL_ALIGN_FILL));
		controlGroup.setText (ControlExample.getResourceString("Parameters"));

		/* Create a group for the menu style controls */
		styleGroup = new Group (controlGroup, SWT.NONE);
		styleGroup.setLayout (new GridLayout ());
		styleGroup.setLayoutData (new GridData (GridData.HORIZONTAL_ALIGN_FILL | GridData.VERTICAL_ALIGN_FILL));
		styleGroup.setText (ControlExample.getResourceString("Menu_Styles"));

		/* Create a group for the menu item style controls */
		menuItemStyleGroup = new Group (controlGroup, SWT.NONE);
		menuItemStyleGroup.setLayout (new GridLayout ());
		menuItemStyleGroup.setLayoutData (new GridData(GridData.HORIZONTAL_ALIGN_FILL | GridData.VERTICAL_ALIGN_FILL));
		menuItemStyleGroup.setText (ControlExample.getResourceString("MenuItem_Styles"));

		/* Create a group for the 'other' controls */
		otherGroup = new Group (controlGroup, SWT.NONE);
		otherGroup.setLayout (new GridLayout ());
		otherGroup.setLayoutData (new GridData(GridData.HORIZONTAL_ALIGN_FILL | GridData.VERTICAL_ALIGN_FILL));
		otherGroup.setText (ControlExample.getResourceString("Other"));
	}

	/**
	 * Creates the "Control" widget children.
	 */
	@Override
	void createControlWidgets () {
		/* Create the menu style buttons */
		barButton = new Button (styleGroup, SWT.CHECK);
		barButton.setText ("SWT.BAR");
		dropDownButton = new Button (styleGroup, SWT.CHECK);
		dropDownButton.setText ("SWT.DROP_DOWN");
		popUpButton = new Button (styleGroup, SWT.CHECK);
		popUpButton.setText ("SWT.POP_UP");
		noRadioGroupButton = new Button (styleGroup, SWT.CHECK);
		noRadioGroupButton.setText ("SWT.NO_RADIO_GROUP");
		leftToRightButton = new Button (styleGroup, SWT.RADIO);
		leftToRightButton.setText ("SWT.LEFT_TO_RIGHT");
		leftToRightButton.setSelection(true);
		rightToLeftButton = new Button (styleGroup, SWT.RADIO);
		rightToLeftButton.setText ("SWT.RIGHT_TO_LEFT");

		/* Create the menu item style buttons */
		cascadeButton = new Button (menuItemStyleGroup, SWT.CHECK);
		cascadeButton.setText ("SWT.CASCADE");
		checkButton = new Button (menuItemStyleGroup, SWT.CHECK);
		checkButton.setText ("SWT.CHECK");
		pushButton = new Button (menuItemStyleGroup, SWT.CHECK);
		pushButton.setText ("SWT.PUSH");
		radioButton = new Button (menuItemStyleGroup, SWT.CHECK);
		radioButton.setText ("SWT.RADIO");
		separatorButton = new Button (menuItemStyleGroup, SWT.CHECK);
		separatorButton.setText ("SWT.SEPARATOR");

		/* Create the 'other' buttons */
		enabledButton = new Button(otherGroup, SWT.CHECK);
		enabledButton.setText(ControlExample.getResourceString("Enabled"));
		enabledButton.setSelection(true);
		imagesButton = new Button (otherGroup, SWT.CHECK);
		imagesButton.setText (ControlExample.getResourceString("Images"));
		acceleratorsButton = new Button (otherGroup, SWT.CHECK);
		acceleratorsButton.setText (ControlExample.getResourceString("Accelerators"));
		mnemonicsButton = new Button (otherGroup, SWT.CHECK);
		mnemonicsButton.setText (ControlExample.getResourceString("Mnemonics"));
		subMenuButton = new Button (otherGroup, SWT.CHECK);
		subMenuButton.setText (ControlExample.getResourceString("SubMenu"));
		subSubMenuButton = new Button (otherGroup, SWT.CHECK);
		subSubMenuButton.setText (ControlExample.getResourceString("SubSubMenu"));

		/* Create the "create" and "closeAll" buttons (and a 'filler' label to place them) */
		new Label(controlGroup, SWT.NONE);
		createButton = new Button (controlGroup, SWT.NONE);
		createButton.setLayoutData (new GridData (GridData.HORIZONTAL_ALIGN_END));
		createButton.setText (ControlExample.getResourceString("Create_Shell"));
		closeAllButton = new Button (controlGroup, SWT.NONE);
		closeAllButton.setLayoutData (new GridData (GridData.HORIZONTAL_ALIGN_BEGINNING));
		closeAllButton.setText (ControlExample.getResourceString("Close_All_Shells"));

		/* Add the listeners */
		createButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				createButtonSelected(e);
			}
		});
		closeAllButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				closeAllShells ();
			}
		});
		subMenuButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				// Sub-sub-menus only make sense when sub-menus are enabled.
				subSubMenuButton.setEnabled (subMenuButton.getSelection ());
			}
		});

		/* Set the default state */
		barButton.setSelection (true);
		dropDownButton.setSelection (true);
		popUpButton.setSelection (true);
		cascadeButton.setSelection (true);
		checkButton.setSelection (true);
		pushButton.setSelection (true);
		radioButton.setSelection (true);
		separatorButton.setSelection (true);
		subSubMenuButton.setEnabled (subMenuButton.getSelection ());
	}

	/* Create various menu items, depending on selections. */
	void createMenuItems(Menu menu, boolean createSubMenu, boolean createSubSubMenu) {
		MenuItem item;
		if (pushButton.getSelection()) {
			item = new MenuItem(menu, SWT.PUSH);
			item.setText(getMenuItemText("Push"));
			if (acceleratorsButton.getSelection()) item.setAccelerator(SWT.MOD1 + SWT.MOD2 + 'P');
			if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciClosedFolder]);
			item.setEnabled(enabledButton.getSelection());
			hookListeners(item);
		}

		if (separatorButton.getSelection()) {
			new MenuItem(menu, SWT.SEPARATOR);
		}

		if (checkButton.getSelection()) {
			item = new MenuItem(menu, SWT.CHECK);
			item.setText(getMenuItemText("Check"));
			if (acceleratorsButton.getSelection()) item.setAccelerator(SWT.MOD1 + SWT.MOD2 + 'C');
			if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciOpenFolder]);
			item.setEnabled(enabledButton.getSelection());
			hookListeners(item);
		}

		if (radioButton.getSelection()) {
			item = new MenuItem(menu, SWT.RADIO);
			item.setText(getMenuItemText("1Radio"));
			if (acceleratorsButton.getSelection()) item.setAccelerator(SWT.MOD1 + SWT.MOD2 + '1');
			if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciTarget]);
			item.setSelection(true);
			item.setEnabled(enabledButton.getSelection());
			hookListeners(item);

			item = new MenuItem(menu, SWT.RADIO);
			item.setText(getMenuItemText("2Radio"));
			if (acceleratorsButton.getSelection()) item.setAccelerator(SWT.MOD1 + SWT.MOD2 + '2');
			if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciTarget]);
			item.setEnabled(enabledButton.getSelection());
			hookListeners(item);
		}

		if (createSubMenu && cascadeButton.getSelection()) {
			/* Create cascade button and drop-down menu for the sub-menu. */
			item = new MenuItem(menu, SWT.CASCADE);
			item.setText(getMenuItemText("Cascade"));
			if (imagesButton.getSelection()) item.setImage(instance.images[ControlExample.ciOpenFolder]);
			hookListeners(item);
			Menu subMenu = new Menu(menu.getShell(), SWT.DROP_DOWN);
			item.setMenu(subMenu);
			item.setEnabled(enabledButton.getSelection());
			hookListeners(subMenu);

			// Recurse one level: a sub-sub-menu is a sub-menu of the sub-menu.
			createMenuItems(subMenu, createSubSubMenu, false);
		}
	}

	/**
	 * Returns the display text for a menu item, applying the mnemonic and
	 * accelerator-hint variants selected by the user. Cascade items never
	 * show an accelerator.
	 */
	String getMenuItemText(String item) {
		boolean cascade = item.equals("Cascade");
		boolean mnemonic = mnemonicsButton.getSelection();
		boolean accelerator = acceleratorsButton.getSelection();
		char acceleratorKey = item.charAt(0);
		if (mnemonic && accelerator && !cascade) {
			return ControlExample.getResourceString(item + "WithMnemonic") + "\tCtrl+Shift+" + acceleratorKey;
		}
		if (accelerator && !cascade) {
			return ControlExample.getResourceString(item) + "\tCtrl+Shift+" + acceleratorKey;
		}
		if (mnemonic) {
			return ControlExample.getResourceString(item + "WithMnemonic");
		}
		return ControlExample.getResourceString(item);
	}

	/**
	 * Gets the text for the tab folder item.
	 */
	@Override
	String getTabText () {
		return "Menu";
	}
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event.listener;
import com.facebook.buck.artifact_cache.ArtifactCacheConnectEvent;
import com.facebook.buck.artifact_cache.ArtifactCacheEvent;
import com.facebook.buck.cli.CommandEvent;
import com.facebook.buck.event.ActionGraphEvent;
import com.facebook.buck.event.ArtifactCompressionEvent;
import com.facebook.buck.event.BuckEvent;
import com.facebook.buck.event.BuckEventListener;
import com.facebook.buck.event.ChromeTraceEvent;
import com.facebook.buck.event.CompilerPluginDurationEvent;
import com.facebook.buck.event.InstallEvent;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.event.StartActivityEvent;
import com.facebook.buck.event.TraceEvent;
import com.facebook.buck.event.UninstallEvent;
import com.facebook.buck.io.PathListing;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.json.ParseBuckFileEvent;
import com.facebook.buck.jvm.java.AnnotationProcessingEvent;
import com.facebook.buck.jvm.java.tracing.JavacPhaseEvent;
import com.facebook.buck.log.CommandThreadFactory;
import com.facebook.buck.log.InvocationInfo;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.parser.ParseEvent;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleEvent;
import com.facebook.buck.rules.TestSummaryEvent;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.util.BestCompressionGZIPOutputStream;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.Optionals;
import com.facebook.buck.util.concurrent.MostExecutors;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.CaseFormat;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.eventbus.Subscribe;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Logs events to a json file formatted to be viewed in Chrome Trace View (chrome://tracing).
*/
public class ChromeTraceBuildListener implements BuckEventListener {
  // Maps UpperCamelCase perf-event ids to lower_underscore trace names;
  // weak values allow unused conversions to be garbage-collected.
  private static final LoadingCache<String, String> CONVERTED_EVENT_ID_CACHE = CacheBuilder
      .newBuilder()
      .weakValues()
      .build(new CacheLoader<String, String>() {
        @Override
        public String load(String key) throws Exception {
          return CaseFormat
              .UPPER_CAMEL
              .converterTo(CaseFormat.LOWER_UNDERSCORE)
              .convert(key)
              .intern();
        }
      });

  private static final Logger LOG = Logger.get(ChromeTraceBuildListener.class);
  // Max time outputTrace() waits for queued trace writes to drain.
  private static final int TIMEOUT_SECONDS = 30;

  private final ProjectFilesystem projectFilesystem;
  private final Clock clock;
  private final int tracesToKeep;
  private final boolean compressTraces;
  private final ObjectMapper mapper;
  // SimpleDateFormat is not thread-safe, hence the per-thread instance.
  private final ThreadLocal<SimpleDateFormat> dateFormat;
  private final Path tracePath;
  private final OutputStream traceStream;
  private final JsonGenerator jsonGenerator;
  private final InvocationInfo invocationInfo;
  // Single-threaded executor for trace output; drained in outputTrace().
  private final ExecutorService outputExecutor;
  /**
   * Creates a listener that names trace files using the US locale and the
   * default time zone; delegates to the testing constructor.
   */
  public ChromeTraceBuildListener(
      ProjectFilesystem projectFilesystem,
      InvocationInfo invocationInfo,
      Clock clock,
      ObjectMapper objectMapper,
      int tracesToKeep,
      boolean compressTraces) throws IOException {
    this(
        projectFilesystem,
        invocationInfo,
        clock,
        objectMapper,
        Locale.US,
        TimeZone.getDefault(),
        tracesToKeep,
        compressTraces);
  }
  /**
   * Testing constructor with injectable locale/time zone. Opens the trace
   * file (optionally gzipped), starts the JSON event array, and writes the
   * process-metadata event, so failures surface as IOException at creation.
   */
  @VisibleForTesting
  ChromeTraceBuildListener(
      ProjectFilesystem projectFilesystem,
      InvocationInfo invocationInfo,
      Clock clock,
      ObjectMapper objectMapper,
      final Locale locale,
      final TimeZone timeZone,
      int tracesToKeep,
      boolean compressTraces) throws IOException {
    this.invocationInfo = invocationInfo;
    this.projectFilesystem = projectFilesystem;
    this.clock = clock;
    this.mapper = objectMapper;
    this.dateFormat = new ThreadLocal<SimpleDateFormat>() {
      @Override
      protected SimpleDateFormat initialValue() {
        // One instance per thread: SimpleDateFormat is not thread-safe.
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd.HH-mm-ss", locale);
        dateFormat.setTimeZone(timeZone);
        return dateFormat;
      }
    };
    this.tracesToKeep = tracesToKeep;
    this.compressTraces = compressTraces;
    this.outputExecutor = MostExecutors.newSingleThreadExecutor(
        new CommandThreadFactory(getClass().getName()));
    TracePathAndStream tracePathAndStream = createPathAndStream(invocationInfo);
    this.tracePath = tracePathAndStream.getPath();
    this.traceStream = tracePathAndStream.getStream();
    this.jsonGenerator = objectMapper.getFactory().createGenerator(this.traceStream);
    // The trace file is a single JSON array of events, closed in outputTrace().
    this.jsonGenerator.writeStartArray();
    addProcessMetadataEvent();
  }
  /** Returns the path of the trace file being written (for tests). */
  @VisibleForTesting
  Path getTracePath() {
    return tracePath;
  }
  /** Emits the metadata event that labels this process "buck" in the Chrome trace viewer. */
  private void addProcessMetadataEvent() {
    submitTraceEvent(
        new ChromeTraceEvent(
            "buck",
            "process_name",
            ChromeTraceEvent.Phase.METADATA,
            /* processId */ 0,
            /* threadId */ 0,
            /* microTime */ 0,
            /* microThreadUserTime */ 0,
            ImmutableMap.of("name", "buck")));
  }
  /**
   * Deletes all but the {@code tracesToKeep} most recently modified
   * "build.*.trace" files in the log directory. Failures are logged, not thrown.
   */
  @VisibleForTesting
  void deleteOldTraces() {
    if (!projectFilesystem.exists(invocationInfo.getLogDirectoryPath())) {
      return;
    }
    Path traceDirectory = projectFilesystem.getPathForRelativePath(
        invocationInfo.getLogDirectoryPath());
    try {
      for (Path path : PathListing.listMatchingPathsWithFilters(
          traceDirectory,
          "build.*.trace",
          PathListing.GET_PATH_MODIFIED_TIME,
          PathListing.FilterMode.EXCLUDE,
          Optional.of(tracesToKeep),
          Optional.<Long>absent())) {
        projectFilesystem.deleteFileAtPath(path);
      }
    } catch (IOException e) {
      // Best-effort cleanup; never fail the build over stale traces.
      LOG.error(e, "Couldn't list paths in trace directory %s", traceDirectory);
    }
  }
  /**
   * Creates the trace file "build.<timestamp>.<buildId>.trace[.gz]" in the
   * log directory and opens an output stream to it (gzip-wrapped when
   * compression is enabled).
   *
   * @throws HumanReadableException if the file cannot be created
   */
  private TracePathAndStream createPathAndStream(InvocationInfo invocationInfo) {
    String filenameTime = dateFormat.get().format(new Date(clock.currentTimeMillis()));
    String traceName =
        String.format("build.%s.%s.trace", filenameTime, invocationInfo.getBuildId());
    if (compressTraces) {
      traceName = traceName + ".gz";
    }
    Path tracePath = invocationInfo.getLogDirectoryPath().resolve(traceName);
    try {
      projectFilesystem.createParentDirs(tracePath);
      OutputStream stream = projectFilesystem.newFileOutputStream(tracePath);
      if (compressTraces) {
        stream = new BestCompressionGZIPOutputStream(stream, true);
      }
      return new TracePathAndStream(tracePath, stream);
    } catch (IOException e) {
      throw new HumanReadableException(e, "Unable to write trace file: " + e);
    }
  }
  /**
   * Finalizes the trace: drains pending writes, closes the JSON array and
   * stream, points the build.trace[.gz] symlink at the new file, and prunes
   * old traces.
   *
   * @throws HumanReadableException if closing or symlinking fails
   */
  @Override
  public void outputTrace(BuildId buildId) {
    try {
      LOG.debug("Writing Chrome trace to %s", tracePath);
      // Stop accepting writes and wait for queued trace events to flush.
      outputExecutor.shutdown();
      try {
        if (!outputExecutor.awaitTermination(TIMEOUT_SECONDS, TimeUnit.SECONDS)) {
          LOG.warn("Failed to log buck trace %s. Trace might be corrupt", tracePath);
        }
      } catch (InterruptedException e) {
        // Preserve the interrupt flag for callers further up the stack.
        Thread.currentThread().interrupt();
      }
      jsonGenerator.writeEndArray();
      jsonGenerator.close();
      traceStream.close();
      String symlinkName = compressTraces ? "build.trace.gz" : "build.trace";
      Path symlinkPath = projectFilesystem.getBuckPaths().getLogDir().resolve(symlinkName);
      projectFilesystem.createSymLink(
          projectFilesystem.resolve(symlinkPath),
          projectFilesystem.resolve(tracePath),
          true);
      deleteOldTraces();
    } catch (IOException e) {
      throw new HumanReadableException(e, "Unable to write trace file: " + e);
    }
  }
  /** Emits a BEGIN event for a buck command, recording its arguments. */
  @Subscribe
  public void commandStarted(CommandEvent.Started started) {
    writeChromeTraceEvent("buck",
        started.getCommandName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of(
            "command_args", Joiner.on(' ').join(started.getArgs())
        ),
        started);
  }
  /** Emits an END event for a buck command, recording its arguments and daemon state. */
  @Subscribe
  public void commandFinished(CommandEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        finished.getCommandName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "command_args", Joiner.on(' ').join(finished.getArgs()),
            "daemon", Boolean.toString(finished.isDaemon())),
        finished);
  }
  /** Emits a BEGIN event for the overall build phase. */
  @Subscribe
  public void buildStarted(BuildEvent.Started started) {
    writeChromeTraceEvent("buck",
        "build",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /**
   * Emits an END event for the overall build phase.
   * NOTE(review): this is the only synchronized subscriber in the class —
   * presumably intentional; confirm whether the lock is still needed.
   */
  @Subscribe
  public synchronized void buildFinished(BuildEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "build",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }
  /** Emits a BEGIN event for a build rule, named by its fully-qualified target. */
  @Subscribe
  public void ruleStarted(BuildRuleEvent.Started started) {
    BuildRule buildRule = started.getBuildRule();
    writeChromeTraceEvent("buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
@Subscribe
public void ruleFinished(BuildRuleEvent.Finished finished) {
writeChromeTraceEvent("buck",
finished.getBuildRule().getFullyQualifiedName(),
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"cache_result", finished.getCacheResult().toString().toLowerCase(),
"success_type",
finished.getSuccessType().transform(Functions.toStringFunction()).or("failed")
),
finished);
}
  /** Emits a BEGIN event when a suspended rule resumes, recording its rule key. */
  @Subscribe
  public void ruleResumed(BuildRuleEvent.Resumed resumed) {
    BuildRule buildRule = resumed.getBuildRule();
    writeChromeTraceEvent(
        "buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of("rule_key", resumed.getRuleKey()),
        resumed);
  }
  /** Emits an END event when a rule is suspended, recording its rule key. */
  @Subscribe
  public void ruleSuspended(BuildRuleEvent.Suspended suspended) {
    BuildRule buildRule = suspended.getBuildRule();
    writeChromeTraceEvent("buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of("rule_key", suspended.getRuleKey()),
        suspended);
  }
  /** Emits a BEGIN event for a build step, named by its short step name. */
  @Subscribe
  public void stepStarted(StepEvent.Started started) {
    writeChromeTraceEvent("buck",
        started.getShortStepName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for a build step, recording its description and exit code. */
  @Subscribe
  public void stepFinished(StepEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        finished.getShortStepName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "description", finished.getDescription(),
            "exit_code", Integer.toString(finished.getExitCode())),
        finished);
  }
  /** Emits a BEGIN event for the target-graph parse phase. */
  @Subscribe
  public void parseStarted(ParseEvent.Started started) {
    writeChromeTraceEvent("buck",
        "parse",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for the parse phase, recording the parsed targets. */
  @Subscribe
  public void parseFinished(ParseEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "parse",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "targets",
            Joiner.on(",").join(finished.getBuildTargets())),
        finished);
  }
  /**
   * Translates a generic perf event into a trace event: STARTED/FINISHED map
   * to BEGIN/END, UPDATED maps to IMMEDIATE. The event id is converted to
   * lower_underscore via a cache since the same ids recur frequently.
   */
  @Subscribe
  public void simplePerfEvent(SimplePerfEvent perfEvent) {
    ChromeTraceEvent.Phase phase = null;
    switch (perfEvent.getEventType()) {
      case STARTED:
        phase = ChromeTraceEvent.Phase.BEGIN;
        break;
      case FINISHED:
        phase = ChromeTraceEvent.Phase.END;
        break;
      case UPDATED:
        phase = ChromeTraceEvent.Phase.IMMEDIATE;
        break;
    }
    // Defensive: fail loudly if a new event type is added without a mapping.
    if (phase == null) {
      throw new IllegalStateException(
          "Unsupported perf event type: " + perfEvent.getEventType());
    }
    try {
      writeChromeTraceEvent(
          "buck",
          CONVERTED_EVENT_ID_CACHE.get(perfEvent.getEventId().getValue().intern()),
          phase,
          ImmutableMap.copyOf(
              Maps.transformValues(perfEvent.getEventInfo(), Functions.toStringFunction())),
          perfEvent);
    } catch (ExecutionException e) {
      // Cache loader failed; drop this event rather than aborting the build.
      LOG.warn("Unable to log perf event " + perfEvent, e);
    }
  }
  /** Emits a BEGIN event for parsing a single BUCK file, recording its path. */
  @Subscribe
  public void parseBuckFileStarted(ParseBuckFileEvent.Started started) {
    writeChromeTraceEvent(
        "buck",
        "parse_file",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of(
            "path",
            started.getBuckFilePath().toString()),
        started);
  }
  /** Emits an END event for a BUCK file parse: path, rule count, and python profile. */
  @Subscribe
  public void parseBuckFileFinished(ParseBuckFileEvent.Finished finished) {
    writeChromeTraceEvent(
        "buck",
        "parse_file",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "path",
            finished.getBuckFilePath().toString(),
            "num_rules",
            Integer.toString(finished.getNumRules()),
            "python_profile",
            finished.getProfile()),
        finished);
  }
  /** Emits a BEGIN event for action-graph construction. */
  @Subscribe
  public void actionGraphStarted(ActionGraphEvent.Started started) {
    writeChromeTraceEvent(
        "buck",
        "action_graph",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for action-graph construction. */
  @Subscribe
  public void actionGraphFinished(ActionGraphEvent.Finished finished) {
    writeChromeTraceEvent(
        "buck",
        "action_graph",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }
  /** Emits a BEGIN event for an install. */
  @Subscribe
  public void installStarted(InstallEvent.Started started) {
    writeChromeTraceEvent("buck",
        "install",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for an install, recording the target and success flag. */
  @Subscribe
  public void installFinished(InstallEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "install",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "target", finished.getBuildTarget().getFullyQualifiedName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }
  /** Emits a BEGIN event for launching an Android activity. */
  @Subscribe
  public void startActivityStarted(StartActivityEvent.Started started) {
    writeChromeTraceEvent("buck",
        "start_activity",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for an activity launch: target, activity name, success. */
  @Subscribe
  public void startActivityFinished(StartActivityEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "start_activity",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "target", finished.getBuildTarget().getFullyQualifiedName(),
            "activity_name", finished.getActivityName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }
  /** Emits a BEGIN event for an uninstall. */
  @Subscribe
  public void uninstallStarted(UninstallEvent.Started started) {
    writeChromeTraceEvent("buck",
        "uninstall",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }
  /** Emits an END event for an uninstall, recording the package name and success flag. */
  @Subscribe
  public void uninstallFinished(UninstallEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "uninstall",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "package_name", finished.getPackageName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }
@Subscribe
public void artifactCacheEventStarted(ArtifactCacheEvent.Started started) {
  // Open a span for the cache operation; the event's category names the operation.
  ImmutableMap<String, String> arguments =
      ImmutableMap.of("rule_key", Joiner.on(", ").join(started.getRuleKeys()));
  writeChromeTraceEvent(
      "buck", started.getCategory(), ChromeTraceEvent.Phase.BEGIN, arguments, started);
}
@Subscribe
public void artifactCacheEventFinished(ArtifactCacheEvent.Finished finished) {
  // Close the cache-operation span.  "cache_result" is only added when the
  // event actually carried a result.
  ImmutableMap.Builder<String, String> arguments = ImmutableMap.builder();
  arguments.put("success", Boolean.toString(finished.isSuccess()));
  arguments.put("rule_key", Joiner.on(", ").join(finished.getRuleKeys()));
  Optionals.putIfPresent(
      finished.getCacheResult().transform(Functions.toStringFunction()),
      "cache_result",
      arguments);
  writeChromeTraceEvent(
      "buck", finished.getCategory(), ChromeTraceEvent.Phase.END, arguments.build(), finished);
}
@Subscribe
public void artifactCompressionStarted(ArtifactCompressionEvent.Started started) {
  // Compression begin/end both funnel through the shared helper below.
  final ChromeTraceEvent.Phase phase = ChromeTraceEvent.Phase.BEGIN;
  writeArtifactCompressionEvent(started, phase);
}
@Subscribe
public void artifactCompressionFinished(ArtifactCompressionEvent.Finished finished) {
  // Mirror of artifactCompressionStarted, closing the span.
  final ChromeTraceEvent.Phase phase = ChromeTraceEvent.Phase.END;
  writeArtifactCompressionEvent(finished, phase);
}
/**
 * Shared emitter for artifact compression begin/end events; tags the span with
 * the rule keys the artifact belongs to.
 */
public void writeArtifactCompressionEvent(
    ArtifactCompressionEvent event, ChromeTraceEvent.Phase phase) {
  ImmutableMap<String, String> arguments =
      ImmutableMap.of("rule_key", Joiner.on(", ").join(event.getRuleKeys()));
  writeChromeTraceEvent("buck", event.getCategory(), phase, arguments, event);
}
@Subscribe
public void artifactConnectStarted(ArtifactCacheConnectEvent.Started started) {
  // Open the span for connecting to the artifact cache; no arguments.
  ImmutableMap<String, String> noArguments = ImmutableMap.of();
  writeChromeTraceEvent("buck", "artifact_connect", ChromeTraceEvent.Phase.BEGIN, noArguments,
      started);
}
@Subscribe
public void artifactConnectFinished(ArtifactCacheConnectEvent.Finished finished) {
  // Close the artifact cache connection span; no arguments.
  ImmutableMap<String, String> noArguments = ImmutableMap.of();
  writeChromeTraceEvent("buck", "artifact_connect", ChromeTraceEvent.Phase.END, noArguments,
      finished);
}
@Subscribe
public void javacPhaseStarted(JavacPhaseEvent.Started started) {
  // Compiler phases are traced under the "javac" category, named after the
  // phase itself, with the event's own argument map attached.
  writeChromeTraceEvent(
      "javac", started.getPhase().toString(), ChromeTraceEvent.Phase.BEGIN,
      started.getArgs(), started);
}
@Subscribe
public void javacPhaseFinished(JavacPhaseEvent.Finished finished) {
  // Mirror of javacPhaseStarted, closing the phase's span.
  writeChromeTraceEvent(
      "javac", finished.getPhase().toString(), ChromeTraceEvent.Phase.END,
      finished.getArgs(), finished);
}
@Subscribe
public void annotationProcessingStarted(AnnotationProcessingEvent.Started started) {
  // Each annotation processor is traced under its own category name.
  ImmutableMap<String, String> noArguments = ImmutableMap.of();
  writeChromeTraceEvent(
      started.getAnnotationProcessorName(), started.getCategory(),
      ChromeTraceEvent.Phase.BEGIN, noArguments, started);
}
@Subscribe
public void annotationProcessingFinished(AnnotationProcessingEvent.Finished finished) {
  // Mirror of annotationProcessingStarted, closing the processor's span.
  ImmutableMap<String, String> noArguments = ImmutableMap.of();
  writeChromeTraceEvent(
      finished.getAnnotationProcessorName(), finished.getCategory(),
      ChromeTraceEvent.Phase.END, noArguments, finished);
}
@Subscribe
public void compilerPluginDurationEventStarted(CompilerPluginDurationEvent.Started started) {
  // Plugin-reported durations: category is the plugin, name is the duration label.
  writeChromeTraceEvent(
      started.getPluginName(), started.getDurationName(), ChromeTraceEvent.Phase.BEGIN,
      started.getArgs(), started);
}
@Subscribe
public void compilerPluginDurationEventFinished(CompilerPluginDurationEvent.Finished finished) {
  // Mirror of the started handler, closing the plugin duration span.
  writeChromeTraceEvent(
      finished.getPluginName(), finished.getDurationName(), ChromeTraceEvent.Phase.END,
      finished.getArgs(), finished);
}
@Subscribe
public void traceEvent(TraceEvent event) {
  // Pass-through for events that already describe a chrome-trace entry.
  writeChromeTraceEvent(
      "buck", event.getEventName(), event.getPhase(), event.getProperties(), event);
}
@Subscribe
public void testStartedEvent(TestSummaryEvent.Started started) {
  // Open a "test" span identified by test case and test name.
  ImmutableMap<String, String> arguments = ImmutableMap.of(
      "test_case_name", started.getTestCaseName(),
      "test_name", started.getTestName());
  writeChromeTraceEvent("buck", "test", ChromeTraceEvent.Phase.BEGIN, arguments, started);
}
@Subscribe
public void testFinishedEvent(TestSummaryEvent.Finished finished) {
  // Close the matching "test" span for this test case/name pair.
  ImmutableMap<String, String> arguments = ImmutableMap.of(
      "test_case_name", finished.getTestCaseName(),
      "test_name", finished.getTestName());
  writeChromeTraceEvent("buck", "test", ChromeTraceEvent.Phase.END, arguments, finished);
}
/**
 * Converts a {@link BuckEvent} into a {@link ChromeTraceEvent} and hands it to
 * {@link #submitTraceEvent} for asynchronous serialization.
 *
 * @param category trace category (e.g. "buck", "javac", or a processor/plugin name)
 * @param name the trace event name
 * @param phase whether this event begins or ends a span
 * @param arguments extra key/value details attached to the trace entry
 * @param event the originating event, supplying the thread id and timestamps
 */
private void writeChromeTraceEvent(String category,
    String name,
    ChromeTraceEvent.Phase phase,
    ImmutableMap<String, String> arguments,
    final BuckEvent event) {
  final ChromeTraceEvent chromeTraceEvent = new ChromeTraceEvent(category,
      name,
      phase,
      0, // NOTE(review): constant passed as-is; presumably a process/elapsed id — confirm
         // against the ChromeTraceEvent constructor.
      event.getThreadId(),
      // Chrome trace timestamps are in microseconds; the event reports nanoseconds.
      TimeUnit.NANOSECONDS.toMicros(event.getNanoTime()),
      TimeUnit.NANOSECONDS.toMicros(event.getThreadUserNanoTime()),
      arguments);
  submitTraceEvent(chromeTraceEvent);
}
/**
 * Queues serialization of the given trace event on the output executor so that
 * callers are not blocked on JSON writing.  Write failures are deliberately
 * ignored — tracing must never fail the build.
 */
@SuppressWarnings("PMD.EmptyCatchBlock")
private void submitTraceEvent(final ChromeTraceEvent chromeTraceEvent) {
  outputExecutor.submit(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      try {
        mapper.writeValue(jsonGenerator, chromeTraceEvent);
      } catch (IOException e) {
        // Swallow any failures to write.
      }
      return null;
    }
  });
}
/** Immutable pairing of a trace file's path with the stream it is written to. */
private class TracePathAndStream {

  private final Path tracePath;
  private final OutputStream traceStream;

  public TracePathAndStream(Path path, OutputStream stream) {
    this.tracePath = path;
    this.traceStream = stream;
  }

  /** Returns the path of the trace file. */
  public Path getPath() {
    return tracePath;
  }

  /** Returns the stream the trace is written to. */
  public OutputStream getStream() {
    return traceStream;
  }
}
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on May 18, 2006
*/
package docking.widgets.filechooser;
import java.awt.*;
import java.awt.event.*;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import javax.swing.*;
import docking.event.mouse.GMouseListenerAdapter;
import docking.widgets.AutoLookup;
import docking.widgets.label.GDLabel;
import docking.widgets.list.GList;
import docking.widgets.list.GListAutoLookup;
import ghidra.util.exception.AssertException;
/**
 * The list view used by {@link GhidraFileChooser} to display the contents of a
 * directory.  Supports navigating into directories (double-click/Enter), choosing
 * files, and in-place renaming via a small editor panel overlaid on the edited cell.
 */
class DirectoryList extends GList<File> implements GhidraFileChooserDirectoryModelIf {
    private static final int DEFAULT_ICON_SIZE = 16;
    private static final int MIN_HEIGHT_PADDING = 5;

    private GhidraFileChooser chooser;
    private DirectoryListModel model;

    // Components of the in-place rename editor: an icon label and a text field
    // wrapped in a bordered panel that is positioned over the edited cell.
    private JLabel listEditorLabel;
    private JTextField listEditorField;
    private JPanel listEditor;

    /** The file being edited; null when no edit is in progress */
    private File editedFile;

    /**
     * Create a new DirectoryList instance.
     *
     * @param chooser the {@link GhidraFileChooser} this instance is nested in
     * @param model the {@link DirectoryListModel}
     * @param font the parent component's font, used to calculate row height in the list once
     */
    DirectoryList(GhidraFileChooser chooser, DirectoryListModel model, Font font) {
        super(model);
        this.chooser = chooser;
        this.model = model;
        build(font);
    }

    // Builds the cell renderer, mouse/key/selection listeners, and the (initially
    // detached) rename editor panel.
    private void build(Font font) {
        setLayoutOrientation(JList.VERTICAL_WRAP);

        FileListCellRenderer cellRenderer = new FileListCellRenderer(chooser);
        setCellRenderer(cellRenderer);

        // Enable the list to calculate the width of the cells on its own, but manually
        // specify the height to ensure some padding between rows.
        // We need the parent component's Font instead of using our
        // own #getFont() because we are not a child of the parent yet and
        // the font may be set to something other than the default.
        // Use 1/3 of the line height of the font to ensure visually consistent
        // padding between rows. (historically, 5px was used as the padding
        // between the default 12pt (15px lineht) rows, so 15px lineht/5px padding
        // equals .333 ratio.)
        FontMetrics metrics = cellRenderer.getFontMetrics(font);
        setFixedCellHeight(
            Math.max(metrics.getHeight(), DEFAULT_ICON_SIZE) +
                Math.max(metrics.getHeight() / 3, MIN_HEIGHT_PADDING));
        setFixedCellWidth(-1);

        addMouseListener(new GMouseListenerAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                super.mouseClicked(e);

                // always end editing on a mouse click of any kind
                listEditor.setVisible(false);
                requestFocus();
            }

            @Override
            public boolean shouldConsume(MouseEvent e) {
                // consume popup triggers while an edit is in progress so the popup
                // does not appear on top of the active editor
                if (e.isPopupTrigger() && isEditing()) {
                    return true;
                }
                return false;
            }

            @Override
            public void popupTriggered(MouseEvent e) {
                maybeSelectItem(e);
            }

            @Override
            public void doubleClickTriggered(MouseEvent e) {
                handleDoubleClick();
            }
        });

        addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ENTER) {
                    e.consume();
                    handleEnterKey();
                }
            }
        });

        addListSelectionListener(e -> {
            // wait for the final selection event before notifying the chooser
            if (e.getValueIsAdjusting()) {
                return;
            }
            updateChooserForSelection();
        });

        listEditorLabel = new GDLabel();
        listEditorLabel.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseReleased(MouseEvent e) {
                // a double-click on the editor's icon navigates into the directory
                // being edited and abandons the edit
                int index = locationToIndex(new Point(listEditor.getX(), listEditor.getY()));
                File file = model.getFile(index);
                if (e.getClickCount() == 2) {
                    if (chooser.getModel().isDirectory(file)) {
                        chooser.setCurrentDirectory(file);
                    }
                    cancelListEdit();
                }
            }
        });

        listEditorField = new JTextField();
        listEditorField.setName("LIST_EDITOR_FIELD");
        listEditorField.addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    cancelListEdit();
                    e.consume();
                }
            }

            @Override
            public void keyReleased(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    listEditor.setVisible(false);
                    e.consume();
                }
                else if (e.getKeyCode() == KeyEvent.VK_ENTER) {
                    String invalidFilenameMessage =
                        chooser.getInvalidFilenameMessage(listEditorField.getText());
                    if (invalidFilenameMessage != null) {
                        chooser.setStatusText(invalidFilenameMessage);
                        // keep the user in the field by not stopping the current edit
                    }
                    else {
                        stopListEdit();
                    }
                    e.consume();
                }
            }
        });
        listEditorField.addFocusListener(new FocusAdapter() {
            @Override
            public void focusLost(FocusEvent e) {
                // Tracker SCR 3358 - Keep changes on focus lost
                stopListEdit();
            }
        });

        listEditor = new JPanel(new BorderLayout());
        listEditor.setBorder(BorderFactory.createLineBorder(Color.GRAY));
        listEditor.add(listEditorLabel, BorderLayout.WEST);
        listEditor.add(listEditorField, BorderLayout.CENTER);
        listEditor.setBackground(Color.WHITE);
        listEditorField.setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2));
        add(listEditor);
    }

    // Enter either opens the selected directory, chooses the selected file, or
    // defers to the chooser when the selection is empty or spans multiple rows.
    private void handleEnterKey() {
        int[] selectedIndices = getSelectedIndices();
        if (selectedIndices.length == 0) {
            chooser.okCallback();
            // this implies the user has somehow put focus into the table, but has not
            // made a selection...just let the chooser decide what to do
            return;
        }

        if (selectedIndices.length > 1) {
            // let the chooser decide what to do with multiple rows selected
            chooser.okCallback();
            return;
        }

        File file = model.getFile(selectedIndices[0]);
        if (chooser.getModel().isDirectory(file)) {
            chooser.setCurrentDirectory(file);
        }
        else {
            chooser.userChoseFile(file);
        }
    }

    // Selects the item under the mouse event's location, if any (used so that a
    // popup trigger also selects the item it is over).
    private void maybeSelectItem(MouseEvent e) {
        Point point = e.getPoint();
        int index = locationToIndex(point);
        if (index < 0) {
            return;
        }
        setSelectedIndex(index);
    }

    // Double-click: navigate into a single selected directory or choose a single
    // selected file; any other selection size is ignored.
    private void handleDoubleClick() {
        List<File> selectedFiles = new ArrayList<>();
        int[] selectedIndices = getSelectedIndices();
        for (int i : selectedIndices) {
            selectedFiles.add(model.getFile(i));
        }

        if (selectedFiles.size() == 0 || selectedFiles.size() > 1) {
            return; // not sure if this can happen, maybe with the Ctrl key pressed
        }

        File file = selectedFiles.get(0);
        if (chooser.getModel().isDirectory(file)) {
            chooser.setCurrentDirectory(file); // the user wants to navigate into the directory
        }
        else {
            chooser.userChoseFile(file); // the user has chosen the file
        }
    }

    // Pushes the current list selection back to the chooser.
    private void updateChooserForSelection() {
        List<File> selectedFiles = new ArrayList<>();
        int[] selectedIndices = getSelectedIndices();
        for (int index : selectedIndices) {
            selectedFiles.add(model.getFile(index));
        }
        chooser.userSelectedFiles(selectedFiles);
    }

    @Override
    protected AutoLookup createAutoLookup() {
        // NOTE(review): binary search is disabled for keyboard auto-lookup here —
        // presumably because the list contents are not guaranteed sorted; confirm.
        return new GListAutoLookup<>(this) {
            @Override
            protected boolean canBinarySearchColumn(int column) {
                return false;
            }
        };
    }

    @Override
    public int[] getSelectedRows() {
        return getSelectedIndices();
    }

    /** Returns the single selected file, or null when nothing is selected. */
    @Override
    public File getSelectedFile() {
        int index = getSelectedIndex();
        if (index < 0) {
            return null;
        }
        return model.getFile(index);
    }

    @Override
    public File getFile(int row) {
        return model.getFile(row);
    }

    /** Starts an in-place rename of the currently selected cell, if any. */
    @Override
    public void edit() {
        int index = getSelectedIndex();
        editListCell(index);
    }

    @Override
    public void setSelectedFile(File file) {
        int[] selectedIndices = getSelectedIndices();
        if (selectedIndices.length == 1) {
            File selectedFile = model.getFile(selectedIndices[0]);
            if (selectedFile.equals(file)) {
                return; // selection hasn't changed; nothing to do
            }
        }

        // linear scan of the model for the matching file
        for (int i = 0; i < model.getSize(); i++) {
            File aFile = model.getFile(i);
            if ((aFile != null) && aFile.equals(file)) {
                setSelectedIndex(i);
                Rectangle rect = getCellBounds(i, i);
                scrollRectToVisible(rect);
                return;
            }
        }
    }

    // Selects all of the given files by their indices in the model.
    void setSelectedFiles(Iterable<File> files) {
        List<Integer> indexes = new ArrayList<>();
        for (File f : files) {
            indexes.add(model.indexOfFile(f));
        }

        int[] indices = new int[indexes.size()];
        for (int i = 0; i < indices.length; i++) {
            indices[i] = indexes.get(i);
        }

        setSelectedIndices(indices);
    }

    private boolean isEditing() {
        return (editedFile != null);
    }

    // Shows the rename editor over the cell at the given index (-1 is a no-op).
    void editListCell(int index) {
        if (index == -1) {
            return;
        }
        add(listEditor);
        Rectangle r = getCellBounds(index, index);
        editedFile = model.getFile(index);
        if (editedFile == null) {
            throw new AssertException(
                "Unexpected condition - asked to edit file that " + "does not exist in model");
        }

        listEditor.setBounds(r.x, r.y, r.width, r.height);
        listEditor.setVisible(true);
        listEditorLabel.setIcon(chooser.getModel().getIcon(editedFile));
        listEditorField.setText(editedFile.getName());
        listEditorField.requestFocus();
        listEditorField.selectAll();
    }

    // Abandons any in-progress rename and hides/resets the editor components.
    void cancelListEdit() {
        editedFile = null;
        remove(listEditor);
        listEditor.setVisible(false);
        listEditorLabel.setIcon(null);
        listEditorField.setText("");
        repaint();
    }

    // Commits an in-progress rename: validates the new name, renames through the
    // chooser's model, and updates the selection to the renamed file.
    void stopListEdit() {
        // this method can be called even when we are not editing
        if (!isEditing()) {
            return;
        }

        String invalidFilenameMessage =
            chooser.getInvalidFilenameMessage(listEditorField.getText());
        if (invalidFilenameMessage != null) {
            chooser.setStatusText("Rename aborted - " + invalidFilenameMessage);
            cancelListEdit();
            return;
        }

        // capture edit state before cancelListEdit() clears it
        File editedFileCopy = editedFile;
        int index = model.indexOfFile(editedFileCopy);
        if (index < 0) {
            throw new AssertException("Somehow editing file not in our model.");
        }

        File dest = new File(editedFileCopy.getParentFile(), listEditorField.getText());
        cancelListEdit();
        if (chooser.getModel().renameFile(editedFileCopy, dest)) {
            chooser.setStatusText("");
            model.set(index, dest);
            //chooser.updateFiles(chooser.getCurrentDirectory(), true);
            chooser.setSelectedFileAndUpdateDisplay(dest);
        }
        else {
            chooser.setStatusText("Unable to rename " + editedFileCopy);
        }
    }

    /*junit*/ JTextField getListEditorText() {
        return listEditorField;
    }
}
| |
/**
*/
package geometry.provider;
import geometry.Geometry;
import geometry.GeometryFactory;
import geometry.GeometryPackage;
import geometry.commands.CreateGObjectWLabel;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.command.Command;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.command.CommandParameter;
import org.eclipse.emf.edit.command.SetCommand;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
/**
* This is the item provider adapter for a {@link geometry.Geometry} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class GeometryItemProvider
    extends ItemProviderAdapter
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public GeometryItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        // lazily populated on first call; the superclass fills itemPropertyDescriptors
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        // lazily populated; children of a Geometry are its GObjects
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            childrenFeatures.add(GeometryPackage.Literals.GEOMETRY__GOBJECTS);
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.
        return super.getChildFeature(object, child);
    }

    /**
     * This returns Geometry.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/Geometry"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        return getString("_UI_Geometry_type");
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);

        switch (notification.getFeatureID(Geometry.class)) {
            // structural change to the GObjects list: refresh children, not labels
            case GeometryPackage.GEOMETRY__GOBJECTS:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createGObject()));

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createLine()));

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createPoint()));

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createBendPoint()));

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createConnector()));

        newChildDescriptors.add
            (createChildParameter
                (GeometryPackage.Literals.GEOMETRY__GOBJECTS,
                 GeometryFactory.eINSTANCE.createInputPoint()));
    }

    /**
     * Return the resource locator for this item provider's resources.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public ResourceLocator getResourceLocator() {
        return GeometryEditPlugin.INSTANCE;
    }

    /**
     * When you add a new GObject to the tree editor the GObject will have
     * a specific label.
     * @author Morten
     */
    @Override
    protected Command createAddCommand(EditingDomain domain, EObject owner,
            EStructuralFeature feature, Collection<?> collection, int index) {
        // wrap the generated AddCommand so newly added GObjects get a label
        if (feature == GeometryPackage.eINSTANCE.getGeometry_GObjects()) {
            return new CreateGObjectWLabel(domain, owner,
                super.createAddCommand(domain, owner, feature, collection, index));
        }
        return super.createAddCommand(domain, owner, feature, collection, index);
    }
}
| |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.system;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Range;
import com.google.common.io.Files;
import com.google.common.util.concurrent.FutureFallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.Service;
import com.fasterxml.jackson.core.type.TypeReference;
import com.spotify.docker.client.ContainerNotFoundException;
import com.spotify.docker.client.DefaultDockerClient;
import com.spotify.docker.client.DockerCertificates;
import com.spotify.docker.client.DockerClient;
import com.spotify.docker.client.DockerException;
import com.spotify.docker.client.DockerRequestException;
import com.spotify.docker.client.ImageNotFoundException;
import com.spotify.docker.client.LogMessage;
import com.spotify.docker.client.LogReader;
import com.spotify.docker.client.messages.Container;
import com.spotify.docker.client.messages.ContainerConfig;
import com.spotify.docker.client.messages.ContainerCreation;
import com.spotify.docker.client.messages.ContainerInfo;
import com.spotify.docker.client.messages.HostConfig;
import com.spotify.docker.client.messages.PortBinding;
import com.spotify.helios.Polling;
import com.spotify.helios.TemporaryPorts;
import com.spotify.helios.TemporaryPorts.AllocatedPort;
import com.spotify.helios.ZooKeeperTestManager;
import com.spotify.helios.ZooKeeperTestingServerManager;
import com.spotify.helios.agent.AgentMain;
import com.spotify.helios.cli.CliMain;
import com.spotify.helios.client.HeliosClient;
import com.spotify.helios.common.Json;
import com.spotify.helios.common.descriptors.Deployment;
import com.spotify.helios.common.descriptors.DeploymentGroupStatus;
import com.spotify.helios.common.descriptors.HostStatus;
import com.spotify.helios.common.descriptors.Job;
import com.spotify.helios.common.descriptors.JobId;
import com.spotify.helios.common.descriptors.JobStatus;
import com.spotify.helios.common.descriptors.PortMapping;
import com.spotify.helios.common.descriptors.ServiceEndpoint;
import com.spotify.helios.common.descriptors.ServicePorts;
import com.spotify.helios.common.descriptors.TaskStatus;
import com.spotify.helios.common.descriptors.ThrottleState;
import com.spotify.helios.common.protocol.DeploymentGroupStatusResponse;
import com.spotify.helios.common.protocol.JobDeleteResponse;
import com.spotify.helios.common.protocol.JobUndeployResponse;
import com.spotify.helios.master.MasterMain;
import com.spotify.helios.servicescommon.DockerHost;
import com.spotify.helios.servicescommon.coordination.CuratorClientFactory;
import com.spotify.helios.servicescommon.coordination.Paths;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.curator.framework.CuratorFramework;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestRule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.Socket;
import java.net.URI;
import java.nio.file.Path;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static com.google.common.base.CharMatcher.WHITESPACE;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Lists.newArrayList;
import static com.spotify.helios.common.descriptors.Job.EMPTY_ENV;
import static com.spotify.helios.common.descriptors.Job.EMPTY_EXPIRES;
import static com.spotify.helios.common.descriptors.Job.EMPTY_GRACE_PERIOD;
import static com.spotify.helios.common.descriptors.Job.EMPTY_PORTS;
import static com.spotify.helios.common.descriptors.Job.EMPTY_REGISTRATION;
import static com.spotify.helios.common.descriptors.Job.EMPTY_VOLUMES;
import static com.spotify.helios.common.descriptors.Job.EMPTY_HOSTNAME;
import static java.lang.Integer.toHexString;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public abstract class SystemTestBase {
private static final Logger log = LoggerFactory.getLogger(SystemTestBase.class);
public static final int WAIT_TIMEOUT_SECONDS = 40;
public static final int LONG_WAIT_SECONDS = 400;
public static final String BUSYBOX = "busybox:latest";
public static final String NGINX = "rohan/nginx-alpine:latest";
public static final String UHTTPD = "fnichol/docker-uhttpd:latest";
public static final String ALPINE = "onescience/alpine:latest";
public static final String MEMCACHED = "rohan/memcached-mini:latest";
public static final List<String> IDLE_COMMAND = asList(
"sh", "-c", "trap 'exit 0' SIGINT SIGTERM; while :; do sleep 1; done");
public final String testTag = "test_" + randomHexString();
public final String testJobName = "job_" + testTag;
public final String testJobVersion = "v" + randomHexString();
public final String testJobNameAndVersion = testJobName + ":" + testJobVersion;
public static final DockerHost DOCKER_HOST = DockerHost.fromEnv();
public static final String TEST_USER = "test-user";
public static final String TEST_HOST = "test-host";
public static final String TEST_MASTER = "test-master";
@Rule public final TemporaryPorts temporaryPorts = TemporaryPorts.create();
@Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@Rule public final ExpectedException exception = ExpectedException.none();
@Rule public final TestRule watcher = new LoggingTestWatcher();
private int masterPort;
private int masterAdminPort;
private String masterEndpoint;
private boolean integrationMode;
private Range<Integer> dockerPortRange;
private final List<Service> services = newArrayList();
private final List<HeliosClient> clients = Lists.newArrayList();
private String testHost;
private Path agentStateDirs;
private Path masterStateDirs;
private String masterName;
private ZooKeeperTestManager zk;
protected static String zooKeeperNamespace = null;
protected final String zkClusterId = String.valueOf(ThreadLocalRandom.current().nextInt(10000));
/**
 * Routes java.util.logging output through SLF4J for the whole test run:
 * removes the default JUL root handlers, then installs the bridge.
 */
@BeforeClass
public static void staticSetup() {
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();
}
/**
 * Common per-test setup: allocates master ports, selects unit vs. integration
 * mode based on the test class name suffix, boots ZooKeeper, and creates the
 * agent/master state directories.
 *
 * @throws Exception if ZooKeeper setup or temp-folder creation fails
 */
@Before
public void baseSetup() throws Exception {
    // Make helios report a deterministic user in tests.
    System.setProperty("user.name", TEST_USER);
    masterPort = temporaryPorts.localPort("helios master");
    masterAdminPort = temporaryPorts.localPort("helios master admin");

    String className = getClass().getName();
    if (className.endsWith("ITCase")) {
        // integration test: talk to an externally provided master
        masterEndpoint = checkNotNull(System.getenv("HELIOS_ENDPOINT"),
            "For integration tests, HELIOS_ENDPOINT *must* be set");
        integrationMode = true;
    } else if (className.endsWith("Test")) {
        integrationMode = false;
        masterEndpoint = "http://localhost:" + masterPort();
        // unit test
    } else {
        throw new RuntimeException("Test class' name must end in either 'Test' or 'ITCase'.");
    }

    zk = zooKeeperTestManager();
    listThreads();
    // Pre-create the ZooKeeper paths the master/agents expect.
    zk.ensure("/config");
    zk.ensure("/status");
    agentStateDirs = temporaryFolder.newFolder("helios-agents").toPath();
    masterStateDirs = temporaryFolder.newFolder("helios-masters").toPath();
}
/**
 * Verifies before each test that docker containers are reachable.  When the
 * DOCKER_PORT_RANGE environment variable ("start:end") is set, the probe port
 * is drawn from that range; otherwise a fresh local port range is allocated.
 *
 * @throws Exception if docker cannot be reached on the probe port
 */
@Before
public void dockerSetup() throws Exception {
    final String portRange = System.getenv("DOCKER_PORT_RANGE");

    final AllocatedPort allocatedPort;
    final int probePort;
    if (portRange != null) {
        final String[] parts = portRange.split(":", 2);
        dockerPortRange = Range.closedOpen(Integer.valueOf(parts[0]),
            Integer.valueOf(parts[1]));
        // Keep trying random ports within the range until one can be acquired.
        allocatedPort = Polling.await(LONG_WAIT_SECONDS, SECONDS, new Callable<AllocatedPort>() {
            @Override
            public AllocatedPort call() throws Exception {
                final int port = ThreadLocalRandom.current().nextInt(dockerPortRange.lowerEndpoint(),
                    dockerPortRange.upperEndpoint());
                return temporaryPorts.tryAcquire("docker-probe", port);
            }
        });
        probePort = allocatedPort.port();
    } else {
        dockerPortRange = temporaryPorts.localPortRange("docker", 10);
        probePort = dockerPortRange().lowerEndpoint();
        allocatedPort = null;
    }

    try {
        assertDockerReachable(probePort);
    } finally {
        // Release the explicitly acquired probe port even if the probe failed.
        if (allocatedPort != null) {
            allocatedPort.release();
        }
    }
}
/**
 * Creates a fresh docker client for DOCKER_HOST, using TLS certificates when a
 * cert path is configured in the environment.
 *
 * @throws Exception if the certificates cannot be loaded
 */
protected DockerClient getNewDockerClient() throws Exception {
    final String certPath = DOCKER_HOST.dockerCertPath();
    if (isNullOrEmpty(certPath)) {
        // No TLS configured: plain connection.
        return new DefaultDockerClient(DOCKER_HOST.uri());
    }
    final Path dockerCertPath = java.nio.file.Paths.get(certPath);
    return new DefaultDockerClient(DOCKER_HOST.uri(), new DockerCertificates(dockerCertPath));
}
/**
 * Proves end-to-end docker reachability: pulls the base images if missing,
 * starts a busybox container listening on an exposed port, and polls until a
 * TCP connection to DOCKER_HOST on the probe port succeeds.  Fails the test
 * with a diagnostic message when containers cannot be reached.
 *
 * @param probePort host port to bind the container's listener to
 * @throws Exception on docker errors or polling timeouts
 */
private void assertDockerReachable(final int probePort) throws Exception {
    try (final DockerClient docker = getNewDockerClient()) {
        // Pull our base images
        try {
            docker.inspectImage(BUSYBOX);
        } catch (ImageNotFoundException e) {
            docker.pull(BUSYBOX);
        }

        try {
            docker.inspectImage(ALPINE);
        } catch (ImageNotFoundException e) {
            docker.pull(ALPINE);
        }

        // Start a container with an exposed port
        final HostConfig hostConfig = HostConfig.builder()
            .portBindings(ImmutableMap.of("4711/tcp",
                singletonList(PortBinding.of("0.0.0.0", probePort))))
            .build();
        final ContainerConfig config = ContainerConfig.builder()
            .image(BUSYBOX)
            // nc echoes back whatever is sent, keeping the port open
            .cmd("nc", "-p", "4711", "-lle", "cat")
            .exposedPorts(ImmutableSet.of("4711/tcp"))
            .hostConfig(hostConfig)
            .build();
        final ContainerCreation creation = docker.createContainer(config, testTag + "-probe");
        final String containerId = creation.id();
        docker.startContainer(containerId);

        // Wait for container to come up
        Polling.await(5, SECONDS, new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                final ContainerInfo info = docker.inspectContainer(containerId);
                // Polling treats null as "not yet"; any non-null value stops the poll
                return info.state().running() ? true : null;
            }
        });

        log.info("Verifying that docker containers are reachable");
        try {
            Polling.awaitUnchecked(5, SECONDS, new Callable<Object>() {
                @Override
                public Object call() throws Exception {
                    log.info("Probing: {}:{}", DOCKER_HOST.address(), probePort);
                    try (final Socket ignored = new Socket(DOCKER_HOST.address(), probePort)) {
                        return true;
                    } catch (IOException e) {
                        return false;
                    }
                }
            });
        } catch (TimeoutException e) {
            fail("Please ensure that DOCKER_HOST is set to an address that where containers can " +
                 "be reached. If docker is running in a local VM, DOCKER_HOST must be set to the " +
                 "address of that VM. If docker can only be reached on a limited port range, " +
                 "set the environment variable DOCKER_PORT_RANGE=start:end");
        }

        docker.killContainer(containerId);
    }
}
/** Creates the ZooKeeper test manager; overridable by subclasses. */
protected ZooKeeperTestManager zooKeeperTestManager() {
    // Default: an in-process testing server scoped to this test's namespace.
    final ZooKeeperTestingServerManager manager =
        new ZooKeeperTestingServerManager(zooKeeperNamespace);
    return manager;
}
@After
public void baseTeardown() throws Exception {
    tearDownJobs();
    // Close every client handed out by client().
    for (final HeliosClient client : clients) {
        client.close();
    }
    clients.clear();
    // First request all services to stop, then wait for each to terminate,
    // so shutdowns proceed in parallel.
    for (final Service service : services) {
        try {
            service.stopAsync();
        } catch (Exception e) {
            log.error("Uncaught exception", e);
        }
    }
    for (final Service service : services) {
        try {
            service.awaitTerminated();
        } catch (Exception e) {
            log.error("Service failed", e);
        }
    }
    services.clear();
    // Clean up docker: kill any containers whose name carries this test's tag.
    try (final DockerClient dockerClient = getNewDockerClient()) {
        final List<Container> containers = dockerClient.listContainers();
        for (final Container container : containers) {
            for (final String name : container.names()) {
                if (name.contains(testTag)) {
                    try {
                        dockerClient.killContainer(container.id());
                    } catch (DockerException e) {
                        // Use the test logger instead of printStackTrace() so the
                        // failure lands in the test output with the other logs.
                        log.error("Failed to kill container " + container.id(), e);
                    }
                    break;
                }
            }
        }
    } catch (Exception e) {
        log.error("Docker client exception", e);
    }
    if (zk != null) {
        zk.close();
    }
    listThreads();
}
/** Logs all live, non-system threads, sorted by name, for leak debugging. */
private void listThreads() {
    // TreeMap gives name-sorted iteration for the log output below.
    final Map<String, Thread> byName = Maps.newTreeMap();
    for (final Thread thread : Thread.getAllStackTraces().keySet()) {
        final ThreadGroup group = thread.getThreadGroup();
        final boolean isSystem = group != null && group.getName().equals("system");
        if (thread.isAlive() && !isSystem) {
            byName.put(thread.getName(), thread);
        }
    }
    log.info("= THREADS " + Strings.repeat("=", 70));
    for (final Thread thread : byName.values()) {
        final ThreadGroup group = thread.getThreadGroup();
        log.info("{}: \"{}\" ({}{})", thread.getId(), thread.getName(),
            (group == null ? "" : group.getName() + " "),
            (thread.isDaemon() ? "daemon" : ""));
    }
    log.info(Strings.repeat("=", 80));
}
/**
 * Integration-test cleanup: undeploys and deletes every job whose id starts
 * with this test's tag. No-op outside integration mode or when
 * ITCASE_PRESERVE_JOBS is set.
 */
protected void tearDownJobs() throws InterruptedException, ExecutionException {
    if (!isIntegration()) {
        return;
    }
    if (System.getenv("ITCASE_PRESERVE_JOBS") != null) {
        return;
    }
    final List<ListenableFuture<JobUndeployResponse>> undeploys = Lists.newArrayList();
    final HeliosClient c = defaultClient();
    final Map<JobId, Job> jobs = c.jobs().get();
    for (final JobId jobId : jobs.keySet()) {
        if (!jobId.toString().startsWith(testTag)) {
            continue;
        }
        final JobStatus st = c.jobStatus(jobId).get();
        final Set<String> hosts = st.getDeployments().keySet();
        for (final String host : hosts) {
            log.info("Undeploying job " + jobId);
            undeploys.add(c.undeploy(jobId, host));
        }
    }
    // BUGFIX: Futures.allAsList() only combines the futures — without get()
    // nothing waited for the undeploys, so the deletes below could race them.
    Futures.allAsList(undeploys).get();
    final List<ListenableFuture<JobDeleteResponse>> deletes = Lists.newArrayList();
    for (final JobId jobId : jobs.keySet()) {
        if (!jobId.toString().startsWith(testTag)) {
            continue;
        }
        log.info("Deleting job " + jobId);
        deletes.add(c.deleteJob(jobId));
    }
    // Likewise, actually wait for the deletions to complete.
    Futures.allAsList(deletes).get();
}
/** Whether this run is an integration test (class name ends in "ITCase"). */
protected boolean isIntegration() {
return integrationMode;
}
/** Port allocator shared by this test. */
protected TemporaryPorts temporaryPorts() {
return temporaryPorts;
}
/** ZooKeeper test manager backing this test. */
protected ZooKeeperTestManager zk() {
return zk;
}
/** HTTP endpoint of the helios master under test. */
protected String masterEndpoint() {
return masterEndpoint;
}
/**
 * Returns the master name: in integration mode the first listed master
 * (fetched once and cached), otherwise a fixed test name.
 */
protected String masterName() throws InterruptedException, ExecutionException {
    if (!integrationMode) {
        return "test-master";
    }
    if (masterName == null) {
        masterName = defaultClient().listMasters().get().get(0);
    }
    return masterName;
}

/** A client for the default test user against the default master. */
protected HeliosClient defaultClient() {
    return client(TEST_USER, masterEndpoint());
}

/**
 * Builds a helios client for the given user and endpoint. The client is
 * tracked so baseTeardown() closes it.
 */
protected HeliosClient client(final String user, final String endpoint) {
    final URI uri = URI.create(endpoint);
    final HeliosClient client = HeliosClient.newBuilder()
        .setUser(user)
        .setEndpoints(singletonList(uri))
        .build();
    clients.add(client);
    return client;
}
/** Base HTTP port of the helios master. */
protected int masterPort() {
return masterPort;
}
/** Base admin port of the helios master. */
protected int masterAdminPort() {
return masterAdminPort;
}
/** Port range on which docker containers are reachable. */
public Range<Integer> dockerPortRange() {
return dockerPortRange;
}
/**
 * Returns the agent host to run jobs on: in integration mode a random
 * registered host (picked once and cached), otherwise the fixed test host.
 */
protected String testHost() throws InterruptedException, ExecutionException {
if (integrationMode) {
if (testHost == null) {
final List<String> hosts = defaultClient().listHosts().get();
testHost = hosts.get(new SecureRandom().nextInt(hosts.size()));
}
return testHost;
} else {
return TEST_HOST;
}
}
protected List<String> setupDefaultMaster(String... args) throws Exception {
return setupDefaultMaster(0, args);
}
/**
 * Bootstraps the required ZooKeeper paths and builds the argument list for a
 * default master. Returns null in integration mode (masters run externally).
 *
 * @param offset added to the HTTP/admin ports so multiple masters can coexist
 */
protected List<String> setupDefaultMaster(final int offset, String... args) throws Exception {
if (isIntegration()) {
checkArgument(args.length == 0,
"cannot start default master in integration test with arguments passed");
return null;
}
// TODO (dano): Move this bootstrapping to something reusable
final CuratorFramework curator = zk.curator();
curator.newNamespaceAwareEnsurePath(Paths.configHosts()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.configJobs()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.configJobRefs()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.statusHosts()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.statusMasters()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.historyJobs()).ensure(curator.getZookeeperClient());
curator.newNamespaceAwareEnsurePath(Paths.configId(zkClusterId))
.ensure(curator.getZookeeperClient());
final List<String> argsList = Lists.newArrayList(
"-vvvv",
"--no-log-setup",
"--http", "http://localhost:" + (masterPort() + offset),
"--admin=" + (masterAdminPort() + offset),
"--domain", "",
"--zk", zk.connectString()
);
// Honor a caller-supplied --name; otherwise derive one from the offset.
final String name;
if (asList(args).contains("--name")) {
name = args[asList(args).indexOf("--name") + 1];
} else {
name = TEST_MASTER + offset;
// NOTE(review): the state dir below uses TEST_MASTER + offset, but the
// "--name" added here omits the offset — for offset > 0 the registered
// master name and its state dir disagree. Confirm this is intentional.
argsList.addAll(asList("--name", TEST_MASTER));
}
final String stateDir = masterStateDirs.resolve(name).toString();
argsList.addAll(asList("--state-dir", stateDir));
argsList.addAll(asList(args));
return argsList;
}
/** Starts a default master with port offset 0. */
protected MasterMain startDefaultMaster(String... args) throws Exception {
    return startDefaultMaster(0, args);
}

/**
 * Starts a default master at the given port offset and waits until it has
 * registered in ZooKeeper. Returns null in integration mode.
 */
protected MasterMain startDefaultMaster(final int offset, String... args) throws Exception {
    final List<String> masterArgs = setupDefaultMaster(offset, args);
    if (masterArgs == null) {
        // Integration mode: masters already run externally.
        return null;
    }
    final MasterMain master = startMaster(masterArgs.toArray(new String[masterArgs.size()]));
    waitForMasterToConnectToZK();
    return master;
}

/** Starts {@code numMasters} default masters, keyed by their generated names. */
protected Map<String, MasterMain> startDefaultMasters(final int numMasters, String... args)
    throws Exception {
    final Map<String, MasterMain> masters = Maps.newHashMap();
    for (int i = 0; i < numMasters; i++) {
        final String nodeName = TEST_MASTER + i;
        final List<String> masterArgs = Lists.newArrayList(args);
        masterArgs.addAll(asList("--name", nodeName));
        masters.put(nodeName, startDefaultMaster(i, masterArgs.toArray(new String[masterArgs.size()])));
    }
    return masters;
}
/** Blocks until the default client can list masters, i.e. a master is registered in ZK. */
protected void waitForMasterToConnectToZK() throws Exception {
Polling.await(WAIT_TIMEOUT_SECONDS, SECONDS, new Callable<Object>() {
@Override
public Object call() {
try {
final List<String> masters = defaultClient().listMasters().get();
return masters != null;
} catch (Exception e) {
// Master not reachable yet; keep polling.
return null;
}
}
});
}
/** Starts a default master without waiting for it to register in ZooKeeper. */
protected void startDefaultMasterDontWaitForZK(final CuratorClientFactory curatorClientFactory,
String... args) throws Exception {
List<String> argsList = setupDefaultMaster(args);
if (argsList == null) {
// Integration mode: nothing to start locally.
return;
}
startMaster(curatorClientFactory, argsList.toArray(new String[argsList.size()]));
}
/**
 * Starts a helios agent named {@code host} with test defaults: local docker,
 * the shared ZooKeeper, a per-host state dir and the test docker port range.
 * Returns null in integration mode, where agents run externally.
 */
protected AgentMain startDefaultAgent(final String host, final String... args)
throws Exception {
if (isIntegration()) {
checkArgument(args.length == 0,
"cannot start default agent in integration test with arguments passed");
return null;
}
final String stateDir = agentStateDirs.resolve(host).toString();
final List<String> argsList = Lists.newArrayList("-vvvv",
"--no-log-setup",
"--no-http",
"--name", host,
"--docker=" + DOCKER_HOST,
"--zk", zk.connectString(),
// Short ZK timeouts keep failure-detection tests fast.
"--zk-session-timeout", "100",
"--zk-connection-timeout", "100",
"--state-dir", stateDir,
"--domain", "",
"--port-range=" +
dockerPortRange.lowerEndpoint() + ":" +
dockerPortRange.upperEndpoint()
);
// Caller-supplied args come last so they can override the defaults.
argsList.addAll(asList(args));
return startAgent(argsList.toArray(new String[argsList.size()]));
}
/** Starts a master from CLI-style args and registers it for teardown. */
protected MasterMain startMaster(final String... args) throws Exception {
    final MasterMain main = new MasterMain(args);
    // Track the service BEFORE awaiting startup so baseTeardown() still stops
    // it if awaitRunning() throws (previously a failed start leaked the service).
    services.add(main);
    main.startAsync().awaitRunning();
    return main;
}

/** Starts a master with an explicit curator factory; registered for teardown. */
MasterMain startMaster(final CuratorClientFactory curatorClientFactory,
                       final String... args) throws Exception {
    final MasterMain main = new MasterMain(curatorClientFactory, args);
    // Track before starting — see startMaster(String...).
    services.add(main);
    main.startAsync().awaitRunning();
    return main;
}

/** Starts an agent from CLI-style args and registers it for teardown. */
protected AgentMain startAgent(final String... args) throws Exception {
    final AgentMain main = new AgentMain(args);
    // Track before starting — see startMaster(String...).
    services.add(main);
    main.startAsync().awaitRunning();
    return main;
}
/**
 * Convenience overloads for creating a job via the CLI. Each overload fills
 * in EMPTY_* defaults and delegates to the full builder-based variant below.
 */
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command) throws Exception {
return createJob(name, version, image, command, EMPTY_ENV, EMPTY_PORTS, EMPTY_REGISTRATION);
}
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command,
final Date expires) throws Exception {
return createJob(name, version, image, EMPTY_HOSTNAME, command, EMPTY_ENV, EMPTY_PORTS,
EMPTY_REGISTRATION, EMPTY_GRACE_PERIOD, EMPTY_VOLUMES, expires);
}
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command,
final ImmutableMap<String, String> env)
throws Exception {
return createJob(name, version, image, command, env, EMPTY_PORTS, EMPTY_REGISTRATION);
}
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command,
final Map<String, String> env,
final Map<String, PortMapping> ports) throws Exception {
return createJob(name, version, image, command, env, ports, EMPTY_REGISTRATION);
}
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command,
final Map<String, String> env,
final Map<String, PortMapping> ports,
final Map<ServiceEndpoint, ServicePorts> registration)
throws Exception {
return createJob(name, version, image, command, env, ports, registration, EMPTY_GRACE_PERIOD,
EMPTY_VOLUMES);
}
protected JobId createJob(final String name,
final String version,
final String image,
final List<String> command,
final Map<String, String> env,
final Map<String, PortMapping> ports,
final Map<ServiceEndpoint, ServicePorts> registration,
final Integer gracePeriod,
final Map<String, String> volumes) throws Exception {
return createJob(name, version, image, EMPTY_HOSTNAME, command, env, ports, registration,
gracePeriod, volumes, EMPTY_EXPIRES);
}
// Fully specified variant: builds the Job and hands it to createJob(Job).
protected JobId createJob(final String name,
final String version,
final String image,
final String hostname,
final List<String> command,
final Map<String, String> env,
final Map<String, PortMapping> ports,
final Map<ServiceEndpoint, ServicePorts> registration,
final Integer gracePeriod,
final Map<String, String> volumes,
final Date expires) throws Exception {
return createJob(Job.newBuilder()
.setName(name)
.setVersion(version)
.setImage(image)
.setHostname(hostname)
.setCommand(command)
.setEnv(env)
.setPorts(ports)
.setRegistration(registration)
.setGracePeriod(gracePeriod)
.setVolumes(volumes)
.setExpires(expires)
.build());
}
/** Creates the job via the CLI and parses the printed job id. */
protected JobId createJob(final Job job) throws Exception {
    return JobId.fromString(WHITESPACE.trimFrom(createJobRawOutput(job)));
}

/**
 * Serializes the job spec to a temp file, feeds it to "helios create" and
 * returns the raw CLI output. The job name must carry the test tag so that
 * teardown can find and remove it.
 */
protected String createJobRawOutput(final Job job) throws Exception {
    final String jobName = job.getId().getName();
    checkArgument(jobName.contains(testTag), "Job name must contain testTag to enable cleanup");
    final File configFile = temporaryFolder.newFile();
    Files.write(Json.asNormalizedString(job), configFile, Charsets.UTF_8);
    final List<String> args = ImmutableList.of("-q", "-f", configFile.getAbsolutePath());
    return cli("create", args);
}
/** Deploys the job to the host without a deploy token. */
protected void deployJob(final JobId jobId, final String host) throws Exception {
deployJob(jobId, host, null);
}
/**
 * Deploys the job to the host via the CLI and asserts the deployment shows up
 * in the host's status output.
 */
protected void deployJob(final JobId jobId, final String host, final String token)
throws Exception {
final List<String> deployArgs = Lists.newArrayList(jobId.toString(), host);
if (token != null) {
deployArgs.addAll(ImmutableList.of("--token", token));
}
final String deployOutput = cli("deploy", deployArgs);
assertThat(deployOutput, containsString(host + ": done"));
// Verify the job is now listed in the host's status.
final String output = cli("status", "--host", host, "--json");
final Map<JobId, JobStatus> statuses =
Json.readUnchecked(output, new TypeReference<Map<JobId, JobStatus>>() {
});
assertTrue(statuses.keySet().contains(jobId));
}
/**
 * Undeploys the job from the host via the CLI and asserts it no longer
 * appears among the host's deployments.
 */
protected void undeployJob(final JobId jobId, final String host) throws Exception {
final String undeployOutput = cli("undeploy", jobId.toString(), host);
assertThat(undeployOutput, containsString(host + ": done"));
final String output = cli("status", "--host", host, "--json");
final Map<JobId, JobStatus> statuses =
Json.readUnchecked(output, new TypeReference<Map<JobId, JobStatus>>() {
});
final JobStatus status = statuses.get(jobId);
assertTrue(status == null || status.getDeployments().get(host) == null);
}
/** Starts the job on the host via the CLI; returns the CLI output. */
protected String startJob(final JobId jobId, final String host) throws Exception {
return cli("start", jobId.toString(), host);
}
/** Stops the job on the host via the CLI; returns the CLI output. */
protected String stopJob(final JobId jobId, final String host) throws Exception {
return cli("stop", jobId.toString(), host);
}
/** Deregisters the host via the CLI (auto-confirming); returns the CLI output. */
protected String deregisterHost(final String host) throws Exception {
return cli("deregister", host, "--yes");
}
/** Runs a CLI command with heterogeneous args (strings, arrays, iterables). */
protected String cli(final String command, final Object... args)
    throws Exception {
    return cli(command, flatten(args));
}

/** Runs a CLI command with plain string args. */
protected String cli(final String command, final String... args)
    throws Exception {
    return cli(command, asList(args));
}

/**
 * Runs a CLI command against the test master and returns captured stdout.
 * Every invocation targets the master endpoint and skips log setup.
 */
protected String cli(final String command, final List<String> args)
    throws Exception {
    final List<String> invocation =
        newArrayList(command, "-z", masterEndpoint(), "--no-log-setup");
    invocation.addAll(args);
    return main(invocation).toString();
}

/** Runs a CLI command with --json and deserializes the output into klass. */
protected <T> T cliJson(final Class<T> klass, final String command, final String... args)
    throws Exception {
    return cliJson(klass, command, asList(args));
}

protected <T> T cliJson(final Class<T> klass, final String command, final List<String> args)
    throws Exception {
    final List<String> jsonArgs = newArrayList("--json");
    jsonArgs.addAll(args);
    return Json.read(cli(command, jsonArgs), klass);
}

/** Runs the CLI main, capturing stdout; stderr is captured but discarded. */
protected ByteArrayOutputStream main(final String... args) throws Exception {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final ByteArrayOutputStream err = new ByteArrayOutputStream();
    new CliMain(new PrintStream(out), new PrintStream(err), args).run();
    return out;
}

protected ByteArrayOutputStream main(final Collection<String> args) throws Exception {
    return main(args.toArray(new String[args.size()]));
}
/** Polls "hosts -q" until the given host name appears or the timeout expires. */
protected void awaitHostRegistered(final String name, final long timeout, final TimeUnit timeUnit)
    throws Exception {
    Polling.await(timeout, timeUnit, new Callable<Object>() {
        @Override
        public Object call() throws Exception {
            final String hostList = cli("hosts", "-q");
            if (hostList.contains(name)) {
                return true;
            }
            // Not registered yet — signal Polling to retry.
            return null;
        }
    });
}
/**
 * Polls the CLI until the named host reports the given status.
 * Returns the matching HostStatus; throws on timeout.
 */
protected HostStatus awaitHostStatus(final String name, final HostStatus.Status status,
final int timeout, final TimeUnit timeUnit)
throws Exception {
return Polling.await(timeout, timeUnit, new Callable<HostStatus>() {
@Override
public HostStatus call() throws Exception {
final String output = cli("hosts", name, "--json");
final Map<String, HostStatus> statuses;
try {
statuses = Json.read(output, new TypeReference<Map<String, HostStatus>>() {});
} catch (IOException e) {
// Output not parseable yet — retry.
return null;
}
final HostStatus hostStatus = statuses.get(name);
if (hostStatus == null) {
return null;
}
return (hostStatus.getStatus() == status) ? hostStatus : null;
}
});
}
/**
 * Polls via the client until the job on the host reaches the given task state.
 * Returns the matching TaskStatus; throws on timeout.
 */
protected TaskStatus awaitJobState(final HeliosClient client, final String host,
final JobId jobId,
final TaskStatus.State state, final int timeout,
final TimeUnit timeunit) throws Exception {
return Polling.await(timeout, timeunit, new Callable<TaskStatus>() {
@Override
public TaskStatus call() throws Exception {
final HostStatus hostStatus = getOrNull(client.hostStatus(host));
if (hostStatus == null) {
return null;
}
final TaskStatus taskStatus = hostStatus.getStatuses().get(jobId);
return (taskStatus != null && taskStatus.getState() == state) ? taskStatus
: null;
}
});
}
/**
 * Polls via the client until the job on the host reaches the given throttle
 * state. Returns the matching TaskStatus; throws on timeout.
 */
protected TaskStatus awaitJobThrottle(final HeliosClient client, final String host,
final JobId jobId,
final ThrottleState throttled, final int timeout,
final TimeUnit timeunit) throws Exception {
return Polling.await(timeout, timeunit, new Callable<TaskStatus>() {
@Override
public TaskStatus call() throws Exception {
final HostStatus hostStatus = getOrNull(client.hostStatus(host));
if (hostStatus == null) {
return null;
}
final TaskStatus taskStatus = hostStatus.getStatuses().get(jobId);
return (taskStatus != null && taskStatus.getThrottled() == throttled) ? taskStatus : null;
}
});
}
/** Polls via the client until the host reports any status (i.e. is registered). */
protected void awaitHostRegistered(final HeliosClient client, final String host,
final int timeout,
final TimeUnit timeUnit) throws Exception {
Polling.await(timeout, timeUnit, new Callable<HostStatus>() {
@Override
public HostStatus call() throws Exception {
return getOrNull(client.hostStatus(host));
}
});
}
/**
 * Polls via the client until the host reports the given status.
 * Returns the matching HostStatus; throws on timeout.
 */
protected HostStatus awaitHostStatus(final HeliosClient client, final String host,
final HostStatus.Status status,
final int timeout,
final TimeUnit timeUnit) throws Exception {
return Polling.await(timeout, timeUnit, new Callable<HostStatus>() {
@Override
public HostStatus call() throws Exception {
final HostStatus hostStatus = getOrNull(client.hostStatus(host));
if (hostStatus == null) {
return null;
}
return (hostStatus.getStatus() == status) ? hostStatus : null;
}
});
}
/**
 * Polls the CLI job status until the task for {@code jobId} on {@code host}
 * reaches the given state. Returns the matching TaskStatus; throws on timeout.
 */
protected TaskStatus awaitTaskState(final JobId jobId, final String host,
final TaskStatus.State state) throws Exception {
return Polling.await(LONG_WAIT_SECONDS, SECONDS, new Callable<TaskStatus>() {
@Override
public TaskStatus call() throws Exception {
final String output = cli("status", "--json", "--job", jobId.toString());
final Map<JobId, JobStatus> statusMap;
try {
statusMap = Json.read(output, new TypeReference<Map<JobId, JobStatus>>() {});
} catch (IOException e) {
// Output not parseable yet — retry.
return null;
}
final JobStatus status = statusMap.get(jobId);
if (status == null) {
return null;
}
final TaskStatus taskStatus = status.getTaskStatuses().get(host);
if (taskStatus == null) {
return null;
}
if (taskStatus.getState() != state) {
return null;
}
return taskStatus;
}
});
}
/**
 * Polls via the client until neither a task status nor a deployment exists
 * for {@code jobId} on {@code host}. Throws on timeout.
 */
protected void awaitTaskGone(final HeliosClient client, final String host, final JobId jobId,
                             final long timeout, final TimeUnit timeunit) throws Exception {
    Polling.await(timeout, timeunit, new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            final HostStatus hostStatus = getOrNull(client.hostStatus(host));
            // BUGFIX: getOrNull() can return null (every sibling poller checks);
            // previously this NPE'd inside the callable. Treat as "retry".
            if (hostStatus == null) {
                return null;
            }
            final TaskStatus taskStatus = hostStatus.getStatuses().get(jobId);
            final Deployment deployment = hostStatus.getJobs().get(jobId);
            return taskStatus == null && deployment == null ? true : null;
        }
    });
}
/**
 * Polls until the deployment group reaches the given state. Fails fast with
 * an assertion error if the group transitions to FAILED while waiting for a
 * different state.
 */
protected DeploymentGroupStatus awaitDeploymentGroupStatus(
final HeliosClient client,
final String name,
final DeploymentGroupStatus.State state)
throws Exception {
return Polling.await(LONG_WAIT_SECONDS, SECONDS, new Callable<DeploymentGroupStatus>() {
@Override
public DeploymentGroupStatus call() throws Exception {
final DeploymentGroupStatusResponse response = getOrNull(
client.deploymentGroupStatus(name));
if (response != null) {
final DeploymentGroupStatus status = response.getDeploymentGroupStatus();
if (status.getState().equals(state)) {
return status;
} else if (status.getState().equals(DeploymentGroupStatus.State.FAILED)) {
// FAILED is terminal — surface it immediately instead of timing out.
assertEquals(state, status.getState());
}
}
return null;
}
});
}
/**
 * Returns the future's value, or null if it fails for any reason.
 * Used by the polling helpers to turn transient errors into retries.
 */
protected <T> T getOrNull(final ListenableFuture<T> future)
throws ExecutionException, InterruptedException {
return Futures.withFallback(future, new FutureFallback<T>() {
@Override
public ListenableFuture<T> create(@NotNull final Throwable t) throws Exception {
return Futures.immediateFuture(null);
}
}).get();
}
/**
 * Reads the complete docker log stream from the response and returns it as a
 * UTF-8 decoded string.
 *
 * @throws IOException if reading the stream fails
 */
protected String readLogFully(final ClientResponse logs) throws IOException {
    // try-with-resources closes the reader even when nextMessage() throws;
    // the previous version only closed it on the success path (leak on error).
    try (final LogReader logReader = new LogReader(logs.getEntityInputStream())) {
        final StringBuilder stringBuilder = new StringBuilder();
        LogMessage logMessage;
        while ((logMessage = logReader.nextMessage()) != null) {
            stringBuilder.append(UTF_8.decode(logMessage.content()));
        }
        return stringBuilder.toString();
    }
}
/**
 * Kills and removes a container, retrying for up to a minute to work around
 * docker sometimes failing to remove a container directly after killing it.
 */
protected static void removeContainer(final DockerClient dockerClient, final String containerId)
throws Exception {
// Work around docker sometimes failing to remove a container directly after killing it
Polling.await(1, MINUTES, new Callable<Object>() {
@Override
public Object call() throws Exception {
try {
dockerClient.killContainer(containerId);
dockerClient.removeContainer(containerId);
return true;
} catch (ContainerNotFoundException e) {
// We're done here
return true;
} catch (DockerException e) {
if ((e instanceof DockerRequestException) &&
((DockerRequestException) e).message().contains(
"Driver btrfs failed to remove root filesystem")) {
// Workaround btrfs issue where removing containers throws an exception,
// but succeeds anyway.
return true;
} else {
// Transient failure — signal Polling to retry.
return null;
}
}
}
});
}
/** Returns all containers whose name contains {@code needle}. */
protected List<Container> listContainers(final DockerClient dockerClient, final String needle)
    throws DockerException, InterruptedException {
    final List<Container> matching = Lists.newArrayList();
    for (final Container candidate : dockerClient.listContainers()) {
        if (candidate.names() == null) {
            continue;
        }
        boolean matched = false;
        for (final String containerName : candidate.names()) {
            if (containerName.contains(needle)) {
                matched = true;
                break;
            }
        }
        if (matched) {
            matching.add(candidate);
        }
    }
    return matching;
}
/** Flattens varargs of strings, string arrays and nested iterables into one list. */
protected List<String> flatten(final Object... values) {
    return flatten(asList(values));
}

/**
 * Recursively flattens an iterable of strings, string arrays and nested
 * iterables into a single list of strings.
 *
 * @throws IllegalArgumentException if an element is null or of an unsupported type
 */
protected List<String> flatten(final Iterable<?> values) {
    final List<String> list = new ArrayList<>();
    for (final Object value : values) {
        if (value instanceof Iterable) {
            list.addAll(flatten((Iterable<?>) value));
        } else if (value instanceof String[]) {
            // instanceof is null-safe, unlike the former getClass() comparison.
            list.addAll(asList((String[]) value));
        } else if (value instanceof String) {
            list.add((String) value);
        } else {
            // Name the offending value instead of a bare, message-less exception.
            throw new IllegalArgumentException(
                "value must be a String, String[] or Iterable, but was: " + value);
        }
    }
    return list;
}
/** Asserts job equality, ignoring the hash by copying it from the actual job. */
protected void assertJobEquals(final Job expected, final Job actual) {
assertEquals(expected.toBuilder().setHash(actual.getId().getHash()).build(), actual);
}
/** Returns a random hex string, e.g. for unique job or host names. */
protected static String randomHexString() {
return toHexString(ThreadLocalRandom.current().nextInt());
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.MatchQueryParser;
import java.io.IOException;
import java.util.Objects;
/**
* Match query is a query that analyzes the text and constructs a query as the
* result of the analysis.
*/
public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
// User-facing deprecation message for the removed cutoff_frequency parameter.
// BUGFIX: "cutoff_freqency" -> "cutoff_frequency" (typo in the emitted message).
private static final String CUTOFF_FREQUENCY_DEPRECATION_MSG = "cutoff_frequency is not supported. " +
    "The [match] query can skip block of documents efficiently if the total number of hits is not tracked";
// cutoff_frequency is only accepted (and warned about) under V7 REST compatibility.
public static final ParseField CUTOFF_FREQUENCY_FIELD =
    new ParseField("cutoff_frequency")
        .withAllDeprecated(CUTOFF_FREQUENCY_DEPRECATION_MSG)
        .forRestApiVersion(RestApiVersion.equalTo(RestApiVersion.V_7));
public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query");
public static final ParseField LENIENT_FIELD = new ParseField("lenient");
public static final ParseField FUZZY_TRANSPOSITIONS_FIELD = new ParseField("fuzzy_transpositions");
public static final ParseField FUZZY_REWRITE_FIELD = new ParseField("fuzzy_rewrite");
public static final ParseField MINIMUM_SHOULD_MATCH_FIELD = new ParseField("minimum_should_match");
public static final ParseField OPERATOR_FIELD = new ParseField("operator");
public static final ParseField MAX_EXPANSIONS_FIELD = new ParseField("max_expansions");
public static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
public static final ParseField ANALYZER_FIELD = new ParseField("analyzer");
public static final ParseField QUERY_FIELD = new ParseField("query");
public static final ParseField GENERATE_SYNONYMS_PHRASE_QUERY = new ParseField("auto_generate_synonyms_phrase_query");
/** The name for the match query */
public static final String NAME = "match";
/** The default mode terms are combined in a match query */
public static final Operator DEFAULT_OPERATOR = Operator.OR;
// Required, immutable query target and text.
private final String fieldName;
private final Object value;
// Optional parameters, all defaulted; serialized by doWriteTo().
private Operator operator = DEFAULT_OPERATOR;
private String analyzer;
private Fuzziness fuzziness = null;
private int prefixLength = FuzzyQuery.defaultPrefixLength;
private int maxExpansions = FuzzyQuery.defaultMaxExpansions;
private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;
private String minimumShouldMatch;
private String fuzzyRewrite = null;
private boolean lenient = MatchQueryParser.DEFAULT_LENIENCY;
private ZeroTermsQueryOption zeroTermsQuery = MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY;
private boolean autoGenerateSynonymsPhraseQuery = true;
/**
 * Constructs a new match query.
 *
 * @param fieldName the name of the field to query; must be non-null
 * @param value the value to match against; must be non-null
 */
public MatchQueryBuilder(String fieldName, Object value) {
if (fieldName == null) {
throw new IllegalArgumentException("[" + NAME + "] requires fieldName");
}
if (value == null) {
throw new IllegalArgumentException("[" + NAME + "] requires query value");
}
this.fieldName = fieldName;
this.value = value;
}
/**
 * Read from a stream. Field order here is the wire contract and MUST mirror
 * {@link #doWriteTo(StreamOutput)} exactly.
 */
public MatchQueryBuilder(StreamInput in) throws IOException {
super(in);
fieldName = in.readString();
value = in.readGenericValue();
operator = Operator.readFromStream(in);
prefixLength = in.readVInt();
maxExpansions = in.readVInt();
fuzzyTranspositions = in.readBoolean();
lenient = in.readBoolean();
zeroTermsQuery = ZeroTermsQueryOption.readFromStream(in);
// optional fields
analyzer = in.readOptionalString();
minimumShouldMatch = in.readOptionalString();
fuzzyRewrite = in.readOptionalString();
fuzziness = in.readOptionalWriteable(Fuzziness::new);
// cutoff_frequency has been removed, but pre-8.0 nodes still send it:
// consume (and discard) the optional float to stay in sync on the wire.
if (in.getVersion().before(Version.V_8_0_0)) {
in.readOptionalFloat();
}
autoGenerateSynonymsPhraseQuery = in.readBoolean();
}
// Serializes this query; order MUST mirror the StreamInput constructor above.
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(value);
operator.writeTo(out);
out.writeVInt(prefixLength);
out.writeVInt(maxExpansions);
out.writeBoolean(fuzzyTranspositions);
out.writeBoolean(lenient);
zeroTermsQuery.writeTo(out);
// optional fields
out.writeOptionalString(analyzer);
out.writeOptionalString(minimumShouldMatch);
out.writeOptionalString(fuzzyRewrite);
out.writeOptionalWriteable(fuzziness);
// cutoff_frequency has been removed; write a null placeholder so pre-8.0
// nodes can still read the stream.
if (out.getVersion().before(Version.V_8_0_0)) {
out.writeOptionalFloat(null);
}
out.writeBoolean(autoGenerateSynonymsPhraseQuery);
}
/** Returns the field name used in this query. */
public String fieldName() {
return this.fieldName;
}
/** Returns the value used in this query. */
public Object value() {
return this.value;
}
/** Sets the operator to use when using a boolean query. Defaults to {@code OR}. */
public MatchQueryBuilder operator(Operator operator) {
if (operator == null) {
throw new IllegalArgumentException("[" + NAME + "] requires operator to be non-null");
}
this.operator = operator;
return this;
}
/** Returns the operator to use in a boolean query.*/
public Operator operator() {
return this.operator;
}
/**
* Explicitly set the analyzer to use. Defaults to use explicit mapping config for the field, or, if not
* set, the default search analyzer.
*/
public MatchQueryBuilder analyzer(String analyzer) {
this.analyzer = analyzer;
return this;
}
/** Get the analyzer to use, if previously set, otherwise {@code null} */
public String analyzer() {
return this.analyzer;
}
/** Sets the fuzziness used when evaluated to a fuzzy query type. Defaults to "AUTO". */
public MatchQueryBuilder fuzziness(Fuzziness fuzziness) {
this.fuzziness = Objects.requireNonNull(fuzziness);
return this;
}
/** Gets the fuzziness used when evaluated to a fuzzy query type. */
public Fuzziness fuzziness() {
return this.fuzziness;
}
/**
* Sets the length of a length of common (non-fuzzy) prefix for fuzzy match queries
* @param prefixLength non-negative length of prefix
* @throws IllegalArgumentException in case the prefix is negative
*/
public MatchQueryBuilder prefixLength(int prefixLength) {
if (prefixLength < 0 ) {
throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative.");
}
this.prefixLength = prefixLength;
return this;
}
/**
* Gets the length of a length of common (non-fuzzy) prefix for fuzzy match queries
*/
public int prefixLength() {
return this.prefixLength;
}
/**
* When using fuzzy or prefix type query, the number of term expansions to use.
*/
public MatchQueryBuilder maxExpansions(int maxExpansions) {
if (maxExpansions <= 0 ) {
throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive.");
}
this.maxExpansions = maxExpansions;
return this;
}
/**
* Get the (optional) number of term expansions when using fuzzy or prefix type query.
*/
public int maxExpansions() {
return this.maxExpansions;
}
/** Sets optional minimumShouldMatch value to apply to the query */
public MatchQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
this.minimumShouldMatch = minimumShouldMatch;
return this;
}
/** Returns the minimumShouldMatch value, or {@code null} if none was set. */
public String minimumShouldMatch() {
    return minimumShouldMatch;
}
/** Sets the fuzzy_rewrite parameter controlling how the fuzzy query will get rewritten. */
public MatchQueryBuilder fuzzyRewrite(String fuzzyRewriteMethod) {
    this.fuzzyRewrite = fuzzyRewriteMethod;
    return this;
}
/**
 * Returns the fuzzy_rewrite parameter, or {@code null} if none was set.
 *
 * @see #fuzzyRewrite(String)
 */
public String fuzzyRewrite() {
    return fuzzyRewrite;
}
/**
 * Sets whether transpositions are supported in fuzzy queries.<p>
 * The default metric used by fuzzy queries to determine a match is the
 * Damerau-Levenshtein distance formula, which supports transpositions.
 * Setting this to {@code false} switches to classic Levenshtein distance.<br>
 * If not set, the Damerau-Levenshtein distance metric is used.
 */
public MatchQueryBuilder fuzzyTranspositions(boolean transpositions) {
    this.fuzzyTranspositions = transpositions;
    return this;
}
/** Returns whether transpositions are supported in fuzzy queries. */
public boolean fuzzyTranspositions() {
    return fuzzyTranspositions;
}
/** Sets whether format based failures will be ignored. */
public MatchQueryBuilder lenient(boolean ignoreFormatFailures) {
    this.lenient = ignoreFormatFailures;
    return this;
}
/** Returns the leniency setting that controls whether format based failures are ignored. */
public boolean lenient() {
    return lenient;
}
/**
 * Sets the query to use in case no query terms are available, e.g. after analysis removed them.
 * Defaults to {@link ZeroTermsQueryOption#NONE}, but can be set to
 * {@link ZeroTermsQueryOption#ALL} instead.
 *
 * @throws IllegalArgumentException if {@code zeroTermsQuery} is null
 */
public MatchQueryBuilder zeroTermsQuery(ZeroTermsQueryOption zeroTermsQuery) {
    if (zeroTermsQuery != null) {
        this.zeroTermsQuery = zeroTermsQuery;
        return this;
    }
    throw new IllegalArgumentException("[" + NAME + "] requires zeroTermsQuery to be non-null");
}
/** Returns the setting for handling zero terms queries. */
public ZeroTermsQueryOption zeroTermsQuery() {
    return zeroTermsQuery;
}
/**
 * Sets whether phrase queries should be automatically generated for multi term synonyms.
 */
public MatchQueryBuilder autoGenerateSynonymsPhraseQuery(boolean enable) {
    this.autoGenerateSynonymsPhraseQuery = enable;
    return this;
}
/**
 * Returns whether phrase queries are automatically generated for multi term synonyms.
 * Defaults to {@code true}.
 */
public boolean autoGenerateSynonymsPhraseQuery() {
    return autoGenerateSynonymsPhraseQuery;
}
/**
 * Serializes this query as {@code {"match": {"<field>": {"query": ..., ...}}}},
 * emitting optional parameters only when they were explicitly set.
 */
@Override
public void doXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject(NAME);
    builder.startObject(fieldName);
    builder.field(QUERY_FIELD.getPreferredName(), value);
    builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString());
    if (analyzer != null) {
        builder.field(ANALYZER_FIELD.getPreferredName(), analyzer);
    }
    if (fuzziness != null) {
        // Fuzziness serializes itself under its own field name.
        fuzziness.toXContent(builder, params);
    }
    builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength);
    builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions);
    if (minimumShouldMatch != null) {
        builder.field(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch);
    }
    if (fuzzyRewrite != null) {
        builder.field(FUZZY_REWRITE_FIELD.getPreferredName(), fuzzyRewrite);
    }
    // NOTE(review): historical "LUCENE 4 UPGRADE" TODO asked for docs/tests of
    // fuzzy_transpositions — confirm coverage exists before deleting this note.
    builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions);
    builder.field(LENIENT_FIELD.getPreferredName(), lenient);
    builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString());
    builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery);
    printBoostAndQueryName(builder);
    builder.endObject();
    builder.endObject();
}
/**
 * Builds the Lucene query: validates the analyzer against the index, configures
 * a MatchQueryParser from this builder's state, parses a BOOLEAN-type match
 * query, and finally applies minimum_should_match if one was configured.
 *
 * @throws QueryShardException if the configured analyzer is unknown to the index
 */
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
    // validate context specific fields
    if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
        throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
    }
    MatchQueryParser queryParser = new MatchQueryParser(context);
    queryParser.setOccur(operator.toBooleanClauseOccur());
    if (analyzer != null) {
        queryParser.setAnalyzer(analyzer);
    }
    queryParser.setFuzziness(fuzziness);
    queryParser.setFuzzyPrefixLength(prefixLength);
    queryParser.setMaxExpansions(maxExpansions);
    queryParser.setTranspositions(fuzzyTranspositions);
    queryParser.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(fuzzyRewrite, null, LoggingDeprecationHandler.INSTANCE));
    queryParser.setLenient(lenient);
    queryParser.setZeroTermsQuery(zeroTermsQuery);
    queryParser.setAutoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery);
    Query query = queryParser.parse(MatchQueryParser.Type.BOOLEAN, fieldName, value);
    // minimum_should_match is applied after parsing, on the resulting query.
    return Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch);
}
/** Two builders are equal when every serialized parameter matches. */
@Override
protected boolean doEquals(MatchQueryBuilder other) {
    // Primitive fields compared directly; reference fields via Objects.equals.
    return prefixLength == other.prefixLength
        && maxExpansions == other.maxExpansions
        && lenient == other.lenient
        && fuzzyTranspositions == other.fuzzyTranspositions
        && autoGenerateSynonymsPhraseQuery == other.autoGenerateSynonymsPhraseQuery
        && Objects.equals(fieldName, other.fieldName)
        && Objects.equals(value, other.value)
        && Objects.equals(operator, other.operator)
        && Objects.equals(analyzer, other.analyzer)
        && Objects.equals(fuzziness, other.fuzziness)
        && Objects.equals(minimumShouldMatch, other.minimumShouldMatch)
        && Objects.equals(fuzzyRewrite, other.fuzzyRewrite)
        && Objects.equals(zeroTermsQuery, other.zeroTermsQuery);
}
/** Hash over the same fields compared by doEquals(), keeping the contract consistent. */
@Override
protected int doHashCode() {
    return Objects.hash(fieldName, value, operator, analyzer,
            fuzziness, prefixLength, maxExpansions, minimumShouldMatch,
            fuzzyRewrite, lenient, fuzzyTranspositions, zeroTermsQuery, autoGenerateSynonymsPhraseQuery);
}
/** Returns the registered name of this query, used for (de)serialization. */
@Override
public String getWriteableName() {
    return NAME;
}
/**
 * Parses a {@code match} query from XContent. Accepts both the full object form
 * {@code {"match": {"field": {"query": ...,}}}} and the short form
 * {@code {"match": {"field": "text"}}}.
 *
 * @throws ParsingException if more than one field is specified, an unknown
 *         parameter or token is encountered, or no query text is given
 */
public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOException {
    String fieldName = null;
    Object value = null;
    float boost = AbstractQueryBuilder.DEFAULT_BOOST;
    String minimumShouldMatch = null;
    String analyzer = null;
    Operator operator = MatchQueryBuilder.DEFAULT_OPERATOR;
    Fuzziness fuzziness = null;
    int prefixLength = FuzzyQuery.defaultPrefixLength;
    int maxExpansion = FuzzyQuery.defaultMaxExpansions;
    boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions;
    String fuzzyRewrite = null;
    boolean lenient = MatchQueryParser.DEFAULT_LENIENCY;
    ZeroTermsQueryOption zeroTermsQuery = MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY;
    boolean autoGenerateSynonymsPhraseQuery = true;
    String queryName = null;
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
            // Full form: the field name maps to an object of parameters.
            throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName);
            fieldName = currentFieldName;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        value = parser.objectText();
                    } else if (ANALYZER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        analyzer = parser.text();
                    } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        boost = parser.floatValue();
                    } else if (Fuzziness.FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        fuzziness = Fuzziness.parse(parser);
                    } else if (PREFIX_LENGTH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        prefixLength = parser.intValue();
                    } else if (MAX_EXPANSIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        maxExpansion = parser.intValue();
                    } else if (OPERATOR_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        operator = Operator.fromString(parser.text());
                    } else if (MINIMUM_SHOULD_MATCH_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        minimumShouldMatch = parser.textOrNull();
                    } else if (FUZZY_REWRITE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        fuzzyRewrite = parser.textOrNull();
                    } else if (FUZZY_TRANSPOSITIONS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        fuzzyTranspositions = parser.booleanValue();
                    } else if (LENIENT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        lenient = parser.booleanValue();
                    } else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        String zeroTermsValue = parser.text();
                        if ("none".equalsIgnoreCase(zeroTermsValue)) {
                            zeroTermsQuery = ZeroTermsQueryOption.NONE;
                        } else if ("all".equalsIgnoreCase(zeroTermsValue)) {
                            zeroTermsQuery = ZeroTermsQueryOption.ALL;
                        } else {
                            throw new ParsingException(parser.getTokenLocation(),
                                "Unsupported zero_terms_query value [" + zeroTermsValue + "]");
                        }
                    } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        queryName = parser.text();
                    } else if (GENERATE_SYNONYMS_PHRASE_QUERY.match(currentFieldName, parser.getDeprecationHandler())) {
                        autoGenerateSynonymsPhraseQuery = parser.booleanValue();
                    } else if (parser.getRestApiVersion() == RestApiVersion.V_7 &&
                        CUTOFF_FREQUENCY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                        // cutoff_frequency was removed; rejected with a dedicated message
                        // only under the v7-compatible REST API.
                        throw new ParsingException(parser.getTokenLocation(), CUTOFF_FREQUENCY_DEPRECATION_MSG);
                    } else {
                        throw new ParsingException(parser.getTokenLocation(),
                            "[" + NAME + "] query does not support [" + currentFieldName + "]");
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                        "[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
                }
            }
        } else {
            // Short form: the field name maps directly to the query text.
            throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, parser.currentName());
            fieldName = parser.currentName();
            value = parser.objectText();
        }
    }
    if (value == null) {
        throw new ParsingException(parser.getTokenLocation(), "No text specified for text query");
    }
    MatchQueryBuilder matchQuery = new MatchQueryBuilder(fieldName, value);
    matchQuery.operator(operator);
    matchQuery.analyzer(analyzer);
    matchQuery.minimumShouldMatch(minimumShouldMatch);
    if (fuzziness != null) {
        matchQuery.fuzziness(fuzziness);
    }
    matchQuery.fuzzyRewrite(fuzzyRewrite);
    matchQuery.prefixLength(prefixLength);
    matchQuery.fuzzyTranspositions(fuzzyTranspositions);
    matchQuery.maxExpansions(maxExpansion);
    matchQuery.lenient(lenient);
    matchQuery.zeroTermsQuery(zeroTermsQuery);
    matchQuery.autoGenerateSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery);
    matchQuery.queryName(queryName);
    matchQuery.boost(boost);
    return matchQuery;
}
}
| |
package com.parnswir.unmp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.RatingBar;
import android.widget.SeekBar;
import android.widget.TextView;
import com.parnswir.unmp.core.AlbumCoverRetriever;
import com.parnswir.unmp.core.C;
import com.parnswir.unmp.core.ImageLoader;
import com.parnswir.unmp.media.MediaPlayerStatus;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Locale;
/**
 * Fragment showing the music player UI: transport buttons, title/artist/album
 * labels, seek bar, rating bar and album art. It mirrors the state broadcast
 * by {@code PlayerService} and persists that state across fragment lifecycle.
 */
public class PlayerFragment extends AbstractFragment {

    // Indices into playerLabels; must match the order of the `labels` array
    // built in setupPlayerControls().
    private static final int LAB_POSITION = 0, LAB_LENGTH = 1, LAB_TITLE = 2,
            LAB_ARTIST = 3, LAB_ALBUM = 4;
    // Indices into playerControls; must match the order of the `buttons` array
    // built in setupPlayerControls().
    private static final int BTN_REPEAT = 0, BTN_PREV = 1, BTN_PLAY = 2,
            BTN_NEXT = 3, BTN_SHUFFLE = 4;

    private ArrayList<ImageButton> playerControls = new ArrayList<ImageButton>();
    private ArrayList<TextView> playerLabels = new ArrayList<TextView>();
    private SeekBar currentTitleProgress;
    private RatingBar ratingBar;
    private ImageLoader imageLoader;

    // Most recent status received from the PlayerService (or restored from disk).
    private MediaPlayerStatus playerStatus = new MediaPlayerStatus();
    // Status as of the previous UI refresh; used to skip redundant view updates.
    private MediaPlayerStatus oldStatus = new MediaPlayerStatus();

    private BroadcastReceiver statusBroadcastReceiver;
    // True while statusBroadcastReceiver is registered with the activity.
    private boolean receiving = false;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        rootView = super.onCreateView(inflater, container, savedInstanceState);
        // NOTE(review): this inflates R.layout.activity_main — confirm a
        // fragment-specific layout was not intended here.
        inflate(R.layout.activity_main);
        showActionBar();
        imageLoader = new ImageLoader(activity, DB);
        setupPlayerControls();
        loadStatus();
        updatePlayerStatus();
        return rootView;
    }

    @Override
    public void onStart() {
        super.onStart();
        setupIntentReceiver();
        // The fragment is visible again, so the service notification is redundant.
        PlayerService.setPlayerServiceState(activity, PlayerService.HIDE_NOTIFICATION, null);
    }

    @Override
    public void onPause() {
        stopReceiving();
        saveStatus();
        // Keep the service playing in the background only if playback is active.
        if (playerStatus.playing) {
            PlayerService.setPlayerServiceState(activity, PlayerService.START, null);
        } else {
            PlayerService.setPlayerServiceState(activity, PlayerService.STOP, null);
        }
        super.onPause();
    }

    /**
     * Persists the current player status to private app storage.
     * Best effort: failures are swallowed; at worst no status is restored next time.
     */
    private void saveStatus() {
        // try-with-resources closes the streams even when writeObject throws
        // (the previous implementation leaked them on exception).
        try (ObjectOutputStream os = new ObjectOutputStream(
                activity.openFileOutput(C.STATUS_FILE_NAME, Context.MODE_PRIVATE))) {
            // Always persist a non-playing state; playback is restarted explicitly.
            playerStatus.stopped = true;
            playerStatus.paused = false;
            os.writeObject(playerStatus);
        } catch (IOException ignored) {
            // Deliberate best-effort: persisting status must never crash the UI.
        }
    }

    /**
     * Restores the status previously written by {@link #saveStatus()}.
     * Keeps the current (default) status when the file is missing or unreadable.
     */
    private void loadStatus() {
        MediaPlayerStatus status = null;
        try (ObjectInputStream is = new ObjectInputStream(
                activity.openFileInput(C.STATUS_FILE_NAME))) {
            status = (MediaPlayerStatus) is.readObject();
        } catch (IOException | ClassNotFoundException ignored) {
            // Deliberate best-effort: fall back to the default status.
        }
        if (status != null)
            playerStatus = status;
    }

    /** Resolves the control buttons/labels from the layout and wires up click handlers. */
    public void setupPlayerControls() {
        playerControls.clear();
        int[] buttons = { R.id.btnRepeat, R.id.btnPrevious, R.id.btnPlay, R.id.btnNext, R.id.btnShuffle };
        for (int button : buttons) {
            playerControls.add((ImageButton) rootView.findViewById(button));
        }
        playerControls.get(BTN_PLAY).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Stopped -> (re)start playback; otherwise toggle pause.
                if (playerStatus.stopped) {
                    play();
                } else {
                    pause();
                }
            }
        });
        playerControls.get(BTN_NEXT).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                PlayerService.setPlayerServiceState(activity, PlayerService.NEXT, null);
            }
        });
        playerControls.get(BTN_PREV).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                PlayerService.setPlayerServiceState(activity, PlayerService.PREVIOUS, null);
            }
        });
        playerControls.get(BTN_REPEAT).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                PlayerService.setPlayerServiceState(activity, PlayerService.TOGGLE_REPEAT, null);
            }
        });
        playerControls.get(BTN_SHUFFLE).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                PlayerService.setPlayerServiceState(activity, PlayerService.TOGGLE_SHUFFLE, null);
            }
        });
        playerLabels.clear();
        int[] labels = { R.id.tvTime, R.id.tvTimeLeft, R.id.tvTitle, R.id.tvArtist, R.id.tvAlbum };
        for (int label : labels) {
            playerLabels.add((TextView) rootView.findViewById(label));
        }
        currentTitleProgress = (SeekBar) rootView.findViewById(R.id.seekBar);
        ratingBar = (RatingBar) rootView.findViewById(R.id.ratingBar);
        ratingBar.setMax(10);
    }

    /** Pushes the whole playerStatus into the UI. */
    public void updatePlayerStatus() {
        setPlayIconTo(playerStatus.paused || playerStatus.stopped);
        setRepeatIconTo(playerStatus.repeatMode);
        setShuffleIconTo(playerStatus.shuffled);
        showTitleDuration();
        showCurrentPosition();
        updateTitleInfo();
    }

    /** Shows the play icon when {@code shown} is true, the pause icon otherwise. */
    private void setPlayIconTo(boolean shown) {
        Drawable icon;
        Resources res = getResources();
        if (shown) {
            icon = res.getDrawable(R.drawable.ic_action_play);
        } else {
            icon = res.getDrawable(R.drawable.ic_action_pause);
        }
        playerControls.get(BTN_PLAY).setImageDrawable(icon);
    }

    /** Maps repeatMode (0=off, 1=all, 2=one) onto the repeat button icon. */
    private void setRepeatIconTo(int repeatMode) {
        int[] icons = new int[] {R.drawable.ic_action_repeat_nothing, R.drawable.ic_action_repeat, R.drawable.ic_action_repeat_one};
        playerControls.get(BTN_REPEAT).setImageDrawable(getResources().getDrawable(icons[repeatMode]));
    }

    private void setShuffleIconTo(boolean shuffled) {
        Drawable icon;
        Resources res = getResources();
        if (shuffled) {
            icon = res.getDrawable(R.drawable.ic_action_shuffle);
        } else {
            icon = res.getDrawable(R.drawable.ic_action_do_not_shuffle);
        }
        playerControls.get(BTN_SHUFFLE).setImageDrawable(icon);
    }

    private void showCurrentPosition() {
        currentTitleProgress.setProgress(playerStatus.position);
        playerLabels.get(LAB_POSITION).setText(formatPosition(playerStatus.position));
    }

    private void showTitleDuration() {
        currentTitleProgress.setMax(playerStatus.length);
        playerLabels.get(LAB_LENGTH).setText(formatPosition(playerStatus.length));
    }

    /** Formats a position in milliseconds as mm:ss. */
    private String formatPosition(int position) {
        int seconds = position / 1000;
        return String.format(Locale.getDefault(), "%02d:%02d", seconds / 60, seconds % 60);
    }

    /** Refreshes title/artist/album/rating views, skipping fields that did not change. */
    private void updateTitleInfo() {
        if (! oldStatus.title.equals(playerStatus.title))
            playerLabels.get(LAB_TITLE).setText(playerStatus.title);
        if (! oldStatus.artist.equals(playerStatus.artist))
            playerLabels.get(LAB_ARTIST).setText(playerStatus.artist);
        // BUGFIX: was '&&', which refreshed the album line only when BOTH album and
        // year changed; a different album from the same year left a stale label/cover.
        if (! oldStatus.album.equals(playerStatus.album) || ! oldStatus.year.equals(playerStatus.year)) {
            playerLabels.get(LAB_ALBUM).setText(String.format(Locale.getDefault(),
                    "%s [%s]", playerStatus.album, playerStatus.year));
            setAlbumArt();
        }
        if (oldStatus.rating != playerStatus.rating)
            setRating(playerStatus.rating);
    }

    private void setRating(int rating) {
        ratingBar.setProgress(rating);
    }

    private void setAlbumArt() {
        ImageView view = (ImageView) rootView.findViewById(R.id.ivCover);
        view.setImageBitmap(ImageLoader.decodeBitmap(playerStatus.cover, false));
    }

    /** Resumes when paused, otherwise starts the current playlist from the stored position. */
    private void play() {
        if (playerStatus.paused) {
            PlayerService.setPlayerServiceState(activity, PlayerService.PLAY, null);
        } else {
            playPlaylist(playerStatus.playlist, playerStatus.position);
        }
    }

    private void pause() {
        PlayerService.setPlayerServiceState(activity, PlayerService.PAUSE, null);
    }

    /** Registers the status receiver exactly once. */
    private void setupIntentReceiver() {
        if (!receiving) {
            receiving = true;
            statusBroadcastReceiver = new StatusIntentReceiver();
            IntentFilter statusFilter = new IntentFilter(PlayerService.STATUS_INTENT);
            activity.registerReceiver(statusBroadcastReceiver, statusFilter);
        }
    }

    /** Unregisters the status receiver if it is currently registered. */
    private void stopReceiving() {
        if (statusBroadcastReceiver != null && receiving) {
            activity.unregisterReceiver(statusBroadcastReceiver);
            receiving = false;
        }
    }

    /** Receives status broadcasts from PlayerService and refreshes the UI. */
    private class StatusIntentReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (PlayerService.STATUS_INTENT.equals(intent.getAction())) {
                playerStatus = (MediaPlayerStatus) intent.getSerializableExtra(PlayerService.EXTRA_STATUS);
                updatePlayerStatus();
                // Remember what was rendered so the next update can diff against it.
                oldStatus = playerStatus;
            }
        }
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.edgent.test.connectors.kafka;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.edgent.connectors.kafka.KafkaConsumer;
import org.apache.edgent.connectors.kafka.KafkaProducer;
import org.apache.edgent.test.connectors.common.ConnectorTestBase;
import org.apache.edgent.topology.TSink;
import org.apache.edgent.topology.TStream;
import org.apache.edgent.topology.Topology;
import org.apache.edgent.topology.plumbing.PlumbingStreams;
import org.junit.Test;
/**
 * Manual integration tests for the Edgent Kafka connector; they require a
 * running Kafka broker and Zookeeper. Locations are taken from the
 * {@code org.apache.edgent.test.connectors.kafka.*} system properties with
 * localhost defaults.
 */
public class KafkaStreamsTestManual extends ConnectorTestBase {
    // Delay publishing so the consumer is connected before messages arrive.
    private static final int PUB_DELAY_MSEC = 4*1000;
    private static final int SEC_TIMEOUT = 10;
    private final String BASE_GROUP_ID = "kafkaStreamsTestGroupId";
    private final String uniq = simpleTS();
    private final String msg1 = "Hello";
    private final String msg2 = "Are you there?";

    public String getMsg1() {
        return msg1;
    }

    public String getMsg2() {
        return msg2;
    }

    private String[] getKafkaTopics() {
        String csvTopics = System.getProperty("org.apache.edgent.test.connectors.kafka.csvTopics", "testTopic1,testTopic2");
        String[] topics = csvTopics.split(",");
        return topics;
    }

    private String getKafkaBootstrapServers() {
        return System.getProperty("org.apache.edgent.test.connectors.kafka.bootstrap.servers", "localhost:9092");
    }

    private String getKafkaZookeeperConnect() {
        return System.getProperty("org.apache.edgent.test.connectors.kafka.zookeeper.connect", "localhost:2181");
    }

    /** Builds a group.id unique per test and run so reruns don't resume old offsets. */
    private String newGroupId(String name) {
        String groupId = BASE_GROUP_ID + "_" + name + "_" + uniq.replaceAll(":", "");
        System.out.println("["+simpleTS()+"] "
                + "Using Kafka consumer group.id " + groupId);
        return groupId;
    }

    private Map<String,Object> newConsumerConfig(String groupId) {
        Map<String,Object> config = new HashMap<>();
        // unbaked 8.8.2 KafkaConsumer
        // config.put("bootstrap.servers", getKafkaBootstrapServers());
        config.put("zookeeper.connect", getKafkaZookeeperConnect());
        config.put("group.id", groupId);
        return config;
    }

    private Map<String,Object> newProducerConfig() {
        Map<String,Object> config = new HashMap<>();
        config.put("bootstrap.servers", getKafkaBootstrapServers());
        return config;
    }

    /** Simple holder used to round-trip topic/partition/key/value through Kafka. */
    private static class Rec {
        String topic;
        int partition;
        String key;
        String value;
        Rec(String topic, int partition, String key, String value) {
            this.topic = topic;
            // BUGFIX: 'partition' was never assigned, so the field silently kept
            // its default (0) and toString() misreported the partition.
            this.partition = partition;
            this.key = key;
            this.value = value;
        }
        @Override
        public String toString() {
            return "topic:"+topic+" partition:"+partition+" key:"+key+" value:"+value;
        }
    }

    @Test
    public void testSimple() throws Exception {
        Topology t = newTopology("testSimple");
        MsgGenerator mgen = new MsgGenerator(t.getName());
        String topic = getKafkaTopics()[0];
        String groupId = newGroupId(t.getName());
        List<String> msgs = createMsgs(mgen, topic, getMsg1(), getMsg2());

        TStream<String> s = PlumbingStreams.blockingOneShotDelay(
                t.collection(msgs), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);

        Map<String,Object> pConfig = newProducerConfig();
        KafkaProducer producer = new KafkaProducer(t, () -> pConfig);
        TSink<String> sink = producer.publish(s, topic);

        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        TStream<String> rcvd = consumer.subscribe(
                rec -> rec.value(),
                topic);

        completeAndValidate("", t, rcvd, mgen, SEC_TIMEOUT, msgs.toArray(new String[0]));
        assertNotNull(sink);
    }

    @Test
    public void testWithKey() throws Exception {
        Topology t = newTopology("testWithKey");
        MsgGenerator mgen = new MsgGenerator(t.getName());
        String topic = getKafkaTopics()[0];
        String groupId = newGroupId(t.getName());
        List<String> msgs = createMsgs(mgen, topic, getMsg1(), getMsg2());
        List<Rec> recs = new ArrayList<>();
        int i = 0;
        for (String msg : msgs) {
            recs.add(new Rec(topic, 0, "key-" + ++i, msg));
        }
        List<String> expected = new ArrayList<>();
        for (Rec rec : recs) {
            expected.add(rec.toString());
        }

        // Test publish with key
        // Also exercise ConsumerRecord accessors
        TStream<Rec> s = PlumbingStreams.blockingOneShotDelay(
                t.collection(recs), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);

        Map<String,Object> pConfig = newProducerConfig();
        KafkaProducer producer = new KafkaProducer(t, () -> pConfig);
        producer.publish(s,
                tuple -> tuple.key,
                tuple -> tuple.value,
                tuple -> tuple.topic,
                tuple -> tuple.partition);

        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        TStream<String> rcvd = consumer.subscribe(
                rec -> new Rec(rec.topic(),
                        rec.partition(),
                        rec.key(),
                        rec.value()).toString(),
                topic);

        completeAndValidate("", t, rcvd, mgen, SEC_TIMEOUT, expected.toArray(new String[0]));
    }

    @Test
    public void testPubSubBytes() throws Exception {
        Topology t = newTopology("testPubSubBytes");
        MsgGenerator mgen = new MsgGenerator(t.getName());
        String topic = getKafkaTopics()[0];
        String groupId = newGroupId(t.getName());
        List<String> msgs = createMsgs(mgen, topic, getMsg1(), getMsg2());
        List<Rec> recs = new ArrayList<>();
        int i = 0;
        for (String msg : msgs) {
            recs.add(new Rec(topic, 0, "key-" + ++i, msg));
        }
        List<String> expected = new ArrayList<>();
        for (Rec rec : recs) {
            expected.add(rec.toString());
        }

        // Test publishBytes() / subscribeBytes()
        TStream<Rec> s = PlumbingStreams.blockingOneShotDelay(
                t.collection(recs), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);

        Map<String,Object> pConfig = newProducerConfig();
        KafkaProducer producer = new KafkaProducer(t, () -> pConfig);
        producer.publishBytes(s,
                tuple -> tuple.key.getBytes(StandardCharsets.UTF_8),
                tuple -> tuple.value.getBytes(StandardCharsets.UTF_8),
                tuple -> tuple.topic,
                tuple -> tuple.partition);

        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        TStream<String> rcvd = consumer.subscribeBytes(
                rec -> new Rec(rec.topic(),
                        rec.partition(),
                        new String(rec.key(), StandardCharsets.UTF_8),
                        new String(rec.value(), StandardCharsets.UTF_8)).toString(),
                topic);

        completeAndValidate("", t, rcvd, mgen, SEC_TIMEOUT, expected.toArray(new String[0]));
    }

    @Test
    public void testMultiPub() throws Exception {
        Topology t = newTopology("testMultiPub");
        MsgGenerator mgen = new MsgGenerator(t.getName());
        String topic1 = getKafkaTopics()[0];
        String topic2 = getKafkaTopics()[1];
        String groupId = newGroupId(t.getName());
        List<String> msgs1 = createMsgs(mgen, topic1, getMsg1(), getMsg2());
        List<String> msgs2 = createMsgs(mgen, topic2, getMsg1(), getMsg2());
        List<String> msgs = new ArrayList<>(msgs1);
        msgs.addAll(msgs2);

        // Multiple publish() on a single connection.
        // Also multi-topic subscribe().
        TStream<String> s1 = PlumbingStreams.blockingOneShotDelay(
                t.collection(msgs1), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);
        TStream<String> s2 = PlumbingStreams.blockingOneShotDelay(
                t.collection(msgs2), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);

        Map<String,Object> pConfig = newProducerConfig();
        KafkaProducer producer = new KafkaProducer(t, () -> pConfig);
        TSink<String> sink1 = producer.publish(s1, topic1);
        TSink<String> sink2 = producer.publish(s2, topic2);

        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        TStream<String> rcvd = consumer.subscribe(
                rec -> rec.value(),
                topic1, topic2);

        completeAndValidate(false/*ordered*/, "", t, rcvd, mgen, SEC_TIMEOUT, msgs.toArray(new String[0]));
        assertNotNull(sink1);
        assertNotNull(sink2);
        assertNotSame(sink1, sink2);
    }

    @Test(expected=IllegalStateException.class)
    public void testMultiSubNeg() throws Exception {
        Topology t = newTopology("testMultiSubNeg");
        MsgGenerator mgen = new MsgGenerator(t.getName());
        String topic1 = getKafkaTopics()[0];
        String topic2 = getKafkaTopics()[1];
        String groupId = newGroupId(t.getName());
        List<String> msgs1 = createMsgs(mgen, topic1, getMsg1(), getMsg2());
        List<String> msgs2 = createMsgs(mgen, topic2, getMsg1(), getMsg2());

        // Multiple subscribe() on a single connection.
        // Currently, w/Kafka0.8.2.2, we only support a single
        // subscriber on the connection and an IllegalStateException
        // is thrown.
        // This restriction will be removed when we migrate to Kafka 0.9.0.0
        TStream<String> s1 = PlumbingStreams.blockingOneShotDelay(
                t.collection(msgs1), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);
        TStream<String> s2 = PlumbingStreams.blockingOneShotDelay(
                t.collection(msgs2), PUB_DELAY_MSEC, TimeUnit.MILLISECONDS);

        Map<String,Object> pConfig = newProducerConfig();
        KafkaProducer producer = new KafkaProducer(t, () -> pConfig);
        producer.publish(s1, topic1);
        producer.publish(s2, topic2);

        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        @SuppressWarnings("unused")
        TStream<String> rcvd1 = consumer.subscribe(
                rec -> rec.value(),
                topic1);
        @SuppressWarnings("unused")
        TStream<String> rcvd2 = consumer.subscribe(
                rec -> rec.value(),
                topic2);

        // TODO see "single subscribe" restriction above
        // // TODO union() is NYI
        //// TStream<String> rcvd = rcvd1.union(rcvd2);
        ////
        //// completeAndValidate(false/*ordered*/, "", t, rcvd, mgen, SEC_TIMEOUT, msgs.toArray(new String[0]));
        //
        // Condition<Long> tc1 = t.getTester().tupleCount(rcvd1, msgs1.size());
        // Condition<Long> tc2 = t.getTester().tupleCount(rcvd2, msgs2.size());
        //
        // List<Condition<Long>> conditions = new ArrayList<>();
        // conditions.add(tc1);
        // conditions.add(tc2);
        // Condition<?> tc = tc1.and(tc2);
        //
        // Condition<List<String>> contents1 = t.getTester().streamContents(rcvd1, msgs1.toArray(new String[0]));
        // Condition<List<String>> contents2 = t.getTester().streamContents(rcvd2, msgs2.toArray(new String[0]));
        //
        // complete(t, tc, SEC_TIMEOUT, TimeUnit.SECONDS);
        //
        // assertTrue(groupId + " contents1:" + contents1.getResult(), contents1.valid());
        // assertTrue(groupId + " contents2:" + contents2.getResult(), contents2.valid());
        // assertTrue("valid:" + tc, tc.valid());
    }

    @Test(expected=IllegalArgumentException.class)
    public void testNoTopicSubNeg() throws Exception {
        Topology t = newTopology("testNoTopicSubNeg");
        String groupId = newGroupId(t.getName());
        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        consumer.subscribe(rec -> rec.value()/*, "topic1"*/);
    }

    @Test(expected=IllegalArgumentException.class)
    public void testDupTopicSub1Neg() throws Exception {
        Topology t = newTopology("testDupTopicSub1Neg");
        String groupId = newGroupId(t.getName());
        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        consumer.subscribe(rec -> rec.value(), "topic1", "topic1");
    }

    @Test(expected=IllegalArgumentException.class)
    public void testDupTopicSub2Neg() throws Exception {
        Topology t = newTopology("testDupTopicSub2Neg");
        String groupId = newGroupId(t.getName());
        Map<String,Object> cConfig = newConsumerConfig(groupId);
        KafkaConsumer consumer = new KafkaConsumer(t, () -> cConfig);
        consumer.subscribe(rec -> rec.value(), "topic1");
        consumer.subscribe(rec -> rec.value(), "topic1");
    }
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.spdy.api;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
public class Settings implements Iterable<Settings.Setting>
{
private final Map<ID, Settings.Setting> settings;
/** Creates an empty, mutable settings collection. */
public Settings()
{
    settings = new HashMap<>();
}
/**
 * Copy constructor.
 *
 * @param original the settings to copy
 * @param immutable whether the copy must reject further modification
 */
public Settings(Settings original, boolean immutable)
{
    Map<ID, Settings.Setting> copy = new HashMap<>(original.settings);
    settings = immutable ? Collections.unmodifiableMap(copy) : copy;
}
public Setting get(ID id)
{
return settings.get(id);
}
public void put(Setting setting)
{
settings.put(setting.id(), setting);
}
public Setting remove(ID id)
{
return settings.remove(id);
}
public int size()
{
return settings.size();
}
public void clear()
{
settings.clear();
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
Settings that = (Settings)obj;
return settings.equals(that.settings);
}
@Override
public int hashCode()
{
return settings.hashCode();
}
@Override
public Iterator<Setting> iterator()
{
return settings.values().iterator();
}
@Override
public String toString()
{
return settings.toString();
}
public static final class ID
{
public static final ID UPLOAD_BANDWIDTH = new ID(1);
public static final ID DOWNLOAD_BANDWIDTH = new ID(2);
public static final ID ROUND_TRIP_TIME = new ID(3);
public static final ID MAX_CONCURRENT_STREAMS = new ID(4);
public static final ID CURRENT_CONGESTION_WINDOW = new ID(5);
public static final ID DOWNLOAD_RETRANSMISSION_RATE = new ID(6);
public static final ID INITIAL_WINDOW_SIZE = new ID(7);
public synchronized static ID from(int code)
{
ID id = Codes.codes.get(code);
if (id == null)
id = new ID(code);
return id;
}
private final int code;
private ID(int code)
{
this.code = code;
Codes.codes.put(code, this);
}
public int code()
{
return code;
}
@Override
public String toString()
{
return String.valueOf(code);
}
private static class Codes
{
private static final Map<Integer, ID> codes = new HashMap<>();
}
}
public static enum Flag
{
NONE((byte)0),
PERSIST((byte)1),
PERSISTED((byte)2);
public static Flag from(byte code)
{
return Codes.codes.get(code);
}
private final byte code;
private Flag(byte code)
{
this.code = code;
Codes.codes.put(code, this);
}
public byte code()
{
return code;
}
private static class Codes
{
private static final Map<Byte, Flag> codes = new HashMap<>();
}
}
public static class Setting
{
private final ID id;
private final Flag flag;
private final int value;
public Setting(ID id, int value)
{
this(id, Flag.NONE, value);
}
public Setting(ID id, Flag flag, int value)
{
this.id = id;
this.flag = flag;
this.value = value;
}
public ID id()
{
return id;
}
public Flag flag()
{
return flag;
}
public int value()
{
return value;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
Setting that = (Setting)obj;
return value == that.value && flag == that.flag && id == that.id;
}
@Override
public int hashCode()
{
int result = id.hashCode();
result = 31 * result + flag.hashCode();
result = 31 * result + value;
return result;
}
@Override
public String toString()
{
return String.format("[id=%s,flags=%s:value=%d]", id(), flag(), value());
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.atlasmap;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import io.atlasmap.api.AtlasContext;
import io.atlasmap.api.AtlasContextFactory;
import io.atlasmap.api.AtlasException;
import io.atlasmap.api.AtlasSession;
import io.atlasmap.v2.Audit;
import io.atlasmap.v2.DataSource;
import io.atlasmap.v2.DataSourceType;
import org.apache.camel.Category;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.component.ResourceEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.atlasmap.api.AtlasContextFactory.Format.ADM;
import static io.atlasmap.api.AtlasContextFactory.Format.JSON;
/**
 * Transforms the message using an AtlasMap transformation.
 * <p>
 * The mapping definition is loaded from {@code resourceUri} (an ADM archive or
 * a JSON mapping file), or supplied per Exchange through the
 * {@code AtlasMapConstants.ATLAS_MAPPING} header.
 */
@UriEndpoint(firstVersion = "3.7.0", scheme = "atlasmap", title = "AtlasMap", syntax = "atlasmap:resourceUri",
             producerOnly = true, category = { Category.TRANSFORMATION })
public class AtlasMapEndpoint extends ResourceEndpoint {
    public static final String CONTENT_TYPE_JSON = "application/json";
    public static final String CONTENT_TYPE_XML = "application/xml";
    private static final Logger LOG = LoggerFactory.getLogger(AtlasMapEndpoint.class);
    private AtlasContextFactory atlasContextFactory;
    private AtlasContext atlasContext;
    @UriParam
    private String sourceMapName;
    @UriParam
    private String targetMapName;
    @UriParam(defaultValue = "MAP")
    private TargetMapMode targetMapMode = TargetMapMode.MAP;
    @UriParam(defaultValue = "false")
    private boolean forceReload;

    /** How multiple target documents are delivered to the Exchange. */
    public enum TargetMapMode {
        /** Collect targets into a Map set on a property or the message body. */
        MAP,
        /** Store each target document into a message header keyed by Document ID. */
        MESSAGE_HEADER,
        /** Store each target document into an exchange property keyed by Document ID. */
        EXCHANGE_PROPERTY;
    }

    public AtlasMapEndpoint(String uri, AtlasMapComponent component, String resourceUri) {
        super(uri, component, resourceUri);
    }

    @Override
    public ExchangePattern getExchangePattern() {
        // Transformation produces a reply body, so the endpoint is InOut.
        return ExchangePattern.InOut;
    }

    @Override
    protected String createEndpointUri() {
        return "atlasmap:" + getResourceUri();
    }

    public AtlasContextFactory getAtlasContextFactory() {
        return this.atlasContextFactory;
    }

    public void setAtlasContextFactory(AtlasContextFactory atlasContextFactory) {
        this.atlasContextFactory = atlasContextFactory;
    }

    public AtlasContext getAtlasContext() {
        return this.atlasContext;
    }

    public void setAtlasContext(AtlasContext atlasContext) {
        this.atlasContext = atlasContext;
    }

    /**
     * The Exchange property name for a source message map which hold <code>java.util.Map<String, Message></code>
     * where the key is AtlasMap Document ID. AtlasMap consumes Message bodies as source documents, as well as message
     * headers as source properties where the scope equals to Document ID.
     *
     * @param name Exchange property name for source map
     */
    public void setSourceMapName(String name) {
        this.sourceMapName = name;
    }

    public String getSourceMapName() {
        return this.sourceMapName;
    }

    /**
     * The Exchange property name for a target document map which hold <code>java.util.Map<String, Object></code>
     * where the key is AtlasMap Document ID. AtlasMap populates multiple target documents into this map.
     *
     * @param name Exchange property name for target map
     */
    public void setTargetMapName(String name) {
        this.targetMapName = name;
    }

    public String getTargetMapName() {
        return this.targetMapName;
    }

    /**
     * {@link TargetMapMode} enum value to specify how multiple target documents are delivered if exist.
     * <ul>
     * <li>'MAP': Stores them into a java.util.Map, and the java.util.Map is set to an exchange property if
     * 'targetMapName' is specified, otherwise message body.</li>
     * <li>'MESSAGE_HEADER': Stores them into message headers.</li>
     * <li>'EXCHANGE_PROPERTY': Stores them into exchange properties.</li>
     * </ul>
     *
     * @param mode {@link TargetMapMode}
     */
    public void setTargetMapMode(TargetMapMode mode) {
        this.targetMapMode = mode;
    }

    public TargetMapMode getTargetMapMode() {
        return this.targetMapMode;
    }

    /**
     * Whether to enable or disable force reload mode. This is set to false by default and ADM file is loaded from a
     * file only on a first Exchange, and AtlasContext will be reused after that until endpoint is recreated. If this is
     * set to true, ADM file will be loaded from a file on every Exchange.
     *
     * @param forceReload true to enable force reload
     */
    public void setForceReload(boolean forceReload) {
        this.forceReload = forceReload;
    }

    public boolean isForceReload() {
        return forceReload;
    }

    /**
     * Resolves an endpoint for a per-Exchange resource URI by substituting the
     * configured resource URI inside this endpoint's URI.
     *
     * @param uri            this endpoint's URI
     * @param newResourceUri the resource URI requested by the Exchange header
     * @return the endpoint bound to the new resource URI
     */
    public AtlasMapEndpoint findOrCreateEndpoint(String uri, String newResourceUri) {
        String newUri = uri.replace(getResourceUri(), newResourceUri);
        log.debug("Getting endpoint with URI: {}", newUri);
        return getCamelContext().getEndpoint(newUri, AtlasMapEndpoint.class);
    }

    @Override
    protected void onExchange(Exchange exchange) throws Exception {
        Message incomingMessage = exchange.getIn();
        // A header may redirect this Exchange to a different mapping resource.
        String newResourceUri = incomingMessage.getHeader(AtlasMapConstants.ATLAS_RESOURCE_URI, String.class);
        if (newResourceUri != null) {
            incomingMessage.removeHeader(AtlasMapConstants.ATLAS_RESOURCE_URI);
            log.debug("{} set to {} creating new endpoint to handle exchange", AtlasMapConstants.ATLAS_RESOURCE_URI,
                    newResourceUri);
            AtlasMapEndpoint newEndpoint = findOrCreateEndpoint(getEndpointUri(), newResourceUri);
            newEndpoint.onExchange(exchange);
            return;
        }
        AtlasSession atlasSession = getOrCreateAtlasContext(incomingMessage).createSession();
        populateSourceDocuments(exchange, atlasSession);
        atlasSession.getAtlasContext().process(atlasSession);
        // Collect ERROR audits; WARN/INFO audits are only logged.
        List<Audit> errors = new ArrayList<>();
        for (Audit audit : atlasSession.getAudits().getAudit()) {
            switch (audit.getStatus()) {
            case ERROR:
                errors.add(audit);
                break;
            case WARN:
                LOG.warn("{}: Document='{}(ID:{})', path='{}'",
                        audit.getMessage(), audit.getDocName(), audit.getDocId(), audit.getPath());
                break;
            default:
                LOG.info("{}: Document='{}(ID:{})', path='{}'",
                        audit.getMessage(), audit.getDocName(), audit.getDocId(), audit.getPath());
            }
        }
        if (!errors.isEmpty()) {
            StringBuilder buf = new StringBuilder("Errors: ");
            errors.stream().forEach(a -> buf.append(
                    String.format("[%s: Document='%s(ID:%s)', path='%s'], ",
                            a.getMessage(), a.getDocName(), a.getDocId(), a.getPath())));
            throw new AtlasException(buf.toString());
        }
        populateTargetDocuments(atlasSession, exchange);
    }

    /**
     * Returns the AtlasContext to use for this Exchange: a context built from
     * the ATLAS_MAPPING header if present, otherwise the cached context
     * (unless forceReload), otherwise a new context loaded from resourceUri.
     *
     * @param incomingMessage the inbound message (header may carry a mapping)
     * @return the AtlasContext for this Exchange
     * @throws Exception if the mapping cannot be loaded or parsed
     */
    private AtlasContext getOrCreateAtlasContext(Message incomingMessage) throws Exception {
        String path = getResourceUri();
        ObjectHelper.notNull(path, "mappingUri");
        String content = incomingMessage.getHeader(AtlasMapConstants.ATLAS_MAPPING, String.class);
        if (content != null) {
            // use content from header; decode explicitly as UTF-8 instead of
            // the platform default charset (fixes mojibake on non-UTF-8 JVMs)
            InputStream is = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
            if (log.isDebugEnabled()) {
                log.debug("Atlas mapping content read from header {} for endpoint {}", AtlasMapConstants.ATLAS_MAPPING,
                        getEndpointUri());
            }
            // remove the header to avoid it being propagated in the routing
            incomingMessage.removeHeader(AtlasMapConstants.ATLAS_MAPPING);
            return atlasContextFactory.createContext(JSON, is);
        } else if (getAtlasContext() != null && !forceReload) {
            // no mapping specified in header, and found an existing context
            return getAtlasContext();
        }
        // No mapping in header, and no existing context or force reload is enabled. Create new one from resourceUri
        if (log.isDebugEnabled()) {
            log.debug("Atlas mapping content read from resourceUri: {} for endpoint {}",
                    path, getEndpointUri());
        }
        // Locale.ROOT keeps the extension check locale-independent (e.g. Turkish 'I')
        atlasContext = atlasContextFactory.createContext(
                path.toLowerCase(Locale.ROOT).endsWith("adm") ? ADM : JSON, getResourceAsInputStream());
        return atlasContext;
    }

    /**
     * Feeds source documents and a property strategy into the session.
     * Sources are resolved in priority order: the 'sourceMapName' exchange
     * property, a Map message body, message headers, then exchange properties.
     *
     * @param exchange the current Exchange
     * @param session  the AtlasMap session to populate
     */
    @SuppressWarnings("unchecked")
    private void populateSourceDocuments(Exchange exchange, AtlasSession session) {
        if (session.getMapping().getDataSource() == null) {
            return;
        }
        Message inMessage = exchange.getIn();
        CamelAtlasPropertyStrategy propertyStrategy = new CamelAtlasPropertyStrategy();
        propertyStrategy.setCurrentSourceMessage(inMessage);
        propertyStrategy.setTargetMessage(exchange.getMessage());
        propertyStrategy.setExchange(exchange);
        session.setAtlasPropertyStrategy(propertyStrategy);
        DataSource[] sourceDataSources = session.getMapping().getDataSource().stream()
                .filter(ds -> ds.getDataSourceType() == DataSourceType.SOURCE)
                .toArray(DataSource[]::new);
        if (sourceDataSources.length == 0) {
            // No declared sources: the plain message body is the default document.
            session.setDefaultSourceDocument(inMessage.getBody());
            return;
        }
        if (sourceDataSources.length == 1) {
            String docId = sourceDataSources[0].getId();
            Object payload = extractPayload(sourceDataSources[0], inMessage);
            if (docId == null || docId.isEmpty()) {
                session.setDefaultSourceDocument(payload);
            } else {
                session.setSourceDocument(docId, payload);
                propertyStrategy.setSourceMessage(docId, inMessage);
            }
            return;
        }
        // Multiple sources: resolve each document by its Document ID.
        Map<String, Message> sourceMessages = null;
        Map<String, Object> sourceDocuments = null;
        if (sourceMapName != null) {
            sourceMessages = exchange.getProperty(sourceMapName, Map.class);
        }
        if (sourceMessages == null) {
            Object body = inMessage.getBody();
            if (body instanceof Map) {
                sourceDocuments = (Map<String, Object>) body;
            } else {
                session.setDefaultSourceDocument(body);
            }
        }
        for (DataSource ds : sourceDataSources) {
            String docId = ds.getId();
            if (docId == null || docId.isEmpty()) {
                Object payload = extractPayload(ds, inMessage);
                session.setDefaultSourceDocument(payload);
            } else if (sourceMessages != null) {
                Object payload = extractPayload(ds, sourceMessages.get(docId));
                session.setSourceDocument(docId, payload);
                propertyStrategy.setSourceMessage(docId, sourceMessages.get(docId));
            } else if (sourceDocuments != null) {
                Object payload = sourceDocuments.get(docId);
                session.setSourceDocument(docId, payload);
            } else if (inMessage.getHeaders().containsKey(docId)) {
                Object payload = inMessage.getHeader(docId);
                session.setSourceDocument(docId, payload);
            } else if (exchange.getProperties().containsKey(docId)) {
                Object payload = exchange.getProperty(docId);
                session.setSourceDocument(docId, payload);
            } else {
                LOG.warn("Ignoring missing source document: '{}(ID:{})'", ds.getName(), ds.getId());
            }
        }
    }

    /**
     * Extracts the message body for a data source; non-Java data sources are
     * converted to String so AtlasMap can parse them.
     *
     * @param dataSource the data source describing the document, may be null
     * @param message    the message holding the payload, may be null
     * @return the payload, or null when either argument is null
     */
    private Object extractPayload(final DataSource dataSource, Message message) {
        if (dataSource == null || message == null) {
            return null;
        }
        Object body = null;
        if (dataSource.getUri() != null
                && !(dataSource.getUri().startsWith("atlas:core")
                        || dataSource.getUri().startsWith("atlas:java"))) {
            body = message.getBody(String.class);
        } else {
            body = message.getBody();
        }
        //Just in case, prepare for future calls
        MessageHelper.resetStreamCache(message);
        return body;
    }

    /**
     * Writes mapped target documents back to the Exchange according to
     * {@link #targetMapMode}.
     *
     * @param session  the processed AtlasMap session
     * @param exchange the current Exchange
     */
    private void populateTargetDocuments(AtlasSession session, Exchange exchange) {
        Message message = exchange.getMessage();
        if (session.getMapping().getDataSource() == null) {
            return;
        }
        DataSource[] targetDataSources = session.getMapping().getDataSource().stream()
                .filter(ds -> ds.getDataSourceType() == DataSourceType.TARGET)
                .toArray(DataSource[]::new);
        if (targetDataSources.length == 0) {
            Object newBody = session.getDefaultTargetDocument();
            message.setBody(newBody);
            return;
        }
        if (targetDataSources.length == 1) {
            String docId = targetDataSources[0].getId();
            if (docId == null || docId.isEmpty()) {
                Object newBody = session.getDefaultTargetDocument();
                message.setBody(newBody);
            } else {
                Object newBody = session.getTargetDocument(docId);
                message.setBody(newBody);
            }
            setContentType(targetDataSources[0], message);
            return;
        }
        // Multiple targets: gather them keyed by Document ID; the default
        // (ID-less) document additionally becomes the message body.
        Map<String, Object> targetDocuments = new HashMap<>();
        for (DataSource ds : targetDataSources) {
            String docId = ds.getId();
            if (docId == null || docId.isEmpty()) {
                targetDocuments.put(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID,
                        session.getDefaultTargetDocument());
                Object newBody = session.getDefaultTargetDocument();
                message.setBody(newBody);
                setContentType(ds, message);
            } else {
                targetDocuments.put(docId, session.getTargetDocument(docId));
            }
        }
        switch (targetMapMode) {
        case MAP:
            if (targetMapName != null) {
                exchange.setProperty(targetMapName, targetDocuments);
            } else {
                message.setBody(targetDocuments);
            }
            break;
        case MESSAGE_HEADER:
            // The default document already went to the body; don't duplicate it.
            targetDocuments.remove(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID);
            message.getHeaders().putAll(targetDocuments);
            break;
        case EXCHANGE_PROPERTY:
            targetDocuments.remove(io.atlasmap.api.AtlasConstants.DEFAULT_TARGET_DOCUMENT_ID);
            exchange.getProperties().putAll(targetDocuments);
            break;
        default:
            throw new IllegalArgumentException("Unknown targetMapMode: " + targetMapMode.name());
        }
    }

    /**
     * Sets the Content-Type header based on the data source URI scheme
     * (atlas:json / atlas:xml); other schemes leave the header untouched.
     *
     * @param ds      the data source describing the document
     * @param message the message whose Content-Type header is set
     */
    private void setContentType(DataSource ds, Message message) {
        if (ds.getUri() == null) {
            return;
        }
        if (ds.getUri().startsWith("atlas:json")) {
            message.setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_JSON);
        } else if (ds.getUri().startsWith("atlas:xml")) {
            message.setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_XML);
        }
    }
}
| |
package be.doji.productivity.trambucore.managers;
import be.doji.productivity.trambucore.TrambuTest;
import be.doji.productivity.trambucore.model.tracker.ActivityLog;
import be.doji.productivity.trambucore.model.tracker.TimeLog;
import be.doji.productivity.trambucore.testutil.FileUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.ParseException;
import java.util.*;
/**
 * Unit tests for {@code TimeTrackingManager}: log file round-tripping,
 * stopping active logs, and interval-based overview queries.
 */
public class TimeTrackingManagerTest extends TrambuTest {

    private static final String FILE_TIME_LOGS_TEST = "data/testTimeLog.txt";

    /* Unit tests for basic functionality */

    @Test public void testReadLogs() throws IOException, ParseException {
        String logFilePath = FileUtils.getTestPath(FILE_TIME_LOGS_TEST, this.getClass().getClassLoader());
        TimeTrackingManager manager = new TimeTrackingManager(logFilePath);
        manager.readLogs();
        // The fixture file contains exactly one activity with one log point.
        ActivityLog activityLog = manager.getLogForActivityId("fa183c05-fb22-4411-8f94-12c954484f22");
        Assert.assertNotNull(activityLog);
        Assert.assertEquals("fa183c05-fb22-4411-8f94-12c954484f22", activityLog.getActivityId().toString());
        List<TimeLog> logPoints = activityLog.getLogpoints();
        Assert.assertNotNull(logPoints);
        Assert.assertEquals(1, logPoints.size());
    }

    @Test public void testUpdateFileEmptyFile() throws IOException, ParseException {
        String logFilePath = FileUtils.getTestPath(FILE_TIME_LOGS_TEST, this.getClass().getClassLoader());
        TimeTrackingManager manager = new TimeTrackingManager(logFilePath);
        manager.readLogs();
        Assert.assertFalse(manager.getLogs().isEmpty());
        Assert.assertEquals(1, manager.getLogs().size());
        // Switching to an empty file must drop the previously loaded logs.
        Path emptyFile = createTempFile();
        manager.updateFileLocation(emptyFile.toString());
        Assert.assertTrue(manager.getLogs().isEmpty());
        Files.delete(emptyFile);
    }

    @Test public void testWriteLogs() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        Assert.assertTrue(manager.getLogs().isEmpty());
        // Writing an empty log set leaves the file empty.
        manager.writeLogs();
        Assert.assertTrue(Files.readAllLines(logFile).isEmpty());
        ActivityLog activityLog = new ActivityLog(UUID.randomUUID());
        manager.save(activityLog);
        manager.writeLogs();
        List<String> writtenLines = Files.readAllLines(logFile);
        Assert.assertFalse(writtenLines.isEmpty());
        // One saved activity serializes to two lines.
        Assert.assertEquals(2, writtenLines.size());
        Files.delete(logFile);
    }

    @Test public void testStopAll() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        ActivityLog firstActive = new ActivityLog(UUID.randomUUID());
        firstActive.startLog();
        Assert.assertTrue(firstActive.getActiveLog().isPresent());
        manager.save(firstActive);
        ActivityLog secondActive = new ActivityLog(UUID.randomUUID());
        secondActive.startLog();
        Assert.assertTrue(secondActive.getActiveLog().isPresent());
        manager.save(secondActive);
        List<ActivityLog> logsBeforeStop = manager.getLogs();
        Assert.assertFalse(logsBeforeStop.isEmpty());
        Assert.assertEquals(2, logsBeforeStop.size());
        for (ActivityLog log : logsBeforeStop) {
            Assert.assertTrue(log.getActiveLog().isPresent());
        }
        // stopAll() must close every running log point.
        manager.stopAll();
        List<ActivityLog> logsAfterStop = manager.getLogs();
        Assert.assertFalse(logsAfterStop.isEmpty());
        Assert.assertEquals(2, logsAfterStop.size());
        for (ActivityLog log : logsAfterStop) {
            Assert.assertFalse(log.getActiveLog().isPresent());
        }
        Files.delete(logFile);
    }

    /* Unit tests for overview functionality */

    /**
     * Creates and saves two activities: one logged 2017-12-01 14:00-18:00,
     * one logged 2017-12-04 14:00-19:00.
     *
     * @param manager the manager to save both activities into
     * @return the two saved logs, index 0 = Dec 1st, index 1 = Dec 4th
     */
    private ActivityLog[] saveTwoDecemberActivities(TimeTrackingManager manager) {
        ActivityLog first = new ActivityLog(UUID.randomUUID());
        Calendar firstStart = new GregorianCalendar(2017, Calendar.DECEMBER, 1, 14, 0, 0);
        Calendar firstEnd = new GregorianCalendar(2017, Calendar.DECEMBER, 1, 18, 0, 0);
        first.addLogPoint(createTimeLog(firstStart.getTime(), firstEnd.getTime()));
        ActivityLog second = new ActivityLog(UUID.randomUUID());
        Calendar secondStart = new GregorianCalendar(2017, Calendar.DECEMBER, 4, 14, 0, 0);
        Calendar secondEnd = new GregorianCalendar(2017, Calendar.DECEMBER, 4, 19, 0, 0);
        second.addLogPoint(createTimeLog(secondStart.getTime(), secondEnd.getTime()));
        manager.save(first);
        manager.save(second);
        return new ActivityLog[] { first, second };
    }

    @Test public void getActivityForIntervalOneFits() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        ActivityLog[] savedLogs = saveTwoDecemberActivities(manager);
        // Interval covers Dec 1-2 only, so just the first activity matches.
        Calendar intervalStart = new GregorianCalendar(2017, Calendar.DECEMBER, 1);
        Calendar intervalEnd = new GregorianCalendar(2017, Calendar.DECEMBER, 2);
        List<ActivityLog> matches = manager
                .getActivityLogsInInterval(intervalStart.getTime(), intervalEnd.getTime());
        Assert.assertNotNull(matches);
        Assert.assertEquals(1, matches.size());
        Assert.assertEquals(savedLogs[0].getActivityId(), matches.get(0).getActivityId());
        Files.delete(logFile);
    }

    @Test public void getActivityForIntervalAllFit() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        ActivityLog[] savedLogs = saveTwoDecemberActivities(manager);
        // Interval covers Dec 1-7, so both activities match.
        Calendar intervalStart = new GregorianCalendar(2017, Calendar.DECEMBER, 1);
        Calendar intervalEnd = new GregorianCalendar(2017, Calendar.DECEMBER, 7);
        List<ActivityLog> matches = manager
                .getActivityLogsInInterval(intervalStart.getTime(), intervalEnd.getTime());
        Assert.assertNotNull(matches);
        Assert.assertEquals(2, matches.size());
        Assert.assertEquals(savedLogs[0].getActivityId(), matches.get(0).getActivityId());
        Assert.assertEquals(savedLogs[1].getActivityId(), matches.get(1).getActivityId());
        Files.delete(logFile);
    }

    @Test public void getActivityForIntervalNoneFit() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        saveTwoDecemberActivities(manager);
        // Interval covers Dec 20-30, after both activities; nothing matches.
        Calendar intervalStart = new GregorianCalendar(2017, Calendar.DECEMBER, 20);
        Calendar intervalEnd = new GregorianCalendar(2017, Calendar.DECEMBER, 30);
        List<ActivityLog> matches = manager
                .getActivityLogsInInterval(intervalStart.getTime(), intervalEnd.getTime());
        Assert.assertNotNull(matches);
        Assert.assertTrue(matches.isEmpty());
        Files.delete(logFile);
    }

    @Test public void getActivityForIntervalActiveItemIssueCurrentDate() throws IOException {
        Path logFile = createTempFile();
        TimeTrackingManager manager = new TimeTrackingManager(logFile.toString());
        Calendar today = new GregorianCalendar();
        ActivityLog finishedActivity = new ActivityLog(UUID.randomUUID());
        Calendar finishedStart = new GregorianCalendar(1999, Calendar.DECEMBER, 1, 14, 0, 0);
        Calendar finishedEnd = new GregorianCalendar(1999, Calendar.DECEMBER, 1, 18, 0, 0);
        finishedActivity.addLogPoint(createTimeLog(finishedStart.getTime(), finishedEnd.getTime()));
        // Second activity has a still-running (active, no end time) log point.
        ActivityLog runningActivity = new ActivityLog(UUID.randomUUID());
        Calendar runningStart = new GregorianCalendar(1999, Calendar.DECEMBER, 4, 14, 0, 0);
        TimeLog runningLog = new TimeLog();
        runningLog.setActive(true);
        runningLog.setStartTime(runningStart.getTime());
        runningActivity.addLogPoint(runningLog);
        manager.save(finishedActivity);
        manager.save(runningActivity);
        Calendar intervalStart = new GregorianCalendar(1900, Calendar.OCTOBER, 1);
        Calendar intervalEnd = new GregorianCalendar();
        intervalEnd.setTime(today.getTime());
        intervalEnd.add(Calendar.HOUR, 100);
        List<ActivityLog> matches = manager
                .getActivityLogsInInterval(intervalStart.getTime(), intervalEnd.getTime());
        Assert.assertNotNull(matches);
        Assert.assertEquals(2, matches.size());
        // The active log must have been closed with an end time of "now".
        ActivityLog returnedRunningActivity = matches.get(1);
        Assert.assertFalse(returnedRunningActivity.getActiveLog().isPresent());
        List<TimeLog> returnedLogPoints = returnedRunningActivity.getLogpoints();
        Assert.assertNotNull(returnedLogPoints);
        Assert.assertEquals(1, returnedLogPoints.size());
        TimeLog returnedLogPoint = returnedLogPoints.get(0);
        Assert.assertNotNull(returnedLogPoint);
        Date closedEndTime = returnedLogPoint.getEndTime();
        Assert.assertNotNull(closedEndTime);
        Calendar closedEndCalendar = new GregorianCalendar();
        closedEndCalendar.setTime(closedEndTime);
        Assert.assertEquals(today.get(Calendar.YEAR), closedEndCalendar.get(Calendar.YEAR));
        Assert.assertEquals(today.get(Calendar.MONTH), closedEndCalendar.get(Calendar.MONTH));
        Assert.assertEquals(today.get(Calendar.DAY_OF_MONTH), closedEndCalendar.get(Calendar.DAY_OF_MONTH));
        Assert.assertEquals(today.get(Calendar.HOUR), closedEndCalendar.get(Calendar.HOUR));
        Files.delete(logFile);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.io.Serializable;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import org.apache.ignite.GridTestTask;
import org.apache.ignite.Ignite;
import org.apache.ignite.cluster.ClusterMetrics;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.processors.task.GridInternal;
import org.apache.ignite.internal.util.lang.GridAbsPredicate;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.messaging.MessagingListenActor;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import static org.apache.ignite.events.EventType.EVT_NODE_METRICS_UPDATED;
/**
* Grid node metrics self test.
*/
@GridCommonTest(group = "Kernal Self")
public class ClusterNodeMetricsSelfTest extends GridCommonAbstractTest {
/** */
private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);
/** Test message size. */
private static final int MSG_SIZE = 1024;
/** Number of messages. */
private static final int MSG_CNT = 3;
    /**
     * {@inheritDoc}
     * <p>
     * Starts a single local grid node before each test.
     */
    @Override protected void beforeTest() throws Exception {
        startGrid();
    }
    /**
     * {@inheritDoc}
     * <p>
     * Stops every grid node started during the test (including extra nodes
     * some tests start themselves).
     */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }
    /**
     * {@inheritDoc}
     * <p>
     * Uses static-IP TCP discovery, configures no caches, and sets the
     * metrics update frequency to 0 to keep node metrics fresh for the
     * assertions in this test class.
     */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);
        TcpDiscoverySpi spi = new TcpDiscoverySpi();
        spi.setIpFinder(IP_FINDER);
        cfg.setDiscoverySpi(spi);
        // No cache configurations: caches are not needed by these tests.
        cfg.setCacheConfiguration();
        // NOTE(review): 0 presumably means "update metrics as often as
        // possible" — confirm against IgniteConfiguration docs.
        cfg.setMetricsUpdateFrequency(0);
        return cfg;
    }
    /**
     * Executes a single user task, waits for two metrics-update events, and
     * verifies the job/task counters on the local node reflect exactly one
     * executed task with nothing cancelled, rejected, or still running.
     *
     * @throws Exception If failed.
     */
    public void testSingleTaskMetrics() throws Exception {
        Ignite ignite = grid();
        ignite.compute().execute(new GridTestTask(), "testArg");
        // Let metrics update twice.
        final CountDownLatch latch = new CountDownLatch(2);
        ignite.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt.type() == EVT_NODE_METRICS_UPDATED;
                latch.countDown();
                return true;
            }
        }, EVT_NODE_METRICS_UPDATED);
        // Wait for metrics update.
        latch.await();
        ClusterMetrics metrics = ignite.cluster().localNode().metrics();
        info("Node metrics: " + metrics);
        // Exactly one job ran to completion; no cancellations/rejections and
        // nothing should still be active or waiting.
        assert metrics.getAverageActiveJobs() > 0;
        assert metrics.getAverageCancelledJobs() == 0;
        assert metrics.getAverageJobExecuteTime() >= 0;
        assert metrics.getAverageJobWaitTime() >= 0;
        assert metrics.getAverageRejectedJobs() == 0;
        assert metrics.getAverageWaitingJobs() == 0;
        assert metrics.getCurrentActiveJobs() == 0;
        assert metrics.getCurrentCancelledJobs() == 0;
        assert metrics.getCurrentJobExecuteTime() == 0;
        assert metrics.getCurrentJobWaitTime() == 0;
        assert metrics.getCurrentWaitingJobs() == 0;
        assert metrics.getMaximumActiveJobs() == 1;
        assert metrics.getMaximumCancelledJobs() == 0;
        assert metrics.getMaximumJobExecuteTime() >= 0;
        assert metrics.getMaximumJobWaitTime() >= 0;
        assert metrics.getMaximumRejectedJobs() == 0;
        assert metrics.getMaximumWaitingJobs() == 0;
        assert metrics.getTotalCancelledJobs() == 0;
        assert metrics.getTotalExecutedJobs() == 1;
        assert metrics.getTotalRejectedJobs() == 0;
        assert metrics.getTotalExecutedTasks() == 1;
        // Sanity: the maximum execute time can never be below the average.
        assertTrue("MaximumJobExecuteTime=" + metrics.getMaximumJobExecuteTime() +
            " is less than AverageJobExecuteTime=" + metrics.getAverageJobExecuteTime(),
            metrics.getMaximumJobExecuteTime() >= metrics.getAverageJobExecuteTime());
    }
    /**
     * Executes an internal ({@code @GridInternal}, visor-named) task and
     * verifies it leaves every job/task metric at zero — internal tasks must
     * not be reflected in user-visible node metrics.
     *
     * @throws Exception If failed.
     */
    public void testInternalTaskMetrics() throws Exception {
        Ignite ignite = grid();
        // Visor task is internal and should not affect metrics.
        ignite.compute().withName("visor-test-task").execute(new TestInternalTask(), "testArg");
        // Let metrics update twice.
        final CountDownLatch latch = new CountDownLatch(2);
        ignite.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt.type() == EVT_NODE_METRICS_UPDATED;
                latch.countDown();
                return true;
            }
        }, EVT_NODE_METRICS_UPDATED);
        // Wait for metrics update.
        latch.await();
        ClusterMetrics metrics = ignite.cluster().localNode().metrics();
        info("Node metrics: " + metrics);
        // All counters stay at zero: the internal task is invisible to metrics.
        assert metrics.getAverageActiveJobs() == 0;
        assert metrics.getAverageCancelledJobs() == 0;
        assert metrics.getAverageJobExecuteTime() == 0;
        assert metrics.getAverageJobWaitTime() == 0;
        assert metrics.getAverageRejectedJobs() == 0;
        assert metrics.getAverageWaitingJobs() == 0;
        assert metrics.getCurrentActiveJobs() == 0;
        assert metrics.getCurrentCancelledJobs() == 0;
        assert metrics.getCurrentJobExecuteTime() == 0;
        assert metrics.getCurrentJobWaitTime() == 0;
        assert metrics.getCurrentWaitingJobs() == 0;
        assert metrics.getMaximumActiveJobs() == 0;
        assert metrics.getMaximumCancelledJobs() == 0;
        assert metrics.getMaximumJobExecuteTime() == 0;
        assert metrics.getMaximumJobWaitTime() == 0;
        assert metrics.getMaximumRejectedJobs() == 0;
        assert metrics.getMaximumWaitingJobs() == 0;
        assert metrics.getTotalCancelledJobs() == 0;
        assert metrics.getTotalExecutedJobs() == 0;
        assert metrics.getTotalRejectedJobs() == 0;
        assert metrics.getTotalExecutedTasks() == 0;
        // Sanity: the maximum execute time can never be below the average.
        assertTrue("MaximumJobExecuteTime=" + metrics.getMaximumJobExecuteTime() +
            " is less than AverageJobExecuteTime=" + metrics.getAverageJobExecuteTime(),
            metrics.getMaximumJobExecuteTime() >= metrics.getAverageJobExecuteTime());
    }
/**
 * Sends {@code MSG_CNT} messages from node 0 to node 1, which echoes each one
 * back, then checks that both nodes' sent/received message and byte counters
 * reflect at least that traffic.
 *
 * @throws Exception If failed.
 */
public void testIoMetrics() throws Exception {
    Ignite ignite0 = grid();
    Ignite ignite1 = startGrid(1);
    Object msg = new TestMessage();
    // Marshalled size of one message; used below as a lower bound on byte counters.
    int size = ignite0.configuration().getMarshaller().marshal(msg).length;
    assert size > MSG_SIZE;
    // Released once node 0 has received all echoed responses.
    final CountDownLatch latch = new CountDownLatch(MSG_CNT);
    ignite0.message().localListen(null, new MessagingListenActor<TestMessage>() {
        @Override protected void receive(UUID nodeId, TestMessage rcvMsg) throws Throwable {
            latch.countDown();
        }
    });
    // Node 1 echoes every message straight back to the sender.
    ignite1.message().localListen(null, new MessagingListenActor<TestMessage>() {
        @Override protected void receive(UUID nodeId, TestMessage rcvMsg) throws Throwable {
            respond(rcvMsg);
        }
    });
    for (int i = 0; i < MSG_CNT; i++)
        message(ignite0.cluster().forRemotes()).send(null, msg);
    latch.await();
    ClusterMetrics metrics = ignite0.cluster().localNode().metrics();
    info("Node 0 metrics: " + metrics);
    // Time sync messages are being sent.
    assert metrics.getSentMessagesCount() >= MSG_CNT;
    assert metrics.getSentBytesCount() > size * MSG_CNT;
    assert metrics.getReceivedMessagesCount() >= MSG_CNT;
    assert metrics.getReceivedBytesCount() > size * MSG_CNT;
    metrics = ignite1.cluster().localNode().metrics();
    info("Node 1 metrics: " + metrics);
    // Time sync messages are being sent.
    assert metrics.getSentMessagesCount() >= MSG_CNT;
    assert metrics.getSentBytesCount() > size * MSG_CNT;
    assert metrics.getReceivedMessagesCount() >= MSG_CNT;
    assert metrics.getReceivedBytesCount() > size * MSG_CNT;
}
/**
 * Starts a second node and compares single-node metrics against the aggregated
 * metrics of a two-node cluster group.
 *
 * @throws Exception If failed.
 */
public void testClusterNodeMetrics() throws Exception {
    final Ignite ignite0 = grid();
    final Ignite ignite1 = startGrid(1);
    // Wait (up to 3s) until both nodes see a topology of two.
    GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            return ignite0.cluster().nodes().size() == 2 && ignite1.cluster().nodes().size() == 2;
        }
    }, 3000L);
    ClusterMetrics metrics0 = ignite0.cluster().localNode().metrics();
    // Aggregated metrics over the explicit two-node group.
    ClusterMetrics nodesMetrics =
        ignite0.cluster().forNode(ignite0.cluster().localNode(), ignite1.cluster().localNode()).metrics();
    // Both nodes run in the same JVM, so CPU counts must agree.
    assertEquals(metrics0.getTotalCpus(), nodesMetrics.getTotalCpus());
    assertEquals(1, metrics0.getTotalNodes());
    assertEquals(2, nodesMetrics.getTotalNodes());
    assert metrics0.getHeapMemoryUsed() > 0;
    assert metrics0.getHeapMemoryTotal() > 0;
    assert metrics0.getNonHeapMemoryMaximum() > 0;
}
/**
 * Test message. The embedded byte array guarantees the marshalled size exceeds
 * {@code MSG_SIZE}, which {@code testIoMetrics()} relies on when bounding the
 * sent/received byte counters.
 */
@SuppressWarnings("UnusedDeclaration")
private static class TestMessage implements Serializable {
    /** Payload dominating the marshalled size of this message. */
    private final byte[] arr = new byte[MSG_SIZE];
}
/**
 * Test internal task. Marked {@code @GridInternal} so its execution must not be
 * reflected in node metrics (verified by {@code testInternalTaskMetrics()}).
 */
@GridInternal
private static class TestInternalTask extends GridTestTask {
    // No-op.
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.util;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifier;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrCodeBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.lang.resolve.processors.AccessorResolverProcessor;
import java.beans.Introspector;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Utility methods for working with Groovy properties: recognising getter/setter
 * accessor methods, converting between accessor names and property names,
 * locating accessors and their backing fields, and generating accessor
 * prototypes for fields.
 *
 * @author ilyas
 */
public class GroovyPropertyUtils {
  private static final Logger LOG = Logger.getInstance(GroovyPropertyUtils.class);

  public static final String IS_PREFIX = "is";
  public static final String GET_PREFIX = "get";
  public static final String SET_PREFIX = "set";

  /** Not instantiable: all members are static. */
  private GroovyPropertyUtils() {
  }

  /** Returns all setters matching the field's name and staticness, declared in the field's own class only. */
  public static PsiMethod[] getAllSettersByField(PsiField field) {
    return getAllSetters(field.getContainingClass(), field.getName(), field.hasModifierProperty(PsiModifier.STATIC), false);
  }

  /** Returns all getters matching the field's name and staticness, declared in the field's own class only. */
  @NotNull
  public static PsiMethod[] getAllGettersByField(PsiField field) {
    return getAllGetters(field.getContainingClass(), field.getName(), field.hasModifierProperty(PsiModifier.STATIC), false);
  }

  /** Finds a setter for the field, searching superclasses as well, or null. */
  @Nullable
  public static PsiMethod findSetterForField(PsiField field) {
    final PsiClass containingClass = field.getContainingClass();
    final String propertyName = field.getName();
    final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
    return findPropertySetter(containingClass, propertyName, isStatic, true);
  }

  /** Finds a getter for the field, searching superclasses as well, or null. */
  @Nullable
  public static PsiMethod findGetterForField(PsiField field) {
    final PsiClass containingClass = field.getContainingClass();
    final String propertyName = field.getName();
    final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
    return findPropertyGetter(containingClass, propertyName, isStatic, true);
  }

  /**
   * Resolves the setter for {@code propertyName} against {@code type}. When
   * {@code type} is null, the type of {@code this} at {@code context} is used
   * instead. Returns the setter only if resolution yields exactly one candidate.
   */
  @Nullable
  public static PsiMethod findPropertySetter(@Nullable PsiType type, String propertyName, @NotNull GroovyPsiElement context) {
    final String setterName = getSetterName(propertyName);
    if (type == null) {
      final GrExpression fromText = GroovyPsiElementFactory.getInstance(context.getProject()).createExpressionFromText("this", context);
      return findPropertySetter(fromText.getType(), propertyName, context);
    }
    final AccessorResolverProcessor processor = new AccessorResolverProcessor(setterName, propertyName, context, false);
    ResolveUtil.processAllDeclarations(type, processor, ResolveState.initial(), context);
    final GroovyResolveResult[] setterCandidates = processor.getCandidates();
    return PsiImplUtil.extractUniqueElement(setterCandidates);
  }

  /**
   * Returns the methods to scan for accessors: the class's own methods, or all
   * methods including inherited ones when {@code checkSuperClasses} is set.
   * Shared by the four finder methods below to avoid duplicated logic.
   */
  private static PsiMethod[] methodsToCheck(@NotNull PsiClass aClass, boolean checkSuperClasses) {
    return checkSuperClasses ? aClass.getAllMethods() : aClass.getMethods();
  }

  /** Returns the first setter of {@code propertyName} with the requested staticness, or null. */
  @Nullable
  public static PsiMethod findPropertySetter(PsiClass aClass, String propertyName, boolean isStatic, boolean checkSuperClasses) {
    if (aClass == null) return null;
    for (PsiMethod method : methodsToCheck(aClass, checkSuperClasses)) {
      if (method.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
      if (isSimplePropertySetter(method) && propertyName.equals(getPropertyNameBySetter(method))) {
        return method;
      }
    }
    return null;
  }

  /** Returns every getter of {@code propertyName} with the requested staticness. */
  @NotNull
  public static PsiMethod[] getAllGetters(PsiClass aClass, @NotNull String propertyName, boolean isStatic, boolean checkSuperClasses) {
    if (aClass == null) return PsiMethod.EMPTY_ARRAY;
    List<PsiMethod> result = new ArrayList<>();
    for (PsiMethod method : methodsToCheck(aClass, checkSuperClasses)) {
      if (method.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
      if (isSimplePropertyGetter(method) && propertyName.equals(getPropertyNameByGetter(method))) {
        result.add(method);
      }
    }
    return result.toArray(PsiMethod.EMPTY_ARRAY);
  }

  /** Returns every setter of {@code propertyName} with the requested staticness. */
  @NotNull
  public static PsiMethod[] getAllSetters(PsiClass aClass, @NotNull String propertyName, boolean isStatic, boolean checkSuperClasses) {
    if (aClass == null) return PsiMethod.EMPTY_ARRAY;
    List<PsiMethod> result = new ArrayList<>();
    for (PsiMethod method : methodsToCheck(aClass, checkSuperClasses)) {
      if (method.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
      if (isSimplePropertySetter(method) && propertyName.equals(getPropertyNameBySetter(method))) {
        result.add(method);
      }
    }
    return result.toArray(PsiMethod.EMPTY_ARRAY);
  }

  /**
   * Returns the first getter of {@code propertyName}, or null.
   *
   * @param isStatic tri-state: {@code null} matches both static and instance getters
   */
  @Nullable
  public static PsiMethod findPropertyGetter(@Nullable PsiClass aClass,
                                             String propertyName,
                                             @Nullable Boolean isStatic,
                                             boolean checkSuperClasses) {
    if (aClass == null) return null;
    for (PsiMethod method : methodsToCheck(aClass, checkSuperClasses)) {
      if (isStatic != null && method.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
      if (isSimplePropertyGetter(method) && propertyName.equals(getPropertyNameByGetter(method))) {
        return method;
      }
    }
    return null;
  }

  /** True if the method is a simple property getter or setter. */
  public static boolean isSimplePropertyAccessor(PsiMethod method) {
    return isSimplePropertyGetter(method) || isSimplePropertySetter(method);
  }

  /** True if the method looks like a property getter for any property; the exact return type is not checked. */
  public static boolean isSimplePropertyGetter(PsiMethod method) {
    return isSimplePropertyGetter(method, null);
  }

  /**
   * True if {@code method} is a no-arg, non-void method with a getter-shaped
   * name mapping to {@code propertyName} (any property when {@code propertyName}
   * is null). An "is"-prefixed getter must additionally return {@code boolean}.
   */
  public static boolean isSimplePropertyGetter(PsiMethod method, @Nullable String propertyName) {
    if (method == null || method.isConstructor()) return false;
    if (method.getParameterList().getParametersCount() != 0) return false;
    if (!isGetterName(method.getName())) return false;
    if (method.getName().startsWith(IS_PREFIX) && !PsiType.BOOLEAN.equals(method.getReturnType())) {
      return false;
    }
    if (PsiType.VOID.equals(method.getReturnType())) return false;
    if (propertyName == null) return true;
    final String byGetter = getPropertyNameByGetter(method);
    // When the accessor-derived name is not itself a valid property name, fall
    // back to the name derived purely from the method name.
    return propertyName.equals(byGetter) || (!isPropertyName(byGetter) && propertyName.equals(
      getPropertyNameByGetterName(method.getName(), PsiType.BOOLEAN.equals(method.getReturnType()))));
  }

  /** True if the method looks like a property setter for any property. */
  public static boolean isSimplePropertySetter(PsiMethod method) {
    return isSimplePropertySetter(method, null);
  }

  /**
   * True if {@code method} is a one-arg method with a setter-shaped name mapping
   * to {@code propertyName} (any property when {@code propertyName} is null).
   */
  public static boolean isSimplePropertySetter(PsiMethod method, @Nullable String propertyName) {
    if (method == null || method.isConstructor()) return false;
    if (method.getParameterList().getParametersCount() != 1) return false;
    if (!isSetterName(method.getName())) return false;
    if (propertyName == null) return true;
    final String bySetter = getPropertyNameBySetter(method);
    return propertyName.equals(bySetter) || (!isPropertyName(bySetter) && propertyName.equals(getPropertyNameBySetterName(method.getName())));
  }

  /** Returns the property name exposed by the getter, or null if the name is not getter-shaped. */
  @Nullable
  public static String getPropertyNameByGetter(PsiMethod getterMethod) {
    if (getterMethod instanceof GrAccessorMethod) {
      // Synthetic accessor: the property name is the backing field's name.
      return ((GrAccessorMethod)getterMethod).getProperty().getName();
    }
    @NonNls String methodName = getterMethod.getName();
    final boolean isPropertyBoolean = PsiType.BOOLEAN.equals(getterMethod.getReturnType());
    return getPropertyNameByGetterName(methodName, isPropertyBoolean);
  }

  /**
   * Derives a property name from a getter name: strips "get", or "is" when
   * {@code canBeBoolean} is set. Returns null for non-getter names.
   */
  @Nullable
  public static String getPropertyNameByGetterName(@NotNull String methodName, boolean canBeBoolean) {
    if (methodName.startsWith(GET_PREFIX) && methodName.length() > 3) {
      return decapitalize(methodName.substring(3));
    }
    if (canBeBoolean && methodName.startsWith(IS_PREFIX) && methodName.length() > 2) {
      return decapitalize(methodName.substring(2));
    }
    return null;
  }

  /** Returns the property name exposed by the setter, or null if the name is not setter-shaped. */
  @Nullable
  public static String getPropertyNameBySetter(PsiMethod setterMethod) {
    if (setterMethod instanceof GrAccessorMethod) {
      // Synthetic accessor: the property name is the backing field's name.
      return ((GrAccessorMethod)setterMethod).getProperty().getName();
    }
    @NonNls String methodName = setterMethod.getName();
    return getPropertyNameBySetterName(methodName);
  }

  /** Derives a property name from a setter name by stripping "set"; null for non-setter names. */
  @Nullable
  public static String getPropertyNameBySetterName(@NotNull String methodName) {
    if (methodName.startsWith(SET_PREFIX) && methodName.length() > 3) {
      return StringUtil.decapitalize(methodName.substring(3));
    }
    else {
      return null;
    }
  }

  /** Derives a property name from either a getter or a setter name; null otherwise. */
  @Nullable
  public static String getPropertyNameByAccessorName(String accessorName) {
    if (isGetterName(accessorName)) {
      return getPropertyNameByGetterName(accessorName, true);
    }
    else if (isSetterName(accessorName)) {
      return getPropertyNameBySetterName(accessorName);
    }
    return null;
  }

  /** Returns the property name of a simple accessor method; null if it is not one. */
  @Nullable
  public static String getPropertyName(PsiMethod accessor) {
    if (isSimplePropertyGetter(accessor)) return getPropertyNameByGetter(accessor);
    if (isSimplePropertySetter(accessor)) return getPropertyNameBySetter(accessor);
    return null;
  }

  /**
   * True if {@code name} is getter-shaped: a "get"/"is" prefix followed either by
   * an uppercase character, or by any character with an uppercase one right after
   * it (accepting JavaBeans-style names such as {@code getxIndex}).
   */
  public static boolean isGetterName(@NotNull String name) {
    int prefixLength;
    if (name.startsWith(GET_PREFIX)) {
      prefixLength = 3;
    }
    else if (name.startsWith(IS_PREFIX)) {
      prefixLength = 2;
    }
    else {
      return false;
    }
    if (name.length() == prefixLength) return false;
    if (isUpperCase(name.charAt(prefixLength))) return true;
    return name.length() > prefixLength + 1 && isUpperCase(name.charAt(prefixLength + 1));
  }

  /** Builds a "get"-prefixed getter name for the property. */
  public static String getGetterNameNonBoolean(@NotNull String name) {
    return getAccessorName(GET_PREFIX, name);
  }

  /** Builds an "is"-prefixed getter name for the property. */
  public static String getGetterNameBoolean(@NotNull String name) {
    return getAccessorName(IS_PREFIX, name);
  }

  /** Builds a "set"-prefixed setter name for the property. */
  public static String getSetterName(@NotNull String name) {
    // Was the string literal "set"; use the shared SET_PREFIX constant for consistency.
    return getAccessorName(SET_PREFIX, name);
  }

  /**
   * Concatenates {@code prefix} and the capitalized property name. Per JavaBeans
   * convention, a name whose second character is already uppercase (e.g.
   * {@code xIndex}) is appended unchanged.
   */
  public static String getAccessorName(String prefix, String name) {
    if (name.isEmpty()) return prefix;
    StringBuilder sb = new StringBuilder();
    sb.append(prefix);
    if (name.length() > 1 && Character.isUpperCase(name.charAt(1))) {
      sb.append(name);
    }
    else {
      sb.append(Character.toUpperCase(name.charAt(0)));
      sb.append(name, 1, name.length());
    }
    return sb.toString();
  }

  /**
   * Returns getter names in priority order: the boolean ("is") form first, then
   * the plain ("get") form.
   *
   * @param name property name
   * @return getter names
   */
  public static String[] suggestGettersName(@NotNull String name) {
    return new String[]{getGetterNameBoolean(name), getGetterNameNonBoolean(name)};
  }

  /**
   * True unless the name starts with an uppercase letter followed by a
   * non-uppercase one (which is not a valid decapitalized property name).
   */
  public static boolean isPropertyName(String name) {
    if (name.isEmpty()) return false;
    if (Character.isUpperCase(name.charAt(0)) && (name.length() == 1 || !Character.isUpperCase(name.charAt(1)))) return false;
    return true;
  }

  /** Returns candidate setter names for the property (currently just the "set" form). */
  public static String[] suggestSettersName(@NotNull String name) {
    return new String[]{getSetterName(name)};
  }

  /**
   * True if {@code name} is setter-shaped: "set" followed by an uppercase
   * character (per {@link #isUpperCase}, non-letters also count as uppercase).
   */
  public static boolean isSetterName(String name) {
    // The original expression repeated isUpperCase(name.charAt(3)) on both sides
    // of '||', making the second operand dead code; it reduces to this check.
    return name != null
           && name.startsWith(SET_PREFIX)
           && name.length() > 3
           && isUpperCase(name.charAt(3));
  }

  /**
   * True if the class exposes {@code propertyName} as a Groovy property: either a
   * property field, or a public getter/setter with the requested staticness.
   */
  public static boolean isProperty(@Nullable PsiClass aClass, @Nullable String propertyName, boolean isStatic) {
    if (aClass == null || propertyName == null) return false;
    final PsiField field = aClass.findFieldByName(propertyName, true);
    if (field instanceof GrField && ((GrField)field).isProperty() && field.hasModifierProperty(PsiModifier.STATIC) == isStatic) return true;
    final PsiMethod getter = findPropertyGetter(aClass, propertyName, isStatic, true);
    if (getter != null && getter.hasModifierProperty(PsiModifier.PUBLIC)) return true;
    final PsiMethod setter = findPropertySetter(aClass, propertyName, isStatic, true);
    return setter != null && setter.hasModifierProperty(PsiModifier.PUBLIC);
  }

  /** True if the field is exposed as a property of its containing class. */
  public static boolean isProperty(GrField field) {
    final PsiClass clazz = field.getContainingClass();
    return isProperty(clazz, field.getName(), field.hasModifierProperty(PsiModifier.STATIC));
  }

  /**
   * Deliberately treats every non-letter character as "uppercase"
   * (toUpperCase of a non-letter returns the character itself), so names like
   * {@code set_name} are accepted as accessors.
   */
  private static boolean isUpperCase(char c) {
    return Character.toUpperCase(c) == c;
  }

  /** Capitalizes the first character, unless the second is already uppercase (JavaBeans rule). */
  public static String capitalize(String s) {
    if (s.isEmpty()) return s;
    if (s.length() == 1) return s.toUpperCase();
    if (Character.isUpperCase(s.charAt(1))) return s;
    final char[] chars = s.toCharArray();
    chars[0] = Character.toUpperCase(chars[0]);
    return new String(chars);
  }

  /** JavaBeans-style decapitalization (leaves names like "URL" unchanged). */
  public static String decapitalize(String s) {
    return Introspector.decapitalize(s);
  }

  /**
   * Finds the field backing {@code accessor}: same property name and the same
   * staticness. Returns null when no such field exists.
   */
  @Nullable
  public static PsiField findFieldForAccessor(PsiMethod accessor, boolean checkSuperClasses) {
    final PsiClass psiClass = accessor.getContainingClass();
    if (psiClass == null) return null;
    PsiField field = null;
    if (!checkSuperClasses) {
      // NOTE(review): passes 'true' (search base classes) to findFieldByName even
      // though this is the !checkSuperClasses branch, which looks inconsistent
      // with the else branch; preserved as-is — confirm intent before changing.
      field = psiClass.findFieldByName(getPropertyNameByAccessorName(accessor.getName()), true);
    }
    else {
      final String name = getPropertyNameByAccessorName(accessor.getName());
      assert name != null;
      final PsiField[] allFields = psiClass.getAllFields();
      for (PsiField psiField : allFields) {
        if (name.equals(psiField.getName())) {
          field = psiField;
          break;
        }
      }
    }
    if (field == null) return null;
    if (field.hasModifierProperty(PsiModifier.STATIC) == accessor.hasModifierProperty(PsiModifier.STATIC)) {
      return field;
    }
    return null;
  }

  /** Returns "get" or "is" depending on the getter's name, or null. */
  @Nullable
  public static String getGetterPrefix(PsiMethod getter) {
    final String name = getter.getName();
    if (name.startsWith(GET_PREFIX)) return GET_PREFIX;
    if (name.startsWith(IS_PREFIX)) return IS_PREFIX;
    return null;
  }

  /** Returns "set" if the method name starts with it, or null. */
  @Nullable
  public static String getSetterPrefix(PsiMethod setter) {
    if (setter.getName().startsWith(SET_PREFIX)) return SET_PREFIX;
    return null;
  }

  /** Returns the accessor prefix ("get"/"is"/"set") of the method name, or null. */
  @Nullable
  public static String getAccessorPrefix(PsiMethod method) {
    final String prefix = getGetterPrefix(method);
    if (prefix != null) return prefix;
    return getSetterPrefix(method);
  }

  /**
   * True if {@code accessor} is a getter or setter for {@code field}: matching
   * name, same containing class, and same staticness.
   */
  public static boolean isAccessorFor(PsiMethod accessor, PsiField field) {
    final String accessorName = accessor.getName();
    final String fieldName = field.getName();
    if (!ArrayUtil.contains(accessorName, suggestGettersName(fieldName)) &&
        !ArrayUtil.contains(accessorName, suggestSettersName(fieldName))) {
      return false;
    }
    final PsiClass accessorClass = accessor.getContainingClass();
    final PsiClass fieldClass = field.getContainingClass();
    if (!field.getManager().areElementsEquivalent(accessorClass, fieldClass)) return false;
    return accessor.hasModifierProperty(PsiModifier.STATIC) == field.hasModifierProperty(PsiModifier.STATIC);
  }

  /** Returns the field's synthetic accessor methods: all getters plus the setter, if any. */
  public static List<GrAccessorMethod> getFieldAccessors(GrField field) {
    List<GrAccessorMethod> accessors = new ArrayList<>();
    final GrAccessorMethod[] getters = field.getGetters();
    Collections.addAll(accessors, getters);
    final GrAccessorMethod setter = field.getSetter();
    if (setter != null) accessors.add(setter);
    return accessors;
  }

  /**
   * Generates a getter prototype ({@code getFoo() { return foo }}) for the field,
   * copying its staticness and nullability annotations. Returns null (after
   * logging) if method construction fails.
   */
  public static GrMethod generateGetterPrototype(PsiField field) {
    GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(field.getProject());
    String name = field.getName();
    String getName = getGetterNameNonBoolean(field.getName());
    try {
      PsiType type = field instanceof GrField ? ((GrField)field).getDeclaredType() : field.getType();
      GrMethod getter = factory.createMethod(getName, type);
      if (field.hasModifierProperty(PsiModifier.STATIC)) {
        PsiUtil.setModifierProperty(getter, PsiModifier.STATIC, true);
      }
      annotateWithNullableStuff(field, getter);
      GrCodeBlock body = factory.createMethodBodyFromText("\nreturn " + name + "\n");
      getter.getBlock().replace(body);
      return getter;
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return null;
    }
  }

  /**
   * Generates a setter prototype ({@code void setFoo(foo) { this.foo = foo }})
   * for the field, qualifying the assignment with {@code this.} or the class name
   * when the parameter would shadow the field. Returns null (after logging) if
   * method construction fails.
   */
  public static GrMethod generateSetterPrototype(PsiField field) {
    Project project = field.getProject();
    JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
    GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(project);
    String name = field.getName();
    boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
    VariableKind kind = codeStyleManager.getVariableKind(field);
    String propertyName = codeStyleManager.variableNameToPropertyName(name, kind);
    String setName = getSetterName(field.getName());
    // NOTE(review): may be null; only dereferenced below when isStatic and the
    // parameter shadows the field — confirm that combination cannot occur for a
    // field without a containing class.
    final PsiClass containingClass = field.getContainingClass();
    try {
      GrMethod setMethod = factory.createMethod(setName, PsiType.VOID);
      String parameterName = codeStyleManager.propertyNameToVariableName(propertyName, VariableKind.PARAMETER);
      final PsiType type = field instanceof GrField ? ((GrField)field).getDeclaredType() : field.getType();
      GrParameter param = factory.createParameter(parameterName, type);
      annotateWithNullableStuff(field, param);
      setMethod.getParameterList().add(param);
      PsiUtil.setModifierProperty(setMethod, PsiModifier.STATIC, isStatic);
      @NonNls StringBuilder builder = new StringBuilder();
      if (name.equals(parameterName)) {
        // Parameter shadows the field: qualify with "this." (or the class name
        // for static fields) so the assignment targets the field.
        if (!isStatic) {
          builder.append("this.");
        }
        else {
          String className = containingClass.getName();
          if (className != null) {
            builder.append(className);
            builder.append(".");
          }
        }
      }
      builder.append(name);
      builder.append("=");
      builder.append(parameterName);
      builder.append("\n");
      GrCodeBlock body = factory.createMethodBodyFromText(builder.toString());
      setMethod.getBlock().replace(body);
      return setMethod;
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return null;
    }
  }

  /**
   * Copies the original's @Nullable/@NotNull annotation onto the generated
   * element and, when an annotation (or another modifier) is present, drops the
   * now-redundant 'def' keyword.
   */
  @SuppressWarnings("MagicConstant")
  private static void annotateWithNullableStuff(PsiModifierListOwner original,
                                                PsiModifierListOwner generated) throws IncorrectOperationException {
    NullableNotNullManager.getInstance(original.getProject()).copyNullableOrNotNullAnnotation(original, generated);
    PsiModifierList modifierList = generated.getModifierList();
    if (modifierList != null && modifierList.hasExplicitModifier(GrModifier.DEF)) {
      LOG.assertTrue(modifierList instanceof GrModifierList);
      if (modifierList.getAnnotations().length > 0 || ((GrModifierList)modifierList).getModifiers().length > 1) {
        modifierList.setModifierProperty(GrModifier.DEF, false);
      }
    }
  }
}
| |
package il.ac.technion.ie.utils;
import il.ac.technion.ie.data.structure.BitMatrix;
import il.ac.technion.ie.data.structure.DBRecord;
import il.ac.technion.ie.data.structure.SetPairIF;
import il.ac.technion.ie.model.RecordSet;
import org.enerj.core.SparseBitSet.Iterator;
import org.hyperic.sigar.Mem;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.neo4j.graphdb.*;
import org.neo4j.graphdb.index.Index;
import org.neo4j.graphdb.index.IndexHits;
import org.neo4j.graphdb.index.IndexManager;
import org.neo4j.graphdb.index.RelationshipIndex;
import org.neo4j.kernel.EmbeddedGraphDatabase;
import org.neo4j.kernel.impl.util.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.BitSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Set-of-pairs structure backed by a hybrid store: pairs are accumulated in an
 * in-memory {@link BitMatrix} and, when free physical memory runs low, spilled
 * into an embedded Neo4j graph database (nodes = record ids, relationships =
 * pairs). Also tracks per-record neighbor counts ("NG") and row coverage.
 */
public class GDS_NG implements SetPairIF {
    public final static String IDS_INDEX_NAME = "ids";
    private static final String RECORD_DB_DIR = "target/";
    /** Spill to the DB when free physical memory drops below this percentage. */
    private final static double FREE_MEM_THRESH = 20;
    private final static String CONNECT_INDEX_NAME = "connected";
    private final static String REL_INDEX_KEY = "type";
    private final static String REL_INDEX_VAL = "conn";
    /** Node property holding the node's current neighbor count. */
    private final static String NG_PROP_NAME = "NG_PROP_NAME";
    /** Minimum number of set bits before spilling the matrix is worthwhile. */
    private final static int NUM_SET_THRESH = 10000;
    public static AtomicLong timeSpentClearingDB = new AtomicLong(0);
    /** Suffix for per-instance DB directories; bumped on every DB creation. */
    private static int runningNum = 1;
    private static Sigar sigar = new Sigar();
    /** True once pairs have been spilled into the embedded DB. */
    boolean wroteToDB = false;
    private double NGLimit = Double.MAX_VALUE;
    private EmbeddedGraphDatabase GDS;
    private String storeDir;
    /** Largest neighbor count seen so far, across the matrix and the DB. */
    private int maxNG = 0;
    /** Indices that have appeared in at least one pair. */
    private BitSet coveredRows = new BitSet();
    /** In-memory staging area for pairs not yet written to the DB. */
    private BitMatrix tempStorageMatrix;

    public GDS_NG(double NGLimit) {
        tempStorageMatrix = new BitMatrix(RecordSet.DB_SIZE);
        maxNG = 0;
        this.NGLimit = NGLimit;
    }

    public GDS_NG() {
        tempStorageMatrix = new BitMatrix(RecordSet.DB_SIZE);
        maxNG = 0;
    }

    /** Returns current physical-memory statistics, or null if Sigar fails. */
    public static Mem getMem() {
        Mem retVal = null;
        try {
            retVal = sigar.getMem();
        } catch (SigarException e) {
            // NOTE(review): best-effort — callers dereference the result, so a
            // Sigar failure would surface as an NPE downstream.
            e.printStackTrace();
        }
        return retVal;
    }

    private static void registerShutdownHook(final GraphDatabaseService graphDb) {
        // Registers a shutdown hook for the Neo4j instance so that it
        // shuts down nicely when the VM exits (even if you "Ctrl-C" the
        // running example before it's completed)
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                graphDb.shutdown();
            }
        });
    }

    /** Percentage of free physical memory, per Sigar. */
    public static double percentegeOfFreeMem() {
        return getMem().getFreePercent();
    }

    /** Ad-hoc smoke test: sets a few pairs and prints coverage, max NG and the exported matrix. */
    public static void main(String[] args) {
        GDS_NG gds_ng = new GDS_NG(123);
        gds_ng.setPair(1, 3, 0);
        gds_ng.setPair(2, 3, 0);
        gds_ng.setPair(4, 1000, 0);
        gds_ng.setPair(1000, 4, 0);
        gds_ng.setPair(2, 5, 0);
        BitSet bs = gds_ng.getCoveredRows();
        System.out.println("bs: " + bs.toString());
        System.out.println("gds_ng.getMaxNG(): " + gds_ng.getMaxNG());
        BitMatrix bm = gds_ng.exportToBM();
        System.out.println("end test");
    }

    /**
     * Counts true and false positives: relationships present in the actual GDS
     * that are (TP) or are not (FP) present in the ground-truth matrix.
     *
     * @return {TP, FP}
     */
    // TP+ FP - 1 in both the Ground Truth and in the result
    public static double[] TrueAndFalsePositives(BitMatrix GTMatrix,
                                                 GDS_NG ActualGDS) {
        long TP = 0;
        long FP = 0;
        IndexManager IM = ActualGDS.getGDS().index();
        RelationshipIndex index = IM.forRelationships(CONNECT_INDEX_NAME);
        for (Relationship connected : index.get(REL_INDEX_KEY, REL_INDEX_VAL)) {
            Integer i = (Integer) connected.getStartNode().getProperty(
                    DBRecord.ID_PROP_NAME);
            Integer j = (Integer) connected.getEndNode().getProperty(
                    DBRecord.ID_PROP_NAME);
            if (GTMatrix.getPair(i, j)) {
                TP++;
            } else {
                FP++;
            }
        }
        return new double[]{TP, FP};
    }

    /** Counts ground-truth pairs that are missing from the actual GDS (false negatives). */
    public static double FalseNegatives(BitMatrix GTMatrix, GDS_NG ActualGDS) {
        long FN = 0;
        Iterator It = GTMatrix.getSBS().getIterator();
        while (It.hasNext()) {
            long nextSetBit = It.next();
            // Decode (i, j) from the linear bit index, row-major over DB_SIZE columns.
            int j = (int) (nextSetBit % (long) RecordSet.DB_SIZE);
            int i = (int) ((nextSetBit - j) / (long) RecordSet.DB_SIZE);
            Node nodei = ActualGDS.getNodeFromIdx(i);
            Node nodej = ActualGDS.getNodeFromIdx(j);
            boolean exists = ActualGDS.doesRelationshipExist(nodei, nodej);
            if (!exists) {
                FN++;
            }
        }
        return FN;
    }

    public void setNGLimit(double NGLimit) {
        this.NGLimit = NGLimit;
    }

    /** Creates a fresh embedded Neo4j DB under a unique directory, wiping any leftovers. */
    private void createEmbeddedDB() {
        storeDir = RECORD_DB_DIR + "gds" + runningNum;
        try {
            FileUtils.deleteRecursively(new File(storeDir));
        } catch (IOException e) {
            // NOTE(review): best-effort cleanup of a stale directory.
            e.printStackTrace();
        }
        runningNum++;
        GDS = new EmbeddedGraphDatabase(storeDir);
        registerShutdownHook(GDS);
    }

    /**
     * Resets this instance: deletes the on-disk DB (if written), clears the
     * staging matrix and coverage, and restores the default NG limit. Time spent
     * is accumulated into {@link #timeSpentClearingDB}.
     */
    public void clearDb() {
        long start = System.currentTimeMillis();
        maxNG = 0;
        try {
            if (wroteToDB) {
                FileUtils.deleteRecursively(new File(storeDir));
                wroteToDB = false;
            }
            if (tempStorageMatrix != null) {
                tempStorageMatrix.clearAll();
            }
            this.coveredRows.clear();
            NGLimit = Double.MAX_VALUE;
            timeSpentClearingDB.addAndGet(System.currentTimeMillis() - start);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public GraphDatabaseService getGDS() {
        return GDS;
    }

    public int getMaxNG() {
        return maxNG;
    }

    public BitSet getCoveredRows() {
        return coveredRows;
    }

    /**
     * Records the pair (i, j), normalized so the smaller index comes first.
     * The {@code score} parameter is accepted for the interface but unused here.
     */
    public void setPair(int i, int j, double score) {
        if (i < j) {
            setIndex(i, j);
        } else {
            setIndex(j, i);
        }
    }

    /** Looks up the node for {@code id} in the "ids" index, creating and indexing it on first use. */
    private Node getNodeFromIdx(int id) {
        IndexManager IM = GDS.index();
        Index<Node> index = IM.forNodes(IDS_INDEX_NAME);
        IndexHits<Node> hits = index.get(DBRecord.ID_PROP_NAME, id);
        Node retVal = hits.getSingle();
        if (retVal != null) {
            return retVal;
        }
        retVal = GDS.createNode();
        retVal.setProperty(DBRecord.ID_PROP_NAME, id);
        retVal.setProperty(NG_PROP_NAME, 0);
        // add to node index
        index.add(retVal, DBRecord.ID_PROP_NAME, id);
        return retVal;
    }

    /** True if a "connected" relationship between the two nodes is already indexed. */
    private boolean doesRelationshipExist(Node inode, Node jnode) {
        IndexManager IM = GDS.index();
        RelationshipIndex index = IM.forRelationships(CONNECT_INDEX_NAME);
        boolean exists = (index.get(REL_INDEX_KEY, REL_INDEX_VAL, inode, jnode)
                .getSingle() != null);
        return exists;
    }

    private void addRelationshipToIndex(Relationship rel) {
        IndexManager IM = GDS.index();
        RelationshipIndex index = IM.forRelationships(CONNECT_INDEX_NAME);
        index.add(rel, REL_INDEX_KEY, REL_INDEX_VAL);
    }

    /**
     * Adds {@code num} to the node's neighbor count property and returns the new
     * value. (Renamed from the misspelled 'incrememntNeighbors'; private, so no
     * external callers are affected.)
     */
    private int incrementNeighbors(Node node, int num) {
        Integer nbrs = (Integer) node.getProperty(NG_PROP_NAME);
        node.setProperty(NG_PROP_NAME, nbrs + num);
        return (nbrs + num);
    }

    /**
     * Spills every pair in {@code BM} into the embedded DB (creating it on first
     * use) inside a single transaction, updating neighbor counts and maxNG.
     */
    public void writeToDB(BitMatrix BM) {
        if (!wroteToDB) {
            createEmbeddedDB();
            wroteToDB = true;
        }
        Transaction tx = GDS.beginTx();
        try {
            int[] pair = new int[2];
            Iterator It = BM.getSBS().getIterator();
            while (It.hasNext()) {
                long nextSetBit = It.next();
                BM.getSetPairFromIndex(nextSetBit, pair);
                int r = pair[0];
                int c = pair[1];
                Node rnode = getNodeFromIdx(r);
                Node cnode = getNodeFromIdx(c);
                if (!doesRelationshipExist(rnode, cnode)) {
                    Relationship rel = rnode.createRelationshipTo(cnode,
                            RelTypes.CONNECTED);
                    addRelationshipToIndex(rel);
                    int newrnodeNbrs = incrementNeighbors(rnode, 1);
                    int newcnodeNbrs = incrementNeighbors(cnode, 1);
                    maxNG = Math.max(maxNG, Math.max(newrnodeNbrs,
                            newcnodeNbrs));
                }
            }
            tx.success();
        } finally {
            tx.finish();
        }
    }

    /** Stages the pair in the in-memory matrix and spills to the DB when memory runs low. */
    private void setIndex(int i, int j) {
        tempStorageMatrix.setPair(i, j);
        maxNG = Math.max(maxNG, tempStorageMatrix.getMaxNG());
        updateCoverage(i);
        updateCoverage(j);
        /**
         * We want to write to the DB on the following terms:
         * 1. not enough free mem
         * 2. we will still need to write to the BM, meaning maxNG < NGLimit
         * 3. The matrix has at least 10000 set bits (otherwise there is no point in writing it)
         */
        if (tempStorageMatrix.numOfSet() > NUM_SET_THRESH && maxNG < NGLimit && percentegeOfFreeMem() < FREE_MEM_THRESH) {
            writeToDB(tempStorageMatrix);
            // Drop the old matrix before allocating a fresh one so the GC can
            // reclaim it first.
            tempStorageMatrix = null;
            Runtime.getRuntime().gc();
            tempStorageMatrix = new BitMatrix(RecordSet.DB_SIZE);
        }
    }

    /** Shuts down the embedded DB, if one was created. */
    public void endUse() {
        if (GDS != null) {
            GDS.shutdown();
        }
    }

    private void updateCoverage(int index) {
        coveredRows.set(index);
    }

    /**
     * Exports all pairs as a single BitMatrix: the in-memory staging matrix,
     * merged with every relationship previously spilled to the DB.
     */
    public BitMatrix exportToBM() {
        BitMatrix retval = tempStorageMatrix;
        if (wroteToDB) {
            IndexManager IM = GDS.index();
            RelationshipIndex index = IM.forRelationships(CONNECT_INDEX_NAME);
            for (Relationship connected : index.get(REL_INDEX_KEY, REL_INDEX_VAL)) {
                Integer i = (Integer) connected.getStartNode().getProperty(
                        DBRecord.ID_PROP_NAME);
                Integer j = (Integer) connected.getEndNode().getProperty(
                        DBRecord.ID_PROP_NAME);
                retval.setPair(i, j);
            }
        }
        System.out.println("exportToBM: returning a matrix with "
                + retval.numOfSet() + " set ");
        return retval;
    }

    /** Unused part of the SetPairIF contract. */
    @Override
    public void setColumnsSupport(List<Integer> items, int recordID, int recordID1) {
        // Intentionally a no-op for this implementation.
    }

    /** Relationship types used in the graph ('static' is implicit for nested enums). */
    private enum RelTypes implements RelationshipType {
        CONNECTED
    }
}
| |
package com.linkedin.databus.monitoring.mbean;
/*
*
* Copyright 2013 LinkedIn Corp. All rights reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
/**
 * JMX MBean collecting per-source statistics for a Databus event producer.
 * A "cycle" is one poll of the source DB; each cycle either yields events or
 * is empty. All methods are synchronized on this instance, so updates from
 * the polling thread and reads through JMX observe a consistent snapshot.
 */
public class EventSourceStatistics
implements EventSourceStatisticsMBean
{
  private final String _sourceName;
  //cycle = one poll of the source DB
  // Streak counters: at most one of these two is non-zero at any time.
  private int _numConsecutiveCyclesWithEvents;
  private int _numConsecutiveCyclesWithoutEvents;
  // Lifetime counts since construction or the last reset().
  private int _numCyclesWithEvents;
  private int _numCyclesWithoutEvents;
  private int _totalEvents;
  private long _totalEventSerializedSize;
  private long _totalEventFactoryTimeMillis;
  // Wall-clock millis of the last non-empty cycle; 0 until one occurs.
  private long _lastCycleWithEventsTimestamp;
  private long _maxScn;
  private long _maxDBScn;
  private long _numErrors;
  private long _timestampLastDBAccess;

  /**
   * Creates statistics for the named source with all counters at zero.
   *
   * @param sourceName logical name of the source DB table/view
   */
  public EventSourceStatistics(String sourceName)
  {
    _sourceName = sourceName;
  }

  /**
   * Creates statistics pre-seeded with existing totals (e.g. restored state).
   * Cycle and streak counters start at zero; _maxDBScn is left at 0.
   */
  public EventSourceStatistics(String name, int numDataEvents,
                               long timeSinceLastAccess, long maxScn, long numErrors,
                               long sizeDataEvents)
  {
    _sourceName = name;
    _totalEvents=numDataEvents;
    _timestampLastDBAccess = timeSinceLastAccess;
    _maxScn = maxScn;
    _totalEventSerializedSize = sizeDataEvents;
    _numErrors = numErrors;
    _totalEventFactoryTimeMillis = 0;
    _numConsecutiveCyclesWithEvents = 0;
    _numConsecutiveCyclesWithoutEvents = 0;
    _numCyclesWithEvents = 0;
    _numCyclesWithoutEvents = 0;
    _lastCycleWithEventsTimestamp = 0;
  }

  /** Records a poll cycle that produced no events; resets the with-events streak. */
  public synchronized void addEmptyEventCycle()
  {
    // Cycle did not have events
    _numConsecutiveCyclesWithEvents = 0;
    _numConsecutiveCyclesWithoutEvents ++;
    _numCyclesWithoutEvents ++;
  }

  /**
   * Records the outcome of one poll cycle. A cycle with numEvents <= 0 is
   * counted as empty (delegates to addEmptyEventCycle()).
   *
   * @param numEvents              events produced by the cycle
   * @param eventFactoryTimeMillis time spent creating the events
   * @param eventSerializedSize    total serialized size of the events
   * @param maxScn                 highest SCN seen in this cycle (overwrites the stored value)
   */
  public synchronized void addEventCycle(int numEvents, long eventFactoryTimeMillis, long eventSerializedSize, long maxScn)
  {
    if(numEvents > 0)
    {
      // Cycle had events
      _numConsecutiveCyclesWithEvents ++;
      _numConsecutiveCyclesWithoutEvents = 0;
      _numCyclesWithEvents ++;
      _totalEvents += numEvents;
      _totalEventSerializedSize += eventSerializedSize;
      _totalEventFactoryTimeMillis += eventFactoryTimeMillis;
      _lastCycleWithEventsTimestamp = System.currentTimeMillis();
      _maxScn = maxScn;
    }
    else
    {
      addEmptyEventCycle();
    }
  }

  /** Increments the lifetime error count. */
  public synchronized void addError()
  {
    ++_numErrors;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getAvgEventFactoryTimeMillisPerEvent()
   */
  @Override
  public synchronized long getAvgEventFactoryTimeMillisPerEvent()
  {
    // Integer division; 0 when no events yet (avoids divide-by-zero).
    return _totalEvents != 0 ? _totalEventFactoryTimeMillis / _totalEvents : 0;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getAvgEventSerializedSize()
   */
  @Override
  public synchronized long getAvgEventSerializedSize()
  {
    return _totalEvents != 0 ? _totalEventSerializedSize / _totalEvents : 0;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getAvgNumEventsPerNonEmptyCycle()
   */
  @Override
  public synchronized int getAvgNumEventsPerNonEmptyCycle()
  {
    return (_numCyclesWithEvents != 0 ? _totalEvents / _numCyclesWithEvents : 0);
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getNumConsecutiveCyclesWithEvents()
   */
  @Override
  public synchronized int getNumConsecutiveCyclesWithEvents()
  {
    return _numConsecutiveCyclesWithEvents;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getNumConsecutiveCyclesWithoutEvents()
   */
  @Override
  public synchronized int getNumConsecutiveCyclesWithoutEvents()
  {
    return _numConsecutiveCyclesWithoutEvents;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getNumCyclesWithEvents()
   */
  @Override
  public synchronized int getNumCyclesWithEvents()
  {
    return _numCyclesWithEvents;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getNumCyclesWithoutEvents()
   */
  @Override
  public synchronized int getNumCyclesWithoutEvents()
  {
    return _numCyclesWithoutEvents;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getSourceName()
   */
  @Override
  public synchronized String getSourceName()
  {
    return _sourceName;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getNumCyclesTotal()
   */
  @Override
  public synchronized int getNumCyclesTotal()
  {
    return _numCyclesWithEvents + _numCyclesWithoutEvents;
  }

  // NOTE(review): if no cycle with events has ever been recorded
  // (_lastCycleWithEventsTimestamp == 0) this returns millis since the epoch,
  // which reads as a huge "age". Confirm whether consumers special-case this.
  @Override
  public synchronized long getMillisSinceLastCycleWithEvents()
  {
    return System.currentTimeMillis() - _lastCycleWithEventsTimestamp;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getMaxScn()
   */
  @Override
  public synchronized long getMaxScn()
  {
    return _maxScn;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#getTotalEvents()
   */
  @Override
  public synchronized int getNumTotalEvents()
  {
    return _totalEvents;
  }

  /*
   * @see com.linkedin.databus.monitoring.mbean.EventSourceStatisticsMBean#reset()
   */
  @Override
  public synchronized void reset()
  {
    // Zeroes every counter except the immutable source name.
    _numConsecutiveCyclesWithEvents = 0;
    _numConsecutiveCyclesWithoutEvents = 0;
    _numCyclesWithEvents = 0;
    _numCyclesWithoutEvents = 0;
    _totalEvents = 0;
    _totalEventSerializedSize = 0;
    _totalEventFactoryTimeMillis = 0;
    _lastCycleWithEventsTimestamp = 0;
    _maxScn = 0;
    _maxDBScn = 0;
    _numErrors = 0;
    _timestampLastDBAccess = 0;
  }

  /** Records the wall-clock time of the most recent DB access (a setter despite the "add" name). */
  public synchronized void addTimeOfLastDBAccess(long ts)
  {
    _timestampLastDBAccess = ts;
  }

  /** Records the highest SCN observed in the source DB (a setter despite the "add" name). */
  public synchronized void addMaxDBScn(long dbscn)
  {
    _maxDBScn = dbscn;
  }

  @Override
  public synchronized long getMaxDBScn()
  {
    return _maxDBScn;
  }

  @Override
  public synchronized long getNumErrors()
  {
    return _numErrors;
  }

  // NOTE(review): same epoch-sentinel caveat as getMillisSinceLastCycleWithEvents()
  // when no DB access has been recorded yet.
  @Override
  public synchronized long getTimeSinceLastDBAccess()
  {
    return System.currentTimeMillis() - _timestampLastDBAccess;
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudhsm.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudhsm-2014-05-30/GetConfig" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetConfigResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The type of credentials. */
    private String configType;

    /** The chrystoki.conf configuration file. */
    private String configFile;

    /** The certificate file containing the server.pem files of the HSMs. */
    private String configCred;

    /**
     * Sets the type of credentials.
     *
     * @param configType
     *        The type of credentials.
     */
    public void setConfigType(String configType) {
        this.configType = configType;
    }

    /**
     * Returns the type of credentials.
     *
     * @return The type of credentials.
     */
    public String getConfigType() {
        return configType;
    }

    /**
     * Fluent variant of {@link #setConfigType(String)}.
     *
     * @param configType
     *        The type of credentials.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetConfigResult withConfigType(String configType) {
        this.configType = configType;
        return this;
    }

    /**
     * Sets the chrystoki.conf configuration file.
     *
     * @param configFile
     *        The chrystoki.conf configuration file.
     */
    public void setConfigFile(String configFile) {
        this.configFile = configFile;
    }

    /**
     * Returns the chrystoki.conf configuration file.
     *
     * @return The chrystoki.conf configuration file.
     */
    public String getConfigFile() {
        return configFile;
    }

    /**
     * Fluent variant of {@link #setConfigFile(String)}.
     *
     * @param configFile
     *        The chrystoki.conf configuration file.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetConfigResult withConfigFile(String configFile) {
        this.configFile = configFile;
        return this;
    }

    /**
     * Sets the certificate file containing the server.pem files of the HSMs.
     *
     * @param configCred
     *        The certificate file containing the server.pem files of the HSMs.
     */
    public void setConfigCred(String configCred) {
        this.configCred = configCred;
    }

    /**
     * Returns the certificate file containing the server.pem files of the HSMs.
     *
     * @return The certificate file containing the server.pem files of the HSMs.
     */
    public String getConfigCred() {
        return configCred;
    }

    /**
     * Fluent variant of {@link #setConfigCred(String)}.
     *
     * @param configCred
     *        The certificate file containing the server.pem files of the HSMs.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetConfigResult withConfigCred(String configCred) {
        this.configCred = configCred;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Each present field is rendered as "Name: value"; a comma follows every
        // field except the last one declared (matching the generated layout).
        String rendered = "{";
        if (getConfigType() != null) {
            rendered += "ConfigType: " + getConfigType() + ",";
        }
        if (getConfigFile() != null) {
            rendered += "ConfigFile: " + getConfigFile() + ",";
        }
        if (getConfigCred() != null) {
            rendered += "ConfigCred: " + getConfigCred();
        }
        return rendered + "}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetConfigResult)) {
            return false;
        }
        GetConfigResult other = (GetConfigResult) obj;
        return fieldEquals(getConfigType(), other.getConfigType())
                && fieldEquals(getConfigFile(), other.getConfigFile())
                && fieldEquals(getConfigCred(), other.getConfigCred());
    }

    /** Null-safe equality check for a single field. */
    private static boolean fieldEquals(Object mine, Object theirs) {
        return mine == null ? theirs == null : mine.equals(theirs);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Same accumulation as the generated version: prime * h + fieldHash (0 for null).
        for (Object field : new Object[] { getConfigType(), getConfigFile(), getConfigCred() }) {
            hashCode = prime * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public GetConfigResult clone() {
        try {
            return (GetConfigResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalMapping;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.Arrays;
/**
* An aggregator of string values that relies on global ordinals in order to build buckets.
*/
public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggregator {

    protected final ValuesSource.Bytes.WithOrdinals.FieldData valuesSource;
    protected final IncludeExclude includeExclude;

    // Current reader's values mapped to global ordinals; refreshed in setNextReader().
    protected BytesValues.WithOrdinals globalValues;

    // TODO: cache the acceptedglobalValues per aggregation definition.
    // We can't cache this yet in ValuesSource, since ValuesSource is reused per field for aggs during the execution.
    // If aggs with same field, but different include/exclude are defined, then the last defined one will override the
    // first defined one.
    // So currently for each instance of this aggregator the acceptedglobalValues will be computed, this is unnecessary
    // especially if this agg is on a second layer or deeper.
    protected LongBitSet acceptedGlobalOrdinals;

    public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, long estimatedBucketCount,
            long maxOrd, InternalOrder order, BucketCountThresholds bucketCountThresholds,
            IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode) {
        super(name, factories, maxOrd, aggregationContext, parent, order, bucketCountThresholds, collectionMode);
        this.valuesSource = valuesSource;
        this.includeExclude = includeExclude;
    }

    // Maps a global term ordinal to its bucket ordinal. Identity in this base
    // class (global ordinals ARE the bucket ordinals); WithHash overrides it.
    protected long getBucketOrd(long termOrd) {
        return termOrd;
    }

    @Override
    public boolean shouldCollect() {
        return true;
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        globalValues = valuesSource.globalBytesValues();
        // The accepted-ordinals bitset is computed at most once per aggregator
        // instance (see TODO above) and re-applied to each reader's values.
        if (acceptedGlobalOrdinals != null) {
            globalValues = new FilteredOrdinals(globalValues, acceptedGlobalOrdinals);
        } else if (includeExclude != null) {
            acceptedGlobalOrdinals = includeExclude.acceptedGlobalOrdinals(globalValues, valuesSource);
            globalValues = new FilteredOrdinals(globalValues, acceptedGlobalOrdinals);
        }
    }

    @Override
    public void collect(int doc, long owningBucketOrdinal) throws IOException {
        // Every global ordinal of the doc's values is counted directly as a bucket.
        final int numOrds = globalValues.setDocument(doc);
        for (int i = 0; i < numOrds; i++) {
            final long globalOrd = globalValues.nextOrd();
            collectExistingBucket(doc, globalOrd);
        }
    }

    // Copies `from` into `to`, growing to.bytes if needed (to is reusable scratch).
    protected static void copy(BytesRef from, BytesRef to) {
        if (to.bytes.length < from.length) {
            to.bytes = new byte[ArrayUtil.oversize(from.length, RamUsageEstimator.NUM_BYTES_BYTE)];
        }
        to.offset = 0;
        to.length = from.length;
        System.arraycopy(from.bytes, from.offset, to.bytes, 0, from.length);
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        if (globalValues == null) { // no context in this reader
            return buildEmptyAggregation();
        }
        // Select the priority-queue size: with minDocCount == 0 even unseen
        // ordinals may become buckets, so bound by the ordinal space instead.
        final int size;
        if (bucketCountThresholds.getMinDocCount() == 0) {
            // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns
            size = (int) Math.min(globalValues.getMaxOrd(), bucketCountThresholds.getShardSize());
        } else {
            size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
        }
        BucketPriorityQueue ordered = new BucketPriorityQueue(size, order.comparator(this));
        OrdBucket spare = new OrdBucket(-1, 0, null);
        // Walk the whole global ordinal range, skipping excluded terms and
        // (when minDocCount > 0) empty buckets, keeping the top `size` buckets.
        for (long globalTermOrd = BytesValues.WithOrdinals.MIN_ORDINAL; globalTermOrd < globalValues.getMaxOrd(); ++globalTermOrd) {
            if (includeExclude != null && !acceptedGlobalOrdinals.get(globalTermOrd)) {
                continue;
            }
            final long bucketOrd = getBucketOrd(globalTermOrd);
            final int bucketDocCount = bucketOrd < 0 ? 0 : bucketDocCount(bucketOrd);
            if (bucketCountThresholds.getMinDocCount() > 0 && bucketDocCount == 0) {
                continue;
            }
            spare.globalOrd = globalTermOrd;
            spare.bucketOrd = bucketOrd;
            spare.docCount = bucketDocCount;
            if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) {
                spare = (OrdBucket) ordered.insertWithOverflow(spare);
                if (spare == null) {
                    spare = new OrdBucket(-1, 0, null);
                }
            }
        }
        // Get the top buckets
        // The queue pops in ascending order, so fill the result array backwards.
        final InternalTerms.Bucket[] list = new InternalTerms.Bucket[ordered.size()];
        long survivingBucketOrds[] = new long[ordered.size()];
        for (int i = ordered.size() - 1; i >= 0; --i) {
            final OrdBucket bucket = (OrdBucket) ordered.pop();
            survivingBucketOrds[i] = bucket.bucketOrd;
            BytesRef scratch = new BytesRef();
            // Resolve the ordinal back to the term bytes only for surviving buckets.
            copy(globalValues.getValueByOrd(bucket.globalOrd), scratch);
            list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null);
            list[i].bucketOrd = bucket.bucketOrd;
        }
        //replay any deferred collections
        runDeferredCollections(survivingBucketOrds);
        //Now build the aggs
        for (int i = 0; i < list.length; i++) {
            Bucket bucket = list[i];
            bucket.aggregations = bucket.docCount == 0 ? bucketEmptyAggregations() : bucketAggregations(bucket.bucketOrd);
        }
        return new StringTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), Arrays.asList(list));
    }

    /** This is used internally only, just for compare using global ordinal instead of term bytes in the PQ */
    static class OrdBucket extends InternalTerms.Bucket {
        long globalOrd;

        OrdBucket(long globalOrd, long docCount, InternalAggregations aggregations) {
            super(docCount, aggregations);
            this.globalOrd = globalOrd;
        }

        @Override
        int compareTerm(Terms.Bucket other) {
            return Long.compare(globalOrd, ((OrdBucket)other).globalOrd);
        }

        // Key accessors are never needed for the in-PQ comparison, so they
        // deliberately fail fast if called.
        @Override
        public String getKey() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Text getKeyAsText() {
            throw new UnsupportedOperationException();
        }

        @Override
        Object getKeyAsObject() {
            throw new UnsupportedOperationException();
        }

        @Override
        Bucket newBucket(long docCount, InternalAggregations aggs) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Number getKeyAsNumber() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Variant of {@link GlobalOrdinalsStringTermsAggregator} that rebases hashes in order to make them dense. Might be
     * useful in case few hashes are visited.
     */
    public static class WithHash extends GlobalOrdinalsStringTermsAggregator {

        private final LongHash bucketOrds;

        public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, long estimatedBucketCount,
                long maxOrd, InternalOrder order, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext,
                Aggregator parent, SubAggCollectionMode collectionMode) {
            // Set maxOrd to estimatedBucketCount! To be conservative with memory.
            super(name, factories, valuesSource, estimatedBucketCount, estimatedBucketCount, order, bucketCountThresholds, includeExclude, aggregationContext, parent, collectionMode);
            bucketOrds = new LongHash(estimatedBucketCount, aggregationContext.bigArrays());
        }

        @Override
        public void collect(int doc, long owningBucketOrdinal) throws IOException {
            final int numOrds = globalValues.setDocument(doc);
            for (int i = 0; i < numOrds; i++) {
                final long globalOrd = globalValues.nextOrd();
                // LongHash.add returns -1-existingId when the key was already
                // present; decode it and bump the existing bucket instead.
                long bucketOrd = bucketOrds.add(globalOrd);
                if (bucketOrd < 0) {
                    bucketOrd = -1 - bucketOrd;
                    collectExistingBucket(doc, bucketOrd);
                } else {
                    collectBucket(doc, bucketOrd);
                }
            }
        }

        @Override
        protected long getBucketOrd(long termOrd) {
            // find() returns -1 for ordinals never collected; caller treats that as empty.
            return bucketOrds.find(termOrd);
        }

        @Override
        protected void doClose() {
            Releasables.close(bucketOrds);
        }
    }

    /**
     * Variant of {@link GlobalOrdinalsStringTermsAggregator} that resolves global ordinals post segment collection
     * instead of on the fly for each match.This is beneficial for low cardinality fields, because it can reduce
     * the amount of look-ups significantly.
     */
    public static class LowCardinality extends GlobalOrdinalsStringTermsAggregator {

        private final IntArray segmentDocCounts;

        private BytesValues.WithOrdinals segmentOrdinals;
        // Either segmentDocCounts (segment ords differ from global ords) or the
        // shared doc counts (segment already uses global ords).
        private IntArray current;

        public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, long estimatedBucketCount,
                long maxOrd, InternalOrder order, BucketCountThresholds bucketCountThresholds, AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode) {
            super(name, factories, valuesSource, estimatedBucketCount, maxOrd, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode);
            this.segmentDocCounts = bigArrays.newIntArray(maxOrd, true);
        }

        @Override
        public void collect(int doc, long owningBucketOrdinal) throws IOException {
            // Count per-segment ordinals only; translation to global ordinals is
            // deferred to mapSegmentCountsToGlobalCounts().
            final int numOrds = segmentOrdinals.setDocument(doc);
            for (int i = 0; i < numOrds; i++) {
                final long segmentOrd = segmentOrdinals.nextOrd();
                current.increment(segmentOrd, 1);
            }
        }

        @Override
        public void setNextReader(AtomicReaderContext reader) {
            // Flush the previous segment's counts into global buckets before switching.
            if (segmentOrdinals != null && segmentOrdinals.getMaxOrd() != globalValues.getMaxOrd()) {
                mapSegmentCountsToGlobalCounts();
            }
            globalValues = valuesSource.globalBytesValues();
            segmentOrdinals = valuesSource.bytesValues();
            if (segmentOrdinals.getMaxOrd() != globalValues.getMaxOrd()) {
                current = segmentDocCounts;
            } else {
                current = getDocCounts();
            }
        }

        @Override
        protected void doPostCollection() {
            // Flush the last segment's counts.
            if (segmentOrdinals.getMaxOrd() != globalValues.getMaxOrd()) {
                mapSegmentCountsToGlobalCounts();
            }
        }

        @Override
        protected void doClose() {
            Releasables.close(segmentDocCounts);
        }

        private void mapSegmentCountsToGlobalCounts() {
            // There is no public method in Ordinals.Docs that allows for this mapping...
            // This is the cleanest way I can think of so far
            GlobalOrdinalMapping mapping = (GlobalOrdinalMapping) globalValues;
            for (int i = 0; i < segmentDocCounts.size(); i++) {
                // set(i, 0) both reads the accumulated count and clears the slot
                // for the next segment.
                final int inc = segmentDocCounts.set(i, 0);
                if (inc == 0) {
                    continue;
                }
                final long globalOrd = mapping.getGlobalOrd(i);
                try {
                    incrementBucketDocCount(inc, globalOrd);
                } catch (IOException e) {
                    throw ExceptionsHelper.convertToElastic(e);
                }
            }
        }
    }

    // Wraps another ordinals source, exposing only ordinals whose bit is set
    // in the accepted bitset (include/exclude filtering).
    private static final class FilteredOrdinals extends BytesValues.WithOrdinals {

        private final BytesValues.WithOrdinals inner;
        private final LongBitSet accepted;

        private long currentOrd;
        // Reusable scratch holding the accepted ordinals of the current doc.
        private long[] buffer = new long[0];
        private int bufferSlot;

        private FilteredOrdinals(BytesValues.WithOrdinals inner, LongBitSet accepted) {
            super(inner.isMultiValued());
            this.inner = inner;
            this.accepted = accepted;
        }

        @Override
        public long getMaxOrd() {
            return inner.getMaxOrd();
        }

        @Override
        public long getOrd(int docId) {
            long ord = inner.getOrd(docId);
            if (accepted.get(ord)) {
                return currentOrd = ord;
            } else {
                return currentOrd = MISSING_ORDINAL;
            }
        }

        @Override
        public long nextOrd() {
            return currentOrd = buffer[bufferSlot++];
        }

        @Override
        public int setDocument(int docId) {
            // Pull all ordinals for the doc from the inner source and keep only
            // the accepted ones; nextOrd() then replays them from the buffer.
            int numDocs = inner.setDocument(docId);
            buffer = ArrayUtil.grow(buffer, numDocs);
            bufferSlot = 0;
            int numAcceptedOrds = 0;
            for (int slot = 0; slot < numDocs; slot++) {
                long ord = inner.nextOrd();
                if (accepted.get(ord)) {
                    buffer[numAcceptedOrds] = ord;
                    numAcceptedOrds++;
                }
            }
            return numAcceptedOrds;
        }

        @Override
        public BytesRef getValueByOrd(long ord) {
            return inner.getValueByOrd(ord);
        }
    }
}
| |
package com.dinglian.server.chuqulang.utils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
public class DateUtils {
// ---------------------------------------------------------------------------
// Shared formatters.
// NOTE(review): SimpleDateFormat is NOT thread-safe; these shared static
// instances can silently corrupt results under concurrent use. Consider
// building per-call instances (as format(Date, String) already does) or
// migrating to the thread-safe java.time.format.DateTimeFormatter.
// ---------------------------------------------------------------------------
private static final SimpleDateFormat dataFormat = new SimpleDateFormat("dd/MM/yyyy");
private static final SimpleDateFormat sqlDataFormat = new SimpleDateFormat("yyyy-MM-dd");
private static final SimpleDateFormat dataFormatWithTime = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
private static final SimpleDateFormat dataFormatWithTimeNoSecond = new SimpleDateFormat("dd/MM/yyyy HH:mm");
private static final SimpleDateFormat testFormatWithTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static final SimpleDateFormat millSecondFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss-SSS");
private static final SimpleDateFormat sqlDateFormatWithTime = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private static final SimpleDateFormat dataFormatString = new SimpleDateFormat("yyMMdd");
private static final SimpleDateFormat dataFormatExtJs = new SimpleDateFormat("MMM dd yyyy");
private static final SimpleDateFormat dataFormatUS = new SimpleDateFormat("MM/dd/yy");
private static final SimpleDateFormat dataFormatToUS = new SimpleDateFormat("MMM-dd-yyyy", Locale.US);
// Fixed: the pattern previously used lowercase "mm" (minute-of-hour) in the
// month position ("yy/mm/dd HH:mm"), so dataFormat(String) parsed the middle
// field as minutes and every month came back as January. "MM" is month-of-year.
private static final SimpleDateFormat dataFormatCombo = new SimpleDateFormat("yy/MM/dd HH:mm");
public static final SimpleDateFormat dateFormatString = new SimpleDateFormat("yyMMddHHmmss");
public static final SimpleDateFormat dateFormatYear = new SimpleDateFormat("yy");
// Commonly used pattern strings (for use with format(Date, String) / parse(String, String)).
public final static String defaultFormatStr = "dd/MM/yyyy";
public final static String dMyHm = "dd/MM/yyyy HH:mm";
public final static String dMyHms = "dd/MM/yyyy HH:mm:ss";
public final static String dMy = "dd/MM/yyyy";
public final static String yMdHms = "yyyy-MM-dd HH:mm:ss";
public final static String yMd = "yyyy-MM-dd";
public final static String yyyyMMddHM = "yyyyMMdd HH:mm";
public final static String yyyyMMddHmsMs = "yyyyMMdd HHmmssSSS";
public final static String yyyyMMddHmsMsWithNoSpace = "yyyyMMdd:HHmmssSSS";
// Abbreviated day-of-week keys.
public final static String SUNDAY = "sun";
public final static String MONDAY = "mon";
public final static String TUESDAY = "tue";
public final static String WEDNESDAY = "wed";
public final static String THURSDAY = "thu";
public final static String FRIDAY = "fri";
public final static String SATURDAY = "sat";
// English month names.
public final static String JANUARY = "January";
public final static String FEBRUARY = "February";
public final static String MARCH = "March";
public final static String APRIL = "April";
public final static String MAY = "May";
public final static String JUNE = "June";
public final static String JULY = "July";
public final static String AUGUST = "August";
public final static String SEPTEMBER = "September";
public final static String OCTOBER = "October";
public final static String NOVEMBER = "November";
public final static String DECEMBER = "December";
public final static String extDateFormat = "yyyy/MM/dd";
public static final String[] MONTH = { "January", "February", "March", "April", "May", "June", "July", "August",
		"September", "October", "November", "December" };
/** Creates a new instance of DateUtils (all useful members are static). */
public DateUtils() {
}
/**
 * Renders {@code date} with the supplied pattern; returns "" for null input.
 * Builds a fresh SimpleDateFormat per call, so this overload is thread-safe.
 */
public static String format(Date date, String format) {
    return date == null ? "" : new SimpleDateFormat(format).format(date);
}

/** Renders the calendar's instant with the supplied pattern; "" for null input. */
public static String format(Calendar calendar, String format) {
    return calendar == null ? "" : new SimpleDateFormat(format).format(calendar.getTime());
}

/**
 * Parses {@code dateStr} with the supplied pattern.
 * Returns null for blank input or on parse failure; failures are reported via
 * printStackTrace(), matching the historical behavior.
 */
public static Date parse(String dateStr, String format) {
    if (StringUtils.isBlank(dateStr)) {
        return null;
    }
    try {
        return new SimpleDateFormat(format).parse(dateStr);
    } catch (Exception ex) {
        ex.printStackTrace();
        return null;
    }
}

/**
 * Re-renders a date string from one pattern into another.
 * Returns "" when the input is blank or cannot be parsed.
 * (Method name keeps its historical "transefer" spelling for source compatibility.)
 */
public static String transeferFormat(String dateStr, String fromFormat, String toFormat) {
    if (StringUtils.isBlank(dateStr)) {
        return "";
    }
    return format(parse(dateStr, fromFormat), toFormat);
}
/**
 * Parses {@code dateStr} with the caller-supplied formatter.
 * Returns null for blank input or on parse failure (stack trace printed).
 * NOTE(review): the shared static formatters typically passed here are not
 * thread-safe — confirm single-threaded use by callers.
 */
public static Date parse(String dateStr, SimpleDateFormat format) {
    if (StringUtils.isBlank(dateStr)) {
        return null;
    }
    try {
        return format.parse(dateStr);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    return null;
}

/** Parses into a Calendar; null when the string is blank or unparsable. */
public static Calendar parseCalendar(String dateStr, String format) {
    final Date parsed = parse(dateStr, format);
    if (parsed == null) {
        return null;
    }
    final Calendar result = GregorianCalendar.getInstance();
    result.setTime(parsed);
    return result;
}
/** dd/MM/yyyy rendering of {@code date}. */
public static String format(Date date) {
    return dataFormat.format(date);
}

/** yyMMdd rendering of {@code date}. */
public static String simpleFormat(Date date) {
    return dataFormatString.format(date);
}

/** yyyy-MM-dd (SQL style) rendering of {@code date}. */
public static String sqlDataFormat(Date date) {
    return sqlDataFormat.format(date);
}

/** Parses {@code date} with the "combo" formatter (see dataFormatCombo). */
public static Date dataFormat(String date) throws ParseException {
    return dataFormatCombo.parse(date);
}

/** Parses a yyyy-MM-dd string. */
public static Date sqlDataFormat(String date) throws ParseException {
    return sqlDataFormat.parse(date);
}

/**
 * Normalizes a yyyy-MM-dd date string by parsing and re-formatting it.
 * Fixed: the previous implementation passed the String straight to
 * SimpleDateFormat.format(Object), which unconditionally threw
 * IllegalArgumentException ("Cannot format given Object as a Date").
 *
 * @throws IllegalArgumentException if {@code date} cannot be parsed
 *         (the same exception type the broken version always threw)
 */
public static String sqlDataStringFormat(String date) {
    try {
        return sqlDataFormat.format(sqlDataFormat.parse(date));
    } catch (ParseException ex) {
        throw new IllegalArgumentException("Cannot parse date string: " + date, ex);
    }
}

/** yyyy-MM-dd HH:mm rendering of {@code date}. */
public static String formatSqlWithTime(Date date) {
    return sqlDateFormatWithTime.format(date);
}

/** dd/MM/yyyy HH:mm:ss rendering of {@code date}. */
public static String formatWithTime(Date date) {
    return dataFormatWithTime.format(date);
}
/** dd/MM/yyyy rendering of the calendar; "" for null input. */
public static String format(Calendar calendar) {
    return calendar == null ? "" : format(calendar.getTime());
}

/** yyyy-MM-dd rendering of the calendar; "" for null input. */
public static String formatSql(Calendar calendar) {
    return calendar == null ? "" : sqlDataFormat(calendar.getTime());
}

/** yyyy-MM-dd HH:mm rendering of the calendar; "" for null input. */
public static String formatSqlWithTime(Calendar calendar) {
    return calendar == null ? "" : formatSqlWithTime(calendar.getTime());
}

/** Duplicate of {@link #formatSql(Calendar)}; kept for source compatibility. */
public static String formatSqlDataFormat(Calendar calendar) {
    return calendar == null ? "" : sqlDataFormat(calendar.getTime());
}

/** dd/MM/yyyy HH:mm:ss rendering of the calendar; "" for null input. */
public static String formatWithTime(Calendar calendar) {
    return calendar == null ? "" : formatWithTime(calendar.getTime());
}
/** Parses a dd/MM/yyyy string. */
public static Date parse(String dateInStr) throws ParseException {
    return dataFormat.parse(dateInStr);
}

/** Parses a dd/MM/yyyy HH:mm:ss string. */
public static Date parseWithTime(String dateInStr) throws ParseException {
    return dataFormatWithTime.parse(dateInStr);
}

/** Parses an ExtJS-style "MMM dd yyyy" string. */
public static Date parseForExtJs(String dateInStr) throws ParseException {
    return dataFormatExtJs.parse(dateInStr);
}

/** Parses a US-style MM/dd/yy string. */
public static Date parseForUS(String dateInStr) throws ParseException {
    return dataFormatUS.parse(dateInStr);
}
public static Calendar parseCalendar(String dateInStr, TimeZone timeZone) throws ParseException {
if (StringUtils.isBlank(dateInStr)) {
return null;
}
Date date = parse(dateInStr);
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar;
}
public static Calendar parseCalendarWithTime(String dateInStr, TimeZone timeZone) throws ParseException {
if (StringUtils.isBlank(dateInStr)) {
return null;
}
Date date = parseWithTime(dateInStr);
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar;
}
public static Calendar parseCalendarForExtJs(String dateInStr, TimeZone timeZone) throws ParseException {
if (StringUtils.isBlank(dateInStr)) {
return null;
}
Date date = parseForExtJs(dateInStr);
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar;
}
/**
 * Parses dateInStr with the US pattern into a Calendar; returns null for
 * blank input.
 * Fix: the timeZone argument was previously ignored; it is now honored
 * when non-null (null keeps the old default-zone behavior).
 */
public static Calendar parseCalendarUS(String dateInStr, TimeZone timeZone) throws ParseException {
    if (StringUtils.isBlank(dateInStr)) {
        return null;
    }
    Date date = parseForUS(dateInStr);
    Calendar calendar = timeZone == null ? Calendar.getInstance() : Calendar.getInstance(timeZone);
    calendar.setTime(date);
    return calendar;
}
/**
 * Returns a Calendar for the current date/time in the default zone.
 * The declared ParseException is never thrown; it is kept only so the
 * signature stays source-compatible with existing callers.
 */
public static Calendar getCalendar() throws ParseException {
    return Calendar.getInstance();
}
// Returns "now" rendered with testFormatWithTime and re-parsed with
// sqlDateFormatWithTime (dropping whatever precision the patterns omit).
// NOTE(review): this formats with one pattern and parses with another —
// it only works while the two patterns are compatible; confirm.
public static Date getSystemDate() throws ParseException {
// return DateUtils.sqlDataFormat(DateUtils.sqlDataFormat(new Date()));
String dateInStr = testFormatWithTime.format(new Date().getTime());
return sqlDateFormatWithTime.parse(dateInStr);
}
/**
 * Current time truncated to the precision of sqlDateFormatWithTime's
 * pattern (format then re-parse round trip).
 */
public static Date getSystemTime() throws ParseException {
    String rendered = sqlDateFormatWithTime.format(new Date());
    return sqlDateFormatWithTime.parse(rendered);
}
/** Current time rendered with sqlDateFormatWithTime. */
public static String getSystemTimeStr() throws ParseException {
    return sqlDateFormatWithTime.format(new Date());
}
/** Current time rendered with testFormatWithTime (second precision). */
public static String getCurrentTimeWithSecond() throws ParseException {
    return testFormatWithTime.format(new Date());
}
// Whole seconds from startTime to endTime, both in the testFormatWithTime
// pattern; negative when endTime precedes startTime.
public static long getDiffTimeWithSecond(String startTime, String endTime) throws ParseException {
Date startDate = testFormatWithTime.parse(startTime);
Date endDate = testFormatWithTime.parse(endTime);
long between = (endDate.getTime() - startDate.getTime()) / 1000;// return
// second
return between;
}
// NOTE(review): despite the name, this returns only the first 4 characters
// of the formatted date — presumably just the year if dataFormatString
// begins with "yyyy". Confirm whether substring(0, 6) was intended.
public static String getYearMonth(Date date) {
return dataFormatString.format(date).substring(0, 4);
}
// WARNING(review): DateFormat.format(Object) cannot format a String and
// throws IllegalArgumentException at runtime, so this overload fails for
// every input. Callers should use the Date overload; a fix here requires
// knowing the incoming string's pattern, so only flagging it.
public static String getDataFormatToUS(String date) {
return dataFormatToUS.format(date);
}
// Formats the given date with the US pattern (dataFormatToUS). The
// declared ParseException is never thrown by this body.
public static String getDataFormatToUS(Date date) throws ParseException {
return dataFormatToUS.format(date);
}
// Formats the date with the time-without-seconds pattern.
public static String getDataFormatWithTimeNoSecond(Date date) throws ParseException {
return dataFormatWithTimeNoSecond.format(date);
}
/** Current date-time rendered with dataFormatWithTime. */
public static String getTimeStr() {
    return dataFormatWithTime.format(new Date());
}
/** Tomorrow's date rendered with dataFormat. */
public static String getTomorrowDateStr() {
    Calendar tomorrow = Calendar.getInstance();
    tomorrow.add(Calendar.DATE, 1);
    return dataFormat.format(tomorrow.getTime());
}
/** Today's date rendered with dataFormat. */
public static String getCurrentDateStr() {
    return dataFormat.format(new Date());
}
// Default "created from" filter value: the first day of the previous
// month, rendered with dataFormat.
public static String getDefaultCreateOrderDateFrom() {
Calendar nowDate = GregorianCalendar.getInstance();
nowDate.add(Calendar.MONTH, -1);
nowDate.set(Calendar.DAY_OF_MONTH, 1);
return dataFormat.format(nowDate.getTime());
}
/** Default "created to" filter value: today's date rendered with dataFormat. */
public static String getDefaultCreateOrderDateTo() {
    return dataFormat.format(new Date());
}
/**
 * First day of the month six months ago, rendered with dataFormat.
 * Fix: this duplicated getPreviousMonthDate's logic; it now delegates
 * with an offset of -6 (identical behavior).
 */
public static String getPreviousSixMonthDate() {
    return getPreviousMonthDate(-6);
}
/**
 * First day of the month three months ago, rendered with dataFormat.
 * Fix: this duplicated getPreviousMonthDate's logic; it now delegates
 * with an offset of -3 (identical behavior).
 */
public static String getPreviousThreeMonthDate() {
    return getPreviousMonthDate(-3);
}
// First day of the month beforeMonth months away, rendered with
// dataFormat. NOTE: the offset is added as-is, so callers must pass a
// negative value (e.g. -3) to go back in time.
public static String getPreviousMonthDate(int beforeMonth) {
Calendar nowDate = GregorianCalendar.getInstance();
nowDate.add(Calendar.MONTH, beforeMonth);
nowDate.set(Calendar.DAY_OF_MONTH, 1);
String date = dataFormat.format(nowDate.getTime());
return date;
}
/** Returns the current calendar year (e.g. 2024). */
public static int getYearsSoFar() {
    return Calendar.getInstance().get(Calendar.YEAR);
}
/** Returns a Calendar set to now plus {@code day} days (negative goes back). */
public static Calendar getRequestShipDate(int day) {
    Calendar shipDate = Calendar.getInstance();
    shipDate.add(Calendar.DAY_OF_YEAR, day);
    return shipDate;
}
/** Returns the Date {@code day} days from now (negative goes back). */
public static Date getRangeDate(int day) {
    Calendar rangeEnd = Calendar.getInstance();
    rangeEnd.add(Calendar.DAY_OF_YEAR, day);
    return rangeEnd.getTime();
}
/** Formats the calendar's instant with the millisecond pattern; empty string for null. */
public static String millSecondFormat(Calendar calendar) {
    return calendar == null ? "" : millSecondFormat.format(calendar.getTime());
}
/**
 * Returns true when value strictly matches the DateUtils.dMy pattern
 * (lenient parsing disabled). Any parse failure — including null input —
 * yields false.
 */
public static boolean isDateField(String value) {
    SimpleDateFormat strictFormat = new SimpleDateFormat(DateUtils.dMy);
    strictFormat.setLenient(false);
    try {
        strictFormat.parse(value);
        return true;
    } catch (Exception e) {
        // Broad catch is deliberate: null input raises NPE, bad input
        // raises ParseException; both mean "not a date field".
        return false;
    }
}
/**
 * Returns true when the current wall-clock time (hour:minute) falls inside
 * the inclusive period "HH:mm-HH:mm" (hour and minute may each be 1 or 2
 * digits, e.g. "7:5-18:22").
 * Fixes: non-numeric fields now return false instead of throwing
 * NumberFormatException; the dead null-checks after String.split (which
 * never returns null) were removed; the commons-lang isBlank call was
 * replaced with an equivalent plain-JDK check so the method is
 * self-contained.
 */
public static boolean isInTime(String periodTime) {
    if (periodTime == null || periodTime.trim().isEmpty()) {
        return false;
    }
    String[] period = periodTime.split("-");
    if (period.length < 2) {
        return false;
    }
    String[] begin = period[0].split(":");
    String[] end = period[1].split(":");
    if (begin.length < 2 || end.length < 2) {
        return false;
    }
    try {
        int beginTime = Integer.parseInt(begin[0]) * 60 + Integer.parseInt(begin[1]);
        int endTime = Integer.parseInt(end[0]) * 60 + Integer.parseInt(end[1]);
        Calendar now = Calendar.getInstance();
        int currentTime = now.get(Calendar.HOUR_OF_DAY) * 60 + now.get(Calendar.MINUTE);
        return currentTime >= beginTime && currentTime <= endTime;
    } catch (NumberFormatException e) {
        return false;
    }
}
/**
 * Returns the length in minutes of a period "Time-Time", where Time is
 * HH:mm, H:m, HH:m or H:mm (e.g. "07:16-18:22" yields 666). The result is
 * end minus begin, so it is negative when the end is earlier; -1 signals
 * blank or malformed input.
 * Fixes: non-numeric fields now return -1 instead of throwing
 * NumberFormatException; dead null-checks after String.split were removed;
 * the commons-lang isBlank call was replaced with an equivalent plain-JDK
 * check so the method is self-contained.
 */
public static int getDiffTime(String periodTime) {
    if (periodTime == null || periodTime.trim().isEmpty()) {
        return -1;
    }
    String[] period = periodTime.split("-");
    if (period.length < 2) {
        return -1;
    }
    String[] begin = period[0].split(":");
    String[] end = period[1].split(":");
    if (begin.length < 2 || end.length < 2) {
        return -1;
    }
    try {
        int beginTime = Integer.parseInt(begin[0]) * 60 + Integer.parseInt(begin[1]);
        int endTime = Integer.parseInt(end[0]) * 60 + Integer.parseInt(end[1]);
        return endTime - beginTime;
    } catch (NumberFormatException e) {
        return -1;
    }
}
// Current year rendered with the shared dateFormatYear pattern.
public static String getCurrentYearDigits() {
Date nowDate = new Date(System.currentTimeMillis());
SimpleDateFormat bartDateFormat = dateFormatYear;
String year = bartDateFormat.format(nowDate);
return year;
}
/**
 * Maps a java.util.Calendar DAY_OF_WEEK value (1=Sunday … 7=Saturday) to
 * the matching weekday name constant; "Non" for anything out of range.
 */
public static String getWeekDay(int dayOfWeek) {
    if (dayOfWeek < 1 || dayOfWeek > 7) {
        return "Non";
    }
    String[] names = {SUNDAY, MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY};
    return names[dayOfWeek - 1];
}
/**
 * Maps a zero-based month index (0=January … 11=December, matching
 * java.util.Calendar.MONTH) to the month name constant; "Non" for anything
 * out of range.
 */
public static String getMonth(int monthOfYear) {
    if (monthOfYear < 0 || monthOfYear > 11) {
        return "Non";
    }
    String[] names = {
        JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE,
        JULY, AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER
    };
    return names[monthOfYear];
}
// True when the schedule string exeDays contains today's weekday name
// (as produced by getWeekDay for Calendar.DAY_OF_WEEK).
public static boolean isIncludeCurrentDay(String exeDays) {
Calendar currentDate = Calendar.getInstance();
int dayOfWeek = currentDate.get(Calendar.DAY_OF_WEEK);
return exeDays.contains(getWeekDay(dayOfWeek));
}
/**
 * Shifts a weekday number by one with wrap-around: 1-6 map to 2-7 and 7
 * maps to 1. Input outside 1-7 raises an Exception. (The "transer" typo in
 * the name is kept for API compatibility.)
 */
public static int transerWeekDay(int weekDay) throws Exception {
    if (weekDay < 1 || weekDay > 7) {
        throw new Exception("week day only 1-7");
    }
    return weekDay == 7 ? 1 : weekDay + 1;
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.optionalToIf;
import com.intellij.codeInspection.dataFlow.DfaUtil;
import com.intellij.codeInspection.optionalToIf.Instruction.*;
import com.intellij.psi.PsiElementFactory;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiReference;
import com.intellij.psi.PsiVariable;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.EquivalenceChecker;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import static com.intellij.util.ObjectUtils.tryCast;
/**
 * One simplification pass over the Instruction list produced by the
 * Optional-to-if conversion. Passes run in the fixed order of SIMPLIFIERS;
 * each returns a (possibly new) instruction list.
 */
interface Simplifier {
Simplifier[] SIMPLIFIERS = {
new RemoveChecks(), new MergeChecks(), new RemoveAfterReturnOrThrow(), new MergeImmediateReturn(), new MergeImmediateAssignment()
};
List<Instruction> run(@NotNull List<Instruction> instructions);
// Applies every simplifier in order, then renders the survivors to text.
@NotNull
static String simplify(@NotNull List<Instruction> instructions) {
return StreamEx.of(SIMPLIFIERS).foldLeft(instructions, (acc, s) -> s.run(acc))
.stream().map(Instruction::generate).collect(Collectors.joining());
}
/**
 * Drops checks whose condition is statically known: an always-false check
 * disappears with its body; an always-true (or emptied) check is replaced
 * by its recursively simplified body. Checks with an else branch are
 * left untouched.
 */
class RemoveChecks implements Simplifier {
@Override
public List<Instruction> run(@NotNull List<Instruction> instructions) {
List<Instruction> simplified = new ArrayList<>();
for (Instruction instruction : instructions) {
Check check = tryCast(instruction, Check.class);
if (check != null && !check.hasElseBranch()) {
// An empty body is treated like an always-false condition: nothing to keep.
Boolean res = check.myInstructions.isEmpty() ? Boolean.FALSE : DfaUtil.evaluateCondition(check.myCondition);
if (Boolean.FALSE.equals(res)) continue;
check.myInstructions = run(check.myInstructions);
if (check.myInstructions.isEmpty() || Boolean.TRUE.equals(res)) {
simplified.addAll(check.myInstructions);
continue;
}
}
simplified.add(instruction);
}
return simplified;
}
}
/**
 * Merges nested single-child checks into one "&amp;&amp;" condition and merges
 * adjacent checks that throw the same exception into one "||" condition.
 */
class MergeChecks implements Simplifier {
@Override
@NotNull
public List<Instruction> run(@NotNull List<Instruction> instructions) {
List<Instruction> simplified = new ArrayList<>();
Instruction prev = null;
for (Instruction instruction : instructions) {
Check check = tryCast(instruction, Check.class);
if (check != null && !check.hasElseBranch()) {
check.myInstructions = run(check.myInstructions);
check = mergeWithInner(check);
instruction = check;
Check prevCheck = tryCast(prev, Check.class);
if (prevCheck != null && !prevCheck.hasElseBranch()) {
Check merged = mergeChecks(prevCheck, check);
if (merged != null) {
// Replace the previously emitted check in place with the merged one.
prev = merged;
simplified.set(simplified.size() - 1, prev);
continue;
}
}
}
prev = instruction;
simplified.add(instruction);
}
return simplified;
}
// Collapses "if (a) { if (b) { body } }" into "if (a && b) { body }".
@NotNull
private static Check mergeWithInner(@NotNull Check check) {
Check innerCheck = tryCast(getSingleInstruction(check), Check.class);
if (innerCheck == null) return check;
List<Instruction> checkInstructions = innerCheck.myInstructions;
PsiExpression conjunction = mergeConditions(check, innerCheck, "&&");
return new Check(conjunction, checkInstructions, null);
}
// Builds "(cond1) operator (cond2)" as a new PSI expression.
@NotNull
private static PsiExpression mergeConditions(@NotNull Check c1, @NotNull Check c2, @NotNull String operator) {
PsiExpression cond1 = c1.myCondition;
PsiExpression cond2 = c2.myCondition;
PsiElementFactory factory = PsiElementFactory.getInstance(cond1.getProject());
return factory.createExpressionFromText(ParenthesesUtils.getText(cond1, ParenthesesUtils.OR_PRECEDENCE) +
operator +
ParenthesesUtils.getText(cond2, ParenthesesUtils.OR_PRECEDENCE), cond1);
}
// Merges two consecutive checks whose single instruction throws the same
// exception into one check with a disjoined condition; null when not applicable.
@Nullable
private static Check mergeChecks(@NotNull Instruction prev, @NotNull Check check) {
Check prevCheck = tryCast(prev, Check.class);
if (prevCheck == null) return null;
Throw prevThrow = tryCast(getSingleInstruction(prevCheck), Throw.class);
if (prevThrow == null) return null;
Throw curThrow = tryCast(getSingleInstruction(check), Throw.class);
if (curThrow == null) return null;
boolean isSameException = EquivalenceChecker.getCanonicalPsiEquivalence()
.expressionsAreEquivalent(prevThrow.myException, curThrow.myException);
if (!isSameException) return null;
PsiExpression disjunction = mergeConditions(prevCheck, check, "||");
return new Check(disjunction, check.myInstructions, null);
}
// Returns the check's only instruction, or null when it has zero or many.
private static Instruction getSingleInstruction(@NotNull Check check) {
return ContainerUtil.getOnlyItem(check.myInstructions);
}
}
/**
 * Removes all code that appears after throw or return instruction.
 * This might happen after applying other simplifications, in particular the ones that remove redundant checks.
 * E.g. for method
 * String test(String in) {
 * if (in == null) return "foo";
 * return Optional.ofNullable(in).orElse("bar");
 * }
 * We would have two instructions:
 * - Check(in != null) with Return(in) inside
 * - Return "bar"
 *
 * After simplification of Check that is always true we
 * end up with two returns in the row, so the second one must be removed.
 */
class RemoveAfterReturnOrThrow implements Simplifier {
@Override
public List<Instruction> run(@NotNull List<Instruction> instructions) {
List<Instruction> simplified = new ArrayList<>();
for (Instruction instruction : instructions) {
Check check = tryCast(instruction, Check.class);
if (check != null && !check.hasElseBranch()) check.myInstructions = run(check.myInstructions);
simplified.add(instruction);
// Everything after an unconditional return/throw is unreachable — stop here.
if (instruction instanceof Return || instruction instanceof Throw) return simplified;
}
return simplified;
}
}
/**
 * Collapses "T x = expr; return x;" (or "x = expr; return x;") into
 * "return expr;" when the returned reference resolves to the variable
 * written by the immediately preceding instruction.
 */
class MergeImmediateReturn implements Simplifier {
@Override
public List<Instruction> run(@NotNull List<Instruction> instructions) {
List<Instruction> simplified = new ArrayList<>();
Instruction prev = null;
for (Instruction instruction : instructions) {
Return ret = tryCast(instruction, Return.class);
if (ret != null) {
Return merged = mergeReturn(ret, prev);
if (merged != null) {
simplified.set(simplified.size() - 1, merged);
prev = merged;
continue;
}
}
else {
Check check = tryCast(instruction, Check.class);
if (check != null && !check.hasElseBranch()) check.myInstructions = run(check.myInstructions);
}
simplified.add(instruction);
prev = instruction;
}
return simplified;
}
@Nullable
private Return mergeReturn(@NotNull Return ret, @Nullable Instruction prev) {
PsiVariable retVariable = getReturnVariable(ret);
if (retVariable == null) return null;
Declaration declaration = tryCast(prev, Declaration.class);
if (declaration != null) return mergeReturn(retVariable, declaration.myLhs, declaration.myRhs);
Assignment assignment = tryCast(prev, Assignment.class);
if (assignment != null) return mergeReturn(retVariable, assignment.myLhs, assignment.myRhs);
return null;
}
// Resolves the returned expression to a variable, or null when it is not
// a simple variable reference.
@Nullable
PsiVariable getReturnVariable(@NotNull Return ret) {
PsiReference reference = tryCast(ret.myExpression, PsiReference.class);
if (reference == null) return null;
return tryCast(reference.resolve(), PsiVariable.class);
}
@Nullable
@Contract(pure = true)
private static Return mergeReturn(@NotNull PsiVariable retVariable, @NotNull PsiVariable lhs, @NotNull PsiExpression rhs) {
return retVariable != lhs ? null : new Return(rhs);
}
}
/**
 * Collapses "T x = a; x = b;" into "T x = b;" when an assignment to a
 * variable immediately follows that variable's declaration.
 */
class MergeImmediateAssignment implements Simplifier {
@Override
public List<Instruction> run(@NotNull List<Instruction> instructions) {
List<Instruction> simplified = new ArrayList<>();
Instruction prev = null;
for (Instruction instruction : instructions) {
Assignment assignment = tryCast(instruction, Assignment.class);
if (assignment != null) {
Declaration declaration = tryCast(prev, Declaration.class);
if (declaration != null && declaration.myLhs == assignment.myLhs) {
Declaration merged = new Declaration(declaration.myLhs, assignment.myRhs);
simplified.set(simplified.size() - 1, merged);
prev = merged;
continue;
}
}
else {
Check check = tryCast(instruction, Check.class);
if (check != null && !check.hasElseBranch()) check.myInstructions = run(check.myInstructions);
}
simplified.add(instruction);
prev = instruction;
}
return simplified;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraffa;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.apache.giraffa.RenameRecoveryState.PUT_SETFLAG;
import static org.apache.giraffa.RenameRecoveryState.PUT_NOFLAG;
import static org.apache.giraffa.RenameRecoveryState.DELETE;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.giraffa.hbase.INodeManager;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestRename {
static final Log LOG = LogFactory.getLog(TestRename.class);
private static final HBaseTestingUtility UTIL =
GiraffaTestUtils.getHBaseTestingUtility();
private GiraffaFileSystem grfs;
private Connection connection;
private RowKeyFactory keyFactory;
private INodeManager nodeManager;
@BeforeClass
public static void beforeClass() throws Exception {
// Point HBase test data at the shared Giraffa test directory, then boot a
// single-node mini cluster used by every test in this class.
System.setProperty(
HBaseCommonTestingUtility.BASE_TEST_DIRECTORY_KEY,
GiraffaTestUtils.BASE_TEST_DIRECTORY);
UTIL.startMiniCluster(1);
}
@Before
public void before() throws IOException {
// Re-format the Giraffa file system before each test so every test starts
// from an empty namespace, then open fresh connection/key-factory/INode
// manager handles.
GiraffaConfiguration conf =
new GiraffaConfiguration(UTIL.getConfiguration());
GiraffaTestUtils.setGiraffaURI(conf);
GiraffaFileSystem.format(conf, false);
grfs = (GiraffaFileSystem) FileSystem.get(conf);
connection = ConnectionFactory.createConnection(conf);
keyFactory = RowKeyFactoryProvider.createFactory(conf);
nodeManager = GiraffaTestUtils.getNodeManager(conf, connection, keyFactory);
}
@After
public void after() throws IOException {
// Quietly close the per-test resources (logs failures instead of throwing).
IOUtils.cleanup(LOG, grfs, nodeManager, connection);
}
@AfterClass
public static void afterClass() throws Exception {
// Tear down the shared mini cluster once all tests have run.
UTIL.shutdownMiniCluster();
}
/**
 * Creates a 2000-byte file at srcStr filled with the character c.
 * Fix: the output stream is now closed in a finally block so it is not
 * leaked when one of the writes throws.
 */
private void createTestFile(String srcStr, char c) throws IOException {
    Path src = new Path(srcStr);
    FSDataOutputStream out = grfs.create(src, true, 5000, (short) 3, 512);
    try {
        for (int j = 0; j < 2000; j++) {
            out.write(c);
        }
    } finally {
        out.close();
    }
}
/** Returns the first character of the file at src, closing the stream on all paths. */
private char readFile(Path src) throws IOException {
    FSDataInputStream in = grfs.open(src);
    try {
        return (char) in.read();
    } finally {
        in.close();
    }
}
/**
 * Completes the given stage of the rename process.
 * Used by the recovery tests to simulate a rename that was interrupted
 * part-way: PUT_SETFLAG copies src to dst with the rename flag set,
 * DELETE removes the src node, and PUT_NOFLAG clears the flag on dst.
 */
private void doRenameStage(String src, String dst, RenameRecoveryState stage)
throws IOException {
// Resolve both paths against the working directory to absolute paths.
src = new Path(grfs.getWorkingDirectory(), src).toUri().getPath();
dst = new Path(grfs.getWorkingDirectory(), dst).toUri().getPath();
if (stage == PUT_SETFLAG) {
LOG.debug("Copying " + src + " to " + dst + " with rename flag");
INode srcNode = nodeManager.getINode(src);
RowKey dstKey = keyFactory.newInstance(dst, srcNode.getId());
INode dstNode = srcNode.cloneWithNewRowKey(dstKey);
// The rename flag records the original row key so recovery can find src.
dstNode.setRenameState(RenameState.TRUE(srcNode.getRowKey().getKey()));
nodeManager.updateINode(dstNode, null, nodeManager.getXAttrs(src));
}
if (stage == DELETE) {
INode srcNode = nodeManager.getINode(src);
nodeManager.delete(srcNode);
}
if (stage == PUT_NOFLAG) {
INode dstNode = nodeManager.getINode(dst);
dstNode.setRenameState(RenameState.FALSE());
nodeManager.updateINode(dstNode);
}
}
// Renames a single file and verifies the move: dst exists, src is gone,
// and the content is the source's. All callers create the source file with
// character 'A', which is why 'A' is hard-coded here.
private void renameFile(String src, String dst, boolean overwrite)
throws IOException {
Path srcPath = new Path(src);
Path dstPath = new Path(dst);
grfs.rename(srcPath, dstPath, overwrite ? Rename.OVERWRITE : Rename.NONE);
assertTrue(grfs.exists(dstPath));
assertFalse(grfs.exists(srcPath));
assertTrue(readFile(dstPath) == 'A');
}
/**
 * Collects type and data information about the children of a directory.
 * @param path the directory whose children to analyze
 * @param isFile stores whether or not the node is a file for each child
 * @param firstChar stores the first character of each child that is a file
 */
private void collectDirectoryChildrenInfo(Path path,
Map<Path,Boolean> isFile,
Map<Path,Character> firstChar)
throws IOException {
RemoteIterator<LocatedFileStatus> children = grfs.listFiles(path, true);
while(children.hasNext()) {
Path cur = new Path(children.next().getPath().toUri().getPath());
// relative child paths will not change after renameFile
Path rel = new Path(path.toUri().relativize(cur.toUri()));
if(grfs.isFile(cur)) {
isFile.put(rel, true);
firstChar.put(rel, readFile(cur));
}else {
// Directories are recorded without content.
isFile.put(rel, false);
}
}
}
// Snapshots the children of src, then delegates to the 5-arg overload
// which performs the rename and verifies the snapshot against dst.
private void renameDir(String src, String dst, boolean overwrite)
throws IOException {
// collect information on children of src
Map<Path,Boolean> isFile = new HashMap<Path,Boolean>();
Map<Path,Character> firstChar = new HashMap<Path,Character>();
collectDirectoryChildrenInfo(new Path(src), isFile, firstChar);
renameDir(src, dst, overwrite, isFile, firstChar);
}
// Renames a directory tree and verifies: dst exists, src is gone, dst's
// children match the pre-collected snapshot (isFile1/firstChar1), and no
// child remains under the old src path.
private void renameDir(String src, String dst, boolean overwrite,
Map<Path,Boolean> isFile1,
Map<Path,Character> firstChar1)
throws IOException {
Path srcPath = new Path(src);
Path dstPath = new Path(dst);
grfs.rename(srcPath, dstPath, overwrite ? Rename.OVERWRITE : Rename.NONE);
// check that src has moved to dst
assertTrue(grfs.exists(dstPath));
assertFalse(grfs.exists(srcPath));
// collect information on children of dst
Map<Path,Boolean> isFile2 = new HashMap<Path,Boolean>();
Map<Path,Character> firstChar2 = new HashMap<Path,Character>();
collectDirectoryChildrenInfo(dstPath, isFile2, firstChar2);
// check that information on src children was properly copied
assertEquals(isFile1, isFile2);
assertEquals(firstChar1, firstChar2);
// check that src children no longer exist
for(Path path : isFile1.keySet()) {
assertFalse("Path "+path+" exists", grfs.exists(new Path(srcPath, path)));
}
}
// ==== FILE RENAME: SUCCESS CASES ====
@Test
public void testFileRename() throws IOException {
// Plain same-directory rename of a single file.
createTestFile("test", 'A');
renameFile("test", "test2", false);
}
@Test
public void testFileRenameRecoveryStage1Complete() throws IOException {
// Rename must recover when the flagged dst copy already exists (stage 1 done).
grfs.mkdirs(new Path("dir"));
createTestFile("test", 'A');
doRenameStage("test", "dir/test", PUT_SETFLAG);
renameFile("test", "dir/test", false);
}
@Test
public void testFileRenameRecoveryStage2Complete() throws IOException {
// Rename must recover when dst is flagged and src is already deleted (stage 2 done).
grfs.mkdirs(new Path("dir"));
createTestFile("test", 'A');
doRenameStage("test", "dir/test", PUT_SETFLAG);
doRenameStage("test", "dir/test", DELETE);
renameFile("test", "dir/test", false);
}
@Test
public void testFileRenameOverwrite() throws IOException {
// OVERWRITE rename must replace an existing destination file.
createTestFile("test", 'A');
createTestFile("test2", 'B');
renameFile("test", "test2", true);
}
@Test
public void testFileMove() throws IOException {
// Move a file into an existing subdirectory.
grfs.mkdirs(new Path("dir"));
createTestFile("test", 'A');
renameFile("test", "dir/test", false);
}
@Test
public void testFileMoveOverwrite() throws IOException {
// Move into a subdirectory, overwriting the file already there.
grfs.mkdirs(new Path("dir"));
createTestFile("test", 'A');
createTestFile("dir/test", 'B');
renameFile("test", "dir/test", true);
}
// ==== FILE RENAME: FAIL CASES ====
@Test(expected = FileNotFoundException.class)
public void testFileRenameSrcMissingAndDstMissing() throws IOException {
// Renaming a non-existent source must fail.
renameFile("test", "test2", false);
}
@Test(expected = FileNotFoundException.class)
public void testFileRenameSrcMissingDstExists() throws IOException {
// Missing source must fail even when the destination exists.
createTestFile("test2", 'B');
renameFile("test", "test2", false);
}
@Test(expected = FileAlreadyExistsException.class)
public void testFileRenameDstExistsNoOverwrite() throws IOException {
// Without OVERWRITE, an existing destination must fail the rename.
createTestFile("test", 'A');
createTestFile("test2", 'B');
renameFile("test", "test2", false);
}
@Test(expected = FileAlreadyExistsException.class)
public void testFileRenameDstEqualsSrc() throws IOException {
// Renaming a file onto itself must fail.
createTestFile("test", 'A');
renameFile("test", "test", false);
}
@Test(expected = IOException.class)
public void testFileMoveDstExistsAsDirectory() throws IOException {
// A file may not overwrite an existing directory.
createTestFile("test", 'A');
grfs.mkdirs(new Path("test2"));
renameFile("test", "test2", true);
}
@Test(expected = FileNotFoundException.class)
public void testFileMoveDstParentDoesNotExist() throws IOException {
// Destination's parent directory must already exist.
createTestFile("test", 'A');
renameFile("test", "dir/test2", false);
}
@Test(expected = ParentNotDirectoryException.class)
public void testFileMoveDstParentIsFile() throws IOException {
// Destination's parent must be a directory, not a file.
createTestFile("test", 'A');
createTestFile("file", 'C');
renameFile("test", "file/test2", false);
}
// ==== DIRECTORY RENAME: SUCCESS CASES ====
@Test
public void testDirRename() throws IOException {
// Rename a 3-level directory tree with files at each level.
grfs.mkdirs(new Path("/a/b/c"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
renameDir("/a", "/newA", false);
}
@Test
public void testDirRenameOverwrite() throws IOException {
// OVERWRITE rename onto an existing EMPTY destination directory succeeds.
grfs.mkdirs(new Path("/a/b/c"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
grfs.mkdirs(new Path("/newA")); // empty dst directory
renameDir("/a", "/newA", true);
}
@Test
public void testDirRenameNoChildren() throws IOException {
// Rename of an empty directory.
grfs.mkdirs(new Path("/x/a"));
renameDir("/x/a", "/x/newA", false);
}
@Test
public void testDirRenameRecoveryStage1PartlyComplete() throws IOException {
// Recovery when only SOME children were flag-copied before the crash.
grfs.mkdirs(new Path("/a/b/c"));
grfs.mkdirs(new Path("/dir"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
doRenameStage("/a/1", "/newA/1", PUT_SETFLAG);
doRenameStage("/a/2", "/newA/2", PUT_SETFLAG);
doRenameStage("/a/b", "/newA/b", PUT_SETFLAG);
renameDir("/a", "/newA", false);
}
@Test
public void testDirRenameRecoveryStage2PartlyComplete() throws IOException {
// Recovery when the whole tree was flag-copied and deletion was only
// partly done. The src snapshot is taken BEFORE staging because the
// partial rename destroys part of src.
grfs.mkdirs(new Path("/a/b/c"));
grfs.mkdirs(new Path("/dir"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
// collect src information before doing partial rename
Map<Path,Boolean> isFile = new HashMap<Path,Boolean>();
Map<Path,Character> firstChar = new HashMap<Path,Character>();
collectDirectoryChildrenInfo(new Path("/a"), isFile, firstChar);
doRenameStage("/a", "/dir/a", PUT_SETFLAG);
doRenameStage("/a/1", "/dir/a/1", PUT_SETFLAG);
doRenameStage("/a/2", "/dir/a/2", PUT_SETFLAG);
doRenameStage("/a/b", "/dir/a/b", PUT_SETFLAG);
doRenameStage("/a/b/1", "/dir/a/b/1", PUT_SETFLAG);
doRenameStage("/a/b/2", "/dir/a/b/2", PUT_SETFLAG);
doRenameStage("/a/b/c", "/dir/a/b/c", PUT_SETFLAG);
doRenameStage("/a/b/c/1", "/dir/a/b/c/1", PUT_SETFLAG);
doRenameStage("/a/b/c/2", "/dir/a/b/c/2", PUT_SETFLAG);
// deletes occur recursively from bottom to top
doRenameStage("/a/b/c/2", "/dir/a/b/c/2", DELETE);
doRenameStage("/a/b/c/1", "/dir/a/b/c/1", DELETE);
doRenameStage("/a/b/c", "/dir/a/b/c", DELETE);
doRenameStage("/a/b/2", "/dir/a/b/2", DELETE);
renameDir("/a", "/dir/a", false, isFile, firstChar);
}
@Test
public void testDirRenameRecoveryStage3PartlyComplete() throws IOException {
// Recovery when copy and delete stages finished and flag-clearing was
// only partly done. Snapshot src before staging, since src is destroyed.
grfs.mkdirs(new Path("/a/b/c"));
grfs.mkdirs(new Path("/dir"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
// collect src information before doing partial rename
Map<Path,Boolean> isFile = new HashMap<Path,Boolean>();
Map<Path,Character> firstChar = new HashMap<Path,Character>();
collectDirectoryChildrenInfo(new Path("/a"), isFile, firstChar);
doRenameStage("/a", "/dir/a", PUT_SETFLAG);
doRenameStage("/a/1", "/dir/a/1", PUT_SETFLAG);
doRenameStage("/a/2", "/dir/a/2", PUT_SETFLAG);
doRenameStage("/a/b", "/dir/a/b", PUT_SETFLAG);
doRenameStage("/a/b/1", "/dir/a/b/1", PUT_SETFLAG);
doRenameStage("/a/b/2", "/dir/a/b/2", PUT_SETFLAG);
doRenameStage("/a/b/c", "/dir/a/b/c", PUT_SETFLAG);
doRenameStage("/a/b/c/1", "/dir/a/b/c/1", PUT_SETFLAG);
doRenameStage("/a/b/c/2", "/dir/a/b/c/2", PUT_SETFLAG);
// deletes occur recursively from bottom to top
doRenameStage("/a/b/c/2", "/dir/a/b/c/2", DELETE);
doRenameStage("/a/b/c/1", "/dir/a/b/c/1", DELETE);
doRenameStage("/a/b/c", "/dir/a/b/c", DELETE);
doRenameStage("/a/b/2", "/dir/a/b/2", DELETE);
doRenameStage("/a/b/1", "/dir/a/b/1", DELETE);
doRenameStage("/a/b", "/dir/a/b", DELETE);
doRenameStage("/a/2", "/dir/a/2", DELETE);
doRenameStage("/a/1", "/dir/a/1", DELETE);
doRenameStage("/a", "/dir/a", DELETE);
doRenameStage("/a/1", "/dir/a/1", PUT_NOFLAG);
doRenameStage("/a/2", "/dir/a/2", PUT_NOFLAG);
doRenameStage("/a/b", "/dir/a/b", PUT_NOFLAG);
renameDir("/a", "/dir/a", false, isFile, firstChar);
}
@Test
public void testDirMoveDown() throws IOException {
// Move a directory tree into a sibling directory (deeper in the namespace).
grfs.mkdirs(new Path("/a/b/c"));
grfs.mkdirs(new Path("/d"));
createTestFile("/a/1", 't');
createTestFile("/a/2", 'u');
createTestFile("/a/b/1", 'v');
createTestFile("/a/b/2", 'w');
createTestFile("/a/b/c/1", 'x');
createTestFile("/a/b/c/2", 'y');
renameDir("/a", "/d/newA", false);
}
@Test
public void testDirMoveUp() throws IOException {
// Move a nested directory tree up one level in the namespace.
grfs.mkdirs(new Path("x/y/a/b/c"));
createTestFile("/x/y/a/1", 't');
createTestFile("/x/y/a/2", 'u');
createTestFile("/x/y/a/b/1", 'v');
createTestFile("/x/y/a/b/2", 'w');
createTestFile("/x/y/a/b/c/1", 'x');
createTestFile("/x/y/a/b/c/2", 'y');
renameDir("/x/y/a", "/x/newA", false);
}
@Test
public void testDirMoveOverwrite() throws IOException {
// Move a tree up one level, overwriting an empty destination directory.
grfs.mkdirs(new Path("x/y/a/b/c"));
createTestFile("/x/y/a/1", 't');
createTestFile("/x/y/a/2", 'u');
createTestFile("/x/y/a/b/1", 'v');
createTestFile("/x/y/a/b/2", 'w');
createTestFile("/x/y/a/b/c/1", 'x');
createTestFile("/x/y/a/b/c/2", 'y');
grfs.mkdirs(new Path("/x/newA")); // empty dst directory
renameDir("/x/y/a", "/x/newA", true);
}
// ==== DIRECTORY RENAME: FAIL CASES ====
@Test(expected = IOException.class)
public void testDirRenameSrcIsRoot() throws IOException {
// The root directory may not be renamed.
renameDir("/", "test", false);
}
@Test(expected = FileAlreadyExistsException.class)
public void testDirRenameDstExistsNoOverwrite() throws IOException {
// Without OVERWRITE, an existing destination directory must fail the rename.
grfs.mkdirs(new Path("x/y/a/b/c"));
createTestFile("/x/y/a/1", 't');
createTestFile("/x/y/a/2", 'u');
createTestFile("/x/y/a/b/1", 'v');
createTestFile("/x/y/a/b/2", 'w');
createTestFile("/x/y/a/b/c/1", 'x');
createTestFile("/x/y/a/b/c/2", 'y');
grfs.mkdirs(new Path("/x/newA")); // empty dst directory
renameDir("/x/y/a", "/x/newA", false);
}
@Test(expected = IOException.class)
public void testDirRenameDstExistsAsFile() throws IOException {
// A directory may not overwrite an existing file.
grfs.mkdirs(new Path("x/y/a/b/c"));
createTestFile("/x/y/a/1", 't');
createTestFile("/x/y/a/2", 'u');
createTestFile("/x/y/a/b/1", 'v');
createTestFile("/x/y/a/b/2", 'w');
createTestFile("/x/y/a/b/c/1", 'x');
createTestFile("/x/y/a/b/c/2", 'y');
createTestFile("/x/newA", 'A'); // destination exists as file
renameDir("/x/y/a", "/x/newA", true);
}
@Test(expected = IOException.class)
public void testDirRenameDstNotEmpty() throws IOException {
// Even with OVERWRITE, a non-empty destination directory must fail.
grfs.mkdirs(new Path("x/y/a/b/c"));
createTestFile("/x/y/a/1", 't');
createTestFile("/x/y/a/2", 'u');
createTestFile("/x/y/a/b/1", 'v');
createTestFile("/x/y/a/b/2", 'w');
createTestFile("/x/y/a/b/c/1", 'x');
createTestFile("/x/y/a/b/c/2", 'y');
// create non-empty destination dir
grfs.mkdirs(new Path("/x/newA"));
createTestFile("/x/newA/test", 'A');
renameDir("/x/y/a", "/x/newA", true);
}
// The filesystem root may never be the destination of a rename.
@Test(expected = IOException.class)
public void testDirRenameDstIsRoot() throws IOException {
    grfs.mkdirs(new Path("x/y/a/b/c"));
    final String[] paths = {
        "/x/y/a/1", "/x/y/a/2",
        "/x/y/a/b/1", "/x/y/a/b/2",
        "/x/y/a/b/c/1", "/x/y/a/b/c/2"
    };
    final char[] fill = {'t', 'u', 'v', 'w', 'x', 'y'};
    for (int i = 0; i < paths.length; i++) {
        createTestFile(paths[i], fill[i]);
    }
    renameDir("/x/y/a", "/", true);
}
// Renaming into a destination whose parent directory is missing must raise
// FileNotFoundException.
@Test(expected = FileNotFoundException.class)
public void testDirRenameDstParentDoesNotExist() throws IOException {
    grfs.mkdirs(new Path("x/y/a/b/c"));
    final String[] paths = {
        "/x/y/a/1", "/x/y/a/2",
        "/x/y/a/b/1", "/x/y/a/b/2",
        "/x/y/a/b/c/1", "/x/y/a/b/c/2"
    };
    final char[] fill = {'t', 'u', 'v', 'w', 'x', 'y'};
    for (int i = 0; i < paths.length; i++) {
        createTestFile(paths[i], fill[i]);
    }
    renameDir("/x/y/a", "/x/z/newA", false);
}
// If the destination's parent path exists but is a regular file, the rename
// must raise ParentNotDirectoryException.
@Test(expected = ParentNotDirectoryException.class)
public void testDirRenameDstParentIsFile() throws IOException {
    grfs.mkdirs(new Path("x/y/a/b/c"));
    final String[] paths = {
        "/x/y/a/1", "/x/y/a/2",
        "/x/y/a/b/1", "/x/y/a/b/2",
        "/x/y/a/b/c/1", "/x/y/a/b/c/2"
    };
    final char[] fill = {'t', 'u', 'v', 'w', 'x', 'y'};
    for (int i = 0; i < paths.length; i++) {
        createTestFile(paths[i], fill[i]);
    }
    createTestFile("/x/z", 'A'); // destination parent exists as file
    renameDir("/x/y/a", "/x/z/newA", false);
}
// Renaming a directory onto itself must fail with FileAlreadyExistsException.
@Test(expected = FileAlreadyExistsException.class)
public void testDirRenameDstEqualsSrc() throws IOException {
    grfs.mkdirs(new Path("x/y/a/b/c"));
    final String[] paths = {
        "/x/y/a/1", "/x/y/a/2",
        "/x/y/a/b/1", "/x/y/a/b/2",
        "/x/y/a/b/c/1", "/x/y/a/b/c/2"
    };
    final char[] fill = {'t', 'u', 'v', 'w', 'x', 'y'};
    for (int i = 0; i < paths.length; i++) {
        createTestFile(paths[i], fill[i]);
    }
    final String samePath = "/x/y/a";
    renameDir(samePath, samePath, true);
}
// A directory cannot be renamed into one of its own descendants.
@Test(expected = IOException.class)
public void testDirRenameDstIsInsideSrc() throws IOException {
    grfs.mkdirs(new Path("x/y/a/b/c"));
    final String[] paths = {
        "/x/y/a/1", "/x/y/a/2",
        "/x/y/a/b/1", "/x/y/a/b/2",
        "/x/y/a/b/c/1", "/x/y/a/b/c/2"
    };
    final char[] fill = {'t', 'u', 'v', 'w', 'x', 'y'};
    for (int i = 0; i < paths.length; i++) {
        createTestFile(paths[i], fill[i]);
    }
    renameDir("/x/y/a", "/x/y/a/newA", false);
}
}
| |
/*
**The MIT License (MIT)
**Copyright (c) <2014> <CIn-UFPE>
**
**Permission is hereby granted, free of charge, to any person obtaining a copy
**of this software and associated documentation files (the "Software"), to deal
**in the Software without restriction, including without limitation the rights
**to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
**copies of the Software, and to permit persons to whom the Software is
**furnished to do so, subject to the following conditions:
**
**The above copyright notice and this permission notice shall be included in
**all copies or substantial portions of the Software.
**
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
**IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
**FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
**AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
**LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
**OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
**THE SOFTWARE.
*/
package org.lanca.gui;
import java.awt.CardLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Image;
import java.awt.Toolkit;
import java.util.List;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import org.caboclo.clients.AmazonClient;
import org.caboclo.clients.ApiClient;
import org.caboclo.clients.CloudService;
import org.caboclo.clients.DropboxClient;
import org.caboclo.clients.GoogleDriveClient;
import org.caboclo.clients.OpenStackClient;
import org.caboclo.clients.OneDriveClient;
import org.caboclo.util.Credentials;
/**
 * Swing login window: the user either logs in with a previously saved cloud
 * account (left half of the dialog) or authenticates a brand-new account for
 * one of the listed services (right half).
 *
 * The right half hosts one {@code AuthPanel} per supported service inside a
 * {@link CardLayout}; the visible card follows the selection made in
 * {@code cloudServiceComboBox}. The layout in {@code initComponents()} is
 * NetBeans GUI-builder generated code and should not be edited by hand.
 */
public class AuthDialog extends javax.swing.JFrame {
// CardLayout of authPanel; decides which service's AuthPanel is visible.
private CardLayout cards;
// One API client instance per supported cloud service.
private ApiClient dropboxClient;
private ApiClient googleDriveClient;
private ApiClient oneDriveClient;
private ApiClient openStackClient;
private ApiClient amazonClient;
// One authentication panel (one card) per supported cloud service.
private AuthPanel dropboxPanel;
private AuthPanel googleDrivePanel;
private AuthPanel oneDrivePanel;
private AuthPanel openStackPanel;
private AuthPanel amazonPanel;
// Persistent store of previously saved account credentials.
private final Credentials credentials;
/**
* Creates new form AuthDialog
*/
// public AuthDialog(java.awt.Frame parent, boolean modal) {
// super(parent, modal);
//
// initComponents();
// initPanels();
//
// updateComboBoxPanel();
// }
/**
* Builds the dialog: creates the generated components, centers the window
* on screen, creates the per-service panels, and populates the combo box
* of saved accounts.
*/
public AuthDialog() {
initComponents();
// Center the window on the screen.
Dimension dim = Toolkit.getDefaultToolkit().getScreenSize();
this.setLocation(dim.width / 2 - this.getSize().width / 2, dim.height / 2 - this.getSize().height / 2);
// Must run before updateComboBoxPanel(): it initializes 'cards'.
initPanels();
credentials = new Credentials();
initializeServicesComboBox();
updateComboBoxPanel();
}
// NetBeans GUI-builder generated code; regenerated by the form editor.
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabel1 = new javax.swing.JLabel();
servicesComboBox = new javax.swing.JComboBox();
jSeparator1 = new javax.swing.JSeparator();
jLabel2 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
cloudServiceComboBox = new javax.swing.JComboBox();
authPanel = new javax.swing.JPanel();
createAccountButton = new javax.swing.JButton();
loginSavedAccountButton = new javax.swing.JButton();
deleteAccountButton = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setIconImage(getIconImage());
setLocationByPlatform(true);
jLabel1.setText("Choose saved account...");
servicesComboBox.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "<no account saved>" }));
jSeparator1.setOrientation(javax.swing.SwingConstants.VERTICAL);
jLabel2.setText("... or enter a new account");
jLabel4.setText("Cloud service:");
cloudServiceComboBox.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Dropbox", "Google Drive", "OneDrive", "Amazon S3", "OpenStack" }));
cloudServiceComboBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
cloudServiceComboBoxActionPerformed(evt);
}
});
authPanel.setLayout(new java.awt.CardLayout());
createAccountButton.setText("Create account and login");
createAccountButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
createAccountButtonActionPerformed(evt);
}
});
loginSavedAccountButton.setText("Login");
loginSavedAccountButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
loginSavedAccountButtonActionPerformed(evt);
}
});
deleteAccountButton.setText("Delete account");
deleteAccountButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
deleteAccountButtonActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel1)
.addComponent(servicesComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, 200, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGroup(layout.createSequentialGroup()
.addComponent(loginSavedAccountButton)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(deleteAccountButton)))
.addGap(18, 18, 18)
.addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 13, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel2)
.addComponent(createAccountButton)
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel4)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(cloudServiceComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, 125, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGap(0, 118, Short.MAX_VALUE))
.addComponent(authPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel4)
.addComponent(cloudServiceComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel1)
.addGap(18, 18, 18)
.addComponent(servicesComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(45, 45, 45)
.addComponent(authPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 245, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(createAccountButton))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGap(0, 0, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(loginSavedAccountButton)
.addComponent(deleteAccountButton)))))
.addComponent(jSeparator1))
.addContainerGap())
);
pack();
}// </editor-fold>//GEN-END:initComponents
// Switches the visible auth card whenever the service selection changes.
private void cloudServiceComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cloudServiceComboBoxActionPerformed
updateComboBoxPanel();
}//GEN-LAST:event_cloudServiceComboBoxActionPerformed
// Authenticates the currently visible panel's service, then opens MainForm
// on success or shows an error dialog on failure.
// NOTE(review): authenticate() runs on the EDT (invokeLater); if it blocks
// on network I/O the UI will freeze for its duration — confirm.
// NOTE(review): getVisiblePanel() can return null if no card is visible,
// which would NPE here — verify this cannot happen in practice.
private void createAccountButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createAccountButtonActionPerformed
final JFrame _this = this;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
boolean b = getVisiblePanel().authenticate();
if (b) {
MainForm form = new MainForm(getVisiblePanel().getClient());
form.setVisible(true);
_this.setVisible(false);
} else {
JOptionPane.showMessageDialog(_this,
"Could not authenticate with the cloud server",
"Authentication error",
JOptionPane.ERROR_MESSAGE);
}
}
});
}//GEN-LAST:event_createAccountButtonActionPerformed
// Logs in with the saved account selected in servicesComboBox; silently
// returns (with a console message) if the stored credentials are invalid.
private void loginSavedAccountButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_loginSavedAccountButtonActionPerformed
String selectedService = getSelectedService();
ApiClient client = credentials.retrieveAccount(selectedService);
if (client == null) {
// TODO invalid saved credentials... surface this to the user instead
// of only printing to stdout.
System.out.println("Invalid credentials.");
return;
}
MainForm form = new MainForm(client);
form.setVisible(true);
this.setVisible(false);
}//GEN-LAST:event_loginSavedAccountButtonActionPerformed
// Deletes the selected saved account and re-inserts the placeholder entry
// when the list becomes empty.
// NOTE(review): if the "<no account saved>" placeholder is the selected
// item, it is passed to removeCredentials() as if it were a real service
// name — confirm Credentials tolerates that.
private void deleteAccountButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteAccountButtonActionPerformed
Object item = servicesComboBox.getSelectedItem();
if (item != null) {
String service = item.toString();
if (!service.isEmpty()) {
credentials.removeCredentials(service);
servicesComboBox.removeItem(service);
}
}
if (servicesComboBox.getItemCount()<1){
servicesComboBox.addItem("<no account saved>");
deleteAccountButton.setEnabled(false);
loginSavedAccountButton.setEnabled(false);
}
}//GEN-LAST:event_deleteAccountButtonActionPerformed
// Window icon, loaded from the application resources.
@Override
public Image getIconImage() {
return Toolkit.getDefaultToolkit().getImage(getClass().getResource("/org/lanca/gui/icons/Caboclo.png"));
}
// Shows the card whose name matches the selected service label and
// repacks the window to fit the newly visible panel.
// Card names come from CloudService.toString() in initPanels(), so they
// must match the labels in cloudServiceComboBox's model.
private void updateComboBoxPanel() {
String selectedItem = (String) cloudServiceComboBox.getSelectedItem();
cards.show(authPanel, selectedItem);
this.pack();
}
// Creates one client and one auth panel per service and registers each
// panel as a named card in authPanel. Also captures the CardLayout.
private void initPanels() {
openStackClient = new OpenStackClient();
dropboxClient = new DropboxClient();
googleDriveClient = new GoogleDriveClient();
amazonClient = new AmazonClient();
oneDriveClient = new OneDriveClient();
dropboxPanel = new OAuthPanel(dropboxClient);
googleDrivePanel = new OAuthPanel(googleDriveClient);
oneDrivePanel = new OAuthPanel(oneDriveClient);
openStackPanel = new OpenStackPanel(openStackClient);
amazonPanel = new AmazonPanel(amazonClient);
authPanel.add(dropboxPanel, CloudService.DROPBOX.toString());
authPanel.add(googleDrivePanel, CloudService.GOOGLE_DRIVE.toString());
authPanel.add(oneDrivePanel, CloudService.ONE_DRIVE.toString());
authPanel.add(openStackPanel, CloudService.OPEN_STACK.toString());
authPanel.add(amazonPanel, CloudService.AMAZON_S3.toString());
cards = (CardLayout) authPanel.getLayout();
}
// Returns the auth panel of the currently visible card, or null when no
// component in authPanel is visible.
private AuthPanel getVisiblePanel() {
AuthPanel card = null;
for (Component comp : authPanel.getComponents()) {
if (comp.isVisible() == true) {
card = (AuthPanel) comp;
}
}
return card;
}
// Fills servicesComboBox with the saved account names, or disables the
// saved-account controls when there are none.
private void initializeServicesComboBox() {
List<String> savedAccounts = credentials.retrieveSavedAccounts();
if (savedAccounts.size() < 1) {
servicesComboBox.setEnabled(false);
deleteAccountButton.setEnabled(false);
loginSavedAccountButton.setEnabled(false);
return;
}else{
servicesComboBox.setEnabled(true);
deleteAccountButton.setEnabled(true);
loginSavedAccountButton.setEnabled(true);
}
System.out.println(savedAccounts);
DefaultComboBoxModel<String> dcbm = (DefaultComboBoxModel<String>) servicesComboBox.getModel();
dcbm.removeAllElements();
for (String account : savedAccounts) {
dcbm.addElement(account);
}
}
// <editor-fold defaultstate="collapsed" desc="Generated Code">
/** Standalone entry point: shows the dialog and exits the JVM on close. */
public static void main(String args[]) {
/* Create and display the dialog */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
//AuthDialog dialog = new AuthDialog(new javax.swing.JFrame(), true);
AuthDialog dialog = new AuthDialog();
dialog.addWindowListener(new java.awt.event.WindowAdapter() {
@Override
public void windowClosing(java.awt.event.WindowEvent e) {
System.exit(0);
}
});
dialog.setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPanel authPanel;
private javax.swing.JComboBox cloudServiceComboBox;
private javax.swing.JButton createAccountButton;
private javax.swing.JButton deleteAccountButton;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel4;
private javax.swing.JSeparator jSeparator1;
private javax.swing.JButton loginSavedAccountButton;
private javax.swing.JComboBox servicesComboBox;
// End of variables declaration//GEN-END:variables
// </editor-fold>
// Name of the saved account currently selected in servicesComboBox.
// NOTE(review): NPEs if nothing is selected — the callers appear to rely
// on the combo box always having a selection; confirm.
private String getSelectedService() {
return servicesComboBox.getSelectedItem().toString();
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.logic.v2018_07_01_preview.implementation;
import com.microsoft.azure.management.logic.v2018_07_01_preview.IntegrationAccountAgreement;
import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl;
import rx.Observable;
import java.util.Map;
import org.joda.time.DateTime;
import com.microsoft.azure.management.logic.v2018_07_01_preview.AgreementType;
import com.microsoft.azure.management.logic.v2018_07_01_preview.BusinessIdentity;
import com.microsoft.azure.management.logic.v2018_07_01_preview.AgreementContent;
/**
 * AutoRest-generated fluent implementation of {@code IntegrationAccountAgreement}.
 *
 * Wraps an {@code IntegrationAccountAgreementInner} model and delegates all
 * create/update/get calls to the service client, keyed by the positional
 * identifiers (resource group, integration account, agreement name). As
 * generated code, edits here are normally overwritten on regeneration.
 */
class IntegrationAccountAgreementImpl extends CreatableUpdatableImpl<IntegrationAccountAgreement, IntegrationAccountAgreementInner, IntegrationAccountAgreementImpl> implements IntegrationAccountAgreement, IntegrationAccountAgreement.Definition, IntegrationAccountAgreement.Update {
private final LogicManager manager;
// Positional identifiers used to address the agreement on the service.
private String resourceGroupName;
private String integrationAccountName;
private String agreementName;
// Constructor used for the "create" flow: resource group and integration
// account are supplied later via withExistingIntegrationAccount().
IntegrationAccountAgreementImpl(String name, LogicManager manager) {
super(name, new IntegrationAccountAgreementInner());
this.manager = manager;
// Set resource name
this.agreementName = name;
//
}
// Constructor used for the "fetched from service" flow: all positional
// identifiers are parsed back out of the inner model's resource id.
IntegrationAccountAgreementImpl(IntegrationAccountAgreementInner inner, LogicManager manager) {
super(inner.name(), inner);
this.manager = manager;
// Set resource name
this.agreementName = inner.name();
// set resource ancestor and positional variables
this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
this.integrationAccountName = IdParsingUtils.getValueFromIdByName(inner.id(), "integrationAccounts");
this.agreementName = IdParsingUtils.getValueFromIdByName(inner.id(), "agreements");
//
}
@Override
public LogicManager manager() {
return this.manager;
}
// Create and update both map to the service's createOrUpdate (PUT) call.
@Override
public Observable<IntegrationAccountAgreement> createResourceAsync() {
IntegrationAccountAgreementsInner client = this.manager().inner().integrationAccountAgreements();
return client.createOrUpdateAsync(this.resourceGroupName, this.integrationAccountName, this.agreementName, this.inner())
.map(innerToFluentMap(this));
}
@Override
public Observable<IntegrationAccountAgreement> updateResourceAsync() {
IntegrationAccountAgreementsInner client = this.manager().inner().integrationAccountAgreements();
return client.createOrUpdateAsync(this.resourceGroupName, this.integrationAccountName, this.agreementName, this.inner())
.map(innerToFluentMap(this));
}
// Refreshes the inner model from the service.
@Override
protected Observable<IntegrationAccountAgreementInner> getInnerAsync() {
IntegrationAccountAgreementsInner client = this.manager().inner().integrationAccountAgreements();
return client.getAsync(this.resourceGroupName, this.integrationAccountName, this.agreementName);
}
// A missing resource id means the resource has not been created yet.
@Override
public boolean isInCreateMode() {
return this.inner().id() == null;
}
// ---- Read-only properties: straight delegation to the inner model. ----
@Override
public AgreementType agreementType() {
return this.inner().agreementType();
}
@Override
public DateTime changedTime() {
return this.inner().changedTime();
}
@Override
public AgreementContent content() {
return this.inner().content();
}
@Override
public DateTime createdTime() {
return this.inner().createdTime();
}
@Override
public BusinessIdentity guestIdentity() {
return this.inner().guestIdentity();
}
@Override
public String guestPartner() {
return this.inner().guestPartner();
}
@Override
public BusinessIdentity hostIdentity() {
return this.inner().hostIdentity();
}
@Override
public String hostPartner() {
return this.inner().hostPartner();
}
@Override
public String id() {
return this.inner().id();
}
@Override
public String location() {
return this.inner().location();
}
@Override
public Object metadata() {
return this.inner().metadata();
}
@Override
public String name() {
return this.inner().name();
}
@Override
public Map<String, String> tags() {
return this.inner().getTags();
}
@Override
public String type() {
return this.inner().type();
}
// ---- Fluent builder setters: stage values on the inner model and return
// this for chaining. ----
@Override
public IntegrationAccountAgreementImpl withExistingIntegrationAccount(String resourceGroupName, String integrationAccountName) {
this.resourceGroupName = resourceGroupName;
this.integrationAccountName = integrationAccountName;
return this;
}
@Override
public IntegrationAccountAgreementImpl withAgreementType(AgreementType agreementType) {
this.inner().withAgreementType(agreementType);
return this;
}
@Override
public IntegrationAccountAgreementImpl withContent(AgreementContent content) {
this.inner().withContent(content);
return this;
}
@Override
public IntegrationAccountAgreementImpl withGuestIdentity(BusinessIdentity guestIdentity) {
this.inner().withGuestIdentity(guestIdentity);
return this;
}
@Override
public IntegrationAccountAgreementImpl withGuestPartner(String guestPartner) {
this.inner().withGuestPartner(guestPartner);
return this;
}
@Override
public IntegrationAccountAgreementImpl withHostIdentity(BusinessIdentity hostIdentity) {
this.inner().withHostIdentity(hostIdentity);
return this;
}
@Override
public IntegrationAccountAgreementImpl withHostPartner(String hostPartner) {
this.inner().withHostPartner(hostPartner);
return this;
}
@Override
public IntegrationAccountAgreementImpl withLocation(String location) {
this.inner().withLocation(location);
return this;
}
@Override
public IntegrationAccountAgreementImpl withMetadata(Object metadata) {
this.inner().withMetadata(metadata);
return this;
}
@Override
public IntegrationAccountAgreementImpl withTags(Map<String, String> tags) {
this.inner().withTags(tags);
return this;
}
}
| |
package com.ivanmagda.habito.activities;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import android.widget.TextView;
import com.github.mikephil.charting.charts.BarChart;
import com.ivanmagda.habito.R;
import com.ivanmagda.habito.barchart.HabitoBarChartConfigurator;
import com.ivanmagda.habito.barchart.HabitoBarChartDataLoader;
import com.ivanmagda.habito.barchart.HabitoBarChartDataSource;
import com.ivanmagda.habito.barchart.HabitoBarChartRange;
import com.ivanmagda.habito.models.Habit;
import com.ivanmagda.habito.sync.FirebaseSyncUtils;
import com.ivanmagda.habito.view.model.HabitDetailViewModel;
import com.ivanmagda.habito.view.model.HabitoBarChartViewModel;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Detail screen for a single habit: shows its current score, lets the user
 * increase/decrease it, and renders a bar chart of habit history for a
 * selectable date range. Chart data is loaded off the UI thread via a
 * {@code Loader<HabitoBarChartDataSource>}.
 *
 * The habit to display MUST be supplied in the launching intent under
 * {@link #HABIT_EXTRA_KEY}; otherwise the activity throws on create.
 */
public class DetailHabitActivity extends AppCompatActivity
implements AdapterView.OnItemSelectedListener,
LoaderManager.LoaderCallbacks<HabitoBarChartDataSource> {
// Intent-extra key under which the Habit parcelable is passed in.
public static final String HABIT_EXTRA_KEY = "com.ivanmagda.habito.activities.habit";
// NOTE(review): TAG appears unused in this class.
private static final String TAG = "DetailHabitActivity";
// Loader id for the bar-chart data source.
private static final int BAR_CHART_DATA_SOURCE_LOADER = 1;
// Request code for the edit-habit activity result.
private static final int RC_EDIT_HABIT = 1234;
@BindView(R.id.bar_chart)
BarChart barChart;
@BindView(R.id.tv_score)
TextView scoreTextView;
@BindView(R.id.sp_date_range)
Spinner dateRangeSpinner;
@BindView(R.id.tv_date_range)
TextView dateRangeTextView;
// Habit being displayed; set from intent extras, replaced on edit result.
private Habit mHabit;
private HabitoBarChartConfigurator mBarChartConfigurator;
// Currently selected chart range; defaults to one week.
private HabitoBarChartRange.DateRange mBarChartRange = HabitoBarChartRange.DateRange.WEEK;
private HabitDetailViewModel mViewModel = new HabitDetailViewModel();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
configure();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_detail_habit, menu);
return true;
}
// Home navigates back; edit/delete dispatch to their handlers.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
onBackPressed();
return true;
case R.id.action_edit:
editHabit();
return true;
case R.id.action_delete:
delete();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
// Picks up the edited habit returned by EditHabitActivity and refreshes.
// NOTE(review): super.onActivityResult is intentionally skipped on the
// handled path — confirm no fragment in this activity needs the callback.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == RC_EDIT_HABIT && resultCode == RESULT_OK) {
mHabit = data.getParcelableExtra(EditHabitActivity.EDIT_HABIT_RESULT);
updateUI();
} else {
super.onActivityResult(requestCode, resultCode, data);
}
}
// One-time setup: inflate the layout, bind views, read the habit from the
// intent, wire the range spinner, and render the initial state.
private void configure() {
setContentView(R.layout.activity_detail_habit);
ButterKnife.bind(this);
mBarChartConfigurator = new HabitoBarChartConfigurator(barChart);
getHabitFromExtras();
configureDateSpinner();
updateUI();
}
// Re-renders title, score and range label, and restarts the chart loader
// so the bar chart reflects the current habit/range.
private void updateUI() {
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setTitle(mHabit.getRecord().getName());
}
String scoreString = mViewModel.getScoreString(mHabit.getRecord().getScore());
scoreTextView.setText(scoreString);
dateRangeTextView.setText(mViewModel.getDateRangeString());
getSupportLoaderManager().restartLoader(BAR_CHART_DATA_SOURCE_LOADER, null, this);
}
// Extracts the habit from the launching intent; the extra is mandatory.
private void getHabitFromExtras() {
Intent intent = getIntent();
if (intent.hasExtra(HABIT_EXTRA_KEY)) {
mHabit = intent.getParcelableExtra(HABIT_EXTRA_KEY);
} else {
throw new IllegalArgumentException("Put habit in the intent extras to be able to see details");
}
}
// Launches EditHabitActivity for a result (handled in onActivityResult).
private void editHabit() {
Intent intent = new Intent(this, EditHabitActivity.class);
intent.putExtra(EditHabitActivity.EDIT_HABIT_EXTRA_KEY, mHabit);
startActivityForResult(intent, RC_EDIT_HABIT);
}
@OnClick(R.id.bt_increase)
void onIncreaseScoreClick() {
final int oldScore = mHabit.getRecord().getScore();
mHabit.increaseScore();
updateScoreIfNeeded(oldScore);
}
@OnClick(R.id.bt_decrease)
void onDecreaseClick() {
final int oldScore = mHabit.getRecord().getScore();
mHabit.decreaseScore();
updateScoreIfNeeded(oldScore);
}
// Only redraws and syncs to Firebase when the score actually changed
// (e.g. decreasing may be a no-op at a lower bound — see Habit).
private void updateScoreIfNeeded(int oldValue) {
if (oldValue != mHabit.getRecord().getScore()) {
updateUI();
FirebaseSyncUtils.applyChangesForHabit(mHabit);
}
}
// Confirms with the user, then deletes the habit and closes the screen.
private void delete() {
new AlertDialog.Builder(this)
.setTitle(R.string.action_delete)
.setMessage(R.string.delete_habit_message)
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
FirebaseSyncUtils.deleteHabit(mHabit);
finish();
}
})
.setNegativeButton(android.R.string.no, null)
.show();
}
// Populates the range spinner with the localized range labels and
// preselects the current range before attaching the listener.
private void configureDateSpinner() {
List<String> dateRanges = HabitoBarChartRange.allStringValues(this);
ArrayAdapter<String> resetAdapter = new ArrayAdapter<>(this, android.R.layout.simple_spinner_dropdown_item,
dateRanges);
dateRangeSpinner.setAdapter(resetAdapter);
dateRangeSpinner.setSelection(dateRanges.indexOf(mBarChartRange.stringValue(this)));
dateRangeSpinner.setOnItemSelectedListener(this);
}
// Spinner callback: switch the chart range only when it really changed,
// to avoid a redundant reload on the initial selection event.
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
String selected = parent.getItemAtPosition(position).toString();
if (!selected.equals(mBarChartRange.stringValue(this))) {
mBarChartRange = HabitoBarChartRange.DateRange.fromString(selected, this);
mViewModel.setDateRange(mBarChartRange);
updateUI();
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
// LoaderCallbacks: build, consume and reset the chart data loader.
@Override
public Loader<HabitoBarChartDataSource> onCreateLoader(int id, Bundle args) {
return new HabitoBarChartDataLoader(this, mHabit, mBarChartRange);
}
@Override
public void onLoadFinished(Loader<HabitoBarChartDataSource> loader,
HabitoBarChartDataSource dataSource) {
HabitoBarChartViewModel viewModel = new HabitoBarChartViewModel(mHabit, mBarChartRange);
mBarChartConfigurator.setup(dataSource, viewModel);
barChart.animateY(1000);
}
@Override
public void onLoaderReset(Loader<HabitoBarChartDataSource> loader) {
barChart.clear();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.joda;
import org.elasticsearch.common.Strings;
import org.joda.time.Chronology;
import org.joda.time.DateTime;
import org.joda.time.DateTimeField;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.DurationField;
import org.joda.time.DurationFieldType;
import org.joda.time.ReadablePartial;
import org.joda.time.field.DividedDateTimeField;
import org.joda.time.field.OffsetDateTimeField;
import org.joda.time.field.ScaledDurationField;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.DateTimeParserBucket;
import org.joda.time.format.DateTimePrinter;
import org.joda.time.format.ISODateTimeFormat;
import org.joda.time.format.StrictISODateTimeFormat;
import java.io.IOException;
import java.io.Writer;
import java.util.Locale;
public class Joda {
/**
 * Parses a joda based pattern (named or literal) using the root locale,
 * so the result is independent of the node's default locale.
 *
 * @param input the named pattern (e.g. {@code "basic_date"}) or a joda pattern string
 * @return the resolved formatter
 */
public static FormatDateTimeFormatter forPattern(String input) {
return forPattern(input, Locale.ROOT);
}
/**
* Parses a joda based pattern, including some named ones (similar to the built in Joda ISO ones).
*/
public static FormatDateTimeFormatter forPattern(String input, Locale locale) {
if (Strings.hasLength(input)) {
input = input.trim();
}
if (input == null || input.length() == 0) {
throw new IllegalArgumentException("No date pattern provided");
}
DateTimeFormatter formatter;
if ("basicDate".equals(input) || "basic_date".equals(input)) {
formatter = ISODateTimeFormat.basicDate();
} else if ("basicDateTime".equals(input) || "basic_date_time".equals(input)) {
formatter = ISODateTimeFormat.basicDateTime();
} else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.basicDateTimeNoMillis();
} else if ("basicOrdinalDate".equals(input) || "basic_ordinal_date".equals(input)) {
formatter = ISODateTimeFormat.basicOrdinalDate();
} else if ("basicOrdinalDateTime".equals(input) || "basic_ordinal_date_time".equals(input)) {
formatter = ISODateTimeFormat.basicOrdinalDateTime();
} else if ("basicOrdinalDateTimeNoMillis".equals(input) || "basic_ordinal_date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.basicOrdinalDateTimeNoMillis();
} else if ("basicTime".equals(input) || "basic_time".equals(input)) {
formatter = ISODateTimeFormat.basicTime();
} else if ("basicTimeNoMillis".equals(input) || "basic_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.basicTimeNoMillis();
} else if ("basicTTime".equals(input) || "basic_t_Time".equals(input)) {
formatter = ISODateTimeFormat.basicTTime();
} else if ("basicTTimeNoMillis".equals(input) || "basic_t_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.basicTTimeNoMillis();
} else if ("basicWeekDate".equals(input) || "basic_week_date".equals(input)) {
formatter = ISODateTimeFormat.basicWeekDate();
} else if ("basicWeekDateTime".equals(input) || "basic_week_date_time".equals(input)) {
formatter = ISODateTimeFormat.basicWeekDateTime();
} else if ("basicWeekDateTimeNoMillis".equals(input) || "basic_week_date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.basicWeekDateTimeNoMillis();
} else if ("date".equals(input)) {
formatter = ISODateTimeFormat.date();
} else if ("dateHour".equals(input) || "date_hour".equals(input)) {
formatter = ISODateTimeFormat.dateHour();
} else if ("dateHourMinute".equals(input) || "date_hour_minute".equals(input)) {
formatter = ISODateTimeFormat.dateHourMinute();
} else if ("dateHourMinuteSecond".equals(input) || "date_hour_minute_second".equals(input)) {
formatter = ISODateTimeFormat.dateHourMinuteSecond();
} else if ("dateHourMinuteSecondFraction".equals(input) || "date_hour_minute_second_fraction".equals(input)) {
formatter = ISODateTimeFormat.dateHourMinuteSecondFraction();
} else if ("dateHourMinuteSecondMillis".equals(input) || "date_hour_minute_second_millis".equals(input)) {
formatter = ISODateTimeFormat.dateHourMinuteSecondMillis();
} else if ("dateOptionalTime".equals(input) || "date_optional_time".equals(input)) {
// in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print
// this sucks we should use the root local by default and not be dependent on the node
return new FormatDateTimeFormatter(input,
ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC),
ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC), locale);
} else if ("dateTime".equals(input) || "date_time".equals(input)) {
formatter = ISODateTimeFormat.dateTime();
} else if ("dateTimeNoMillis".equals(input) || "date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.dateTimeNoMillis();
} else if ("hour".equals(input)) {
formatter = ISODateTimeFormat.hour();
} else if ("hourMinute".equals(input) || "hour_minute".equals(input)) {
formatter = ISODateTimeFormat.hourMinute();
} else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) {
formatter = ISODateTimeFormat.hourMinuteSecond();
} else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) {
formatter = ISODateTimeFormat.hourMinuteSecondFraction();
} else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) {
formatter = ISODateTimeFormat.hourMinuteSecondMillis();
} else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) {
formatter = ISODateTimeFormat.ordinalDate();
} else if ("ordinalDateTime".equals(input) || "ordinal_date_time".equals(input)) {
formatter = ISODateTimeFormat.ordinalDateTime();
} else if ("ordinalDateTimeNoMillis".equals(input) || "ordinal_date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.ordinalDateTimeNoMillis();
} else if ("time".equals(input)) {
formatter = ISODateTimeFormat.time();
} else if ("timeNoMillis".equals(input) || "time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.timeNoMillis();
} else if ("tTime".equals(input) || "t_time".equals(input)) {
formatter = ISODateTimeFormat.tTime();
} else if ("tTimeNoMillis".equals(input) || "t_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.tTimeNoMillis();
} else if ("weekDate".equals(input) || "week_date".equals(input)) {
formatter = ISODateTimeFormat.weekDate();
} else if ("weekDateTime".equals(input) || "week_date_time".equals(input)) {
formatter = ISODateTimeFormat.weekDateTime();
} else if ("weekDateTimeNoMillis".equals(input) || "week_date_time_no_millis".equals(input)) {
formatter = ISODateTimeFormat.weekDateTimeNoMillis();
} else if ("weekyear".equals(input) || "week_year".equals(input)) {
formatter = ISODateTimeFormat.weekyear();
} else if ("weekyearWeek".equals(input) || "weekyear_week".equals(input)) {
formatter = ISODateTimeFormat.weekyearWeek();
} else if ("weekyearWeekDay".equals(input) || "weekyear_week_day".equals(input)) {
formatter = ISODateTimeFormat.weekyearWeekDay();
} else if ("year".equals(input)) {
formatter = ISODateTimeFormat.year();
} else if ("yearMonth".equals(input) || "year_month".equals(input)) {
formatter = ISODateTimeFormat.yearMonth();
} else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
formatter = ISODateTimeFormat.yearMonthDay();
} else if ("epoch_second".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter();
} else if ("epoch_millis".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter();
// strict date formats here, must be at least 4 digits for year and two for months and two for day
} else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) {
formatter = StrictISODateTimeFormat.basicWeekDate();
} else if ("strictBasicWeekDateTime".equals(input) || "strict_basic_week_date_time".equals(input)) {
formatter = StrictISODateTimeFormat.basicWeekDateTime();
} else if ("strictBasicWeekDateTimeNoMillis".equals(input) || "strict_basic_week_date_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.basicWeekDateTimeNoMillis();
} else if ("strictDate".equals(input) || "strict_date".equals(input)) {
formatter = StrictISODateTimeFormat.date();
} else if ("strictDateHour".equals(input) || "strict_date_hour".equals(input)) {
formatter = StrictISODateTimeFormat.dateHour();
} else if ("strictDateHourMinute".equals(input) || "strict_date_hour_minute".equals(input)) {
formatter = StrictISODateTimeFormat.dateHourMinute();
} else if ("strictDateHourMinuteSecond".equals(input) || "strict_date_hour_minute_second".equals(input)) {
formatter = StrictISODateTimeFormat.dateHourMinuteSecond();
} else if ("strictDateHourMinuteSecondFraction".equals(input) || "strict_date_hour_minute_second_fraction".equals(input)) {
formatter = StrictISODateTimeFormat.dateHourMinuteSecondFraction();
} else if ("strictDateHourMinuteSecondMillis".equals(input) || "strict_date_hour_minute_second_millis".equals(input)) {
formatter = StrictISODateTimeFormat.dateHourMinuteSecondMillis();
} else if ("strictDateOptionalTime".equals(input) || "strict_date_optional_time".equals(input)) {
// in this case, we have a separate parser and printer since the dataOptionalTimeParser can't print
// this sucks we should use the root local by default and not be dependent on the node
return new FormatDateTimeFormatter(input,
StrictISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC),
StrictISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC), locale);
} else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) {
formatter = StrictISODateTimeFormat.dateTime();
} else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.dateTimeNoMillis();
} else if ("strictHour".equals(input) || "strict_hour".equals(input)) {
formatter = StrictISODateTimeFormat.hour();
} else if ("strictHourMinute".equals(input) || "strict_hour_minute".equals(input)) {
formatter = StrictISODateTimeFormat.hourMinute();
} else if ("strictHourMinuteSecond".equals(input) || "strict_hour_minute_second".equals(input)) {
formatter = StrictISODateTimeFormat.hourMinuteSecond();
} else if ("strictHourMinuteSecondFraction".equals(input) || "strict_hour_minute_second_fraction".equals(input)) {
formatter = StrictISODateTimeFormat.hourMinuteSecondFraction();
} else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) {
formatter = StrictISODateTimeFormat.hourMinuteSecondMillis();
} else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) {
formatter = StrictISODateTimeFormat.ordinalDate();
} else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) {
formatter = StrictISODateTimeFormat.ordinalDateTime();
} else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.ordinalDateTimeNoMillis();
} else if ("strictTime".equals(input) || "strict_time".equals(input)) {
formatter = StrictISODateTimeFormat.time();
} else if ("strictTimeNoMillis".equals(input) || "strict_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.timeNoMillis();
} else if ("strictTTime".equals(input) || "strict_t_time".equals(input)) {
formatter = StrictISODateTimeFormat.tTime();
} else if ("strictTTimeNoMillis".equals(input) || "strict_t_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.tTimeNoMillis();
} else if ("strictWeekDate".equals(input) || "strict_week_date".equals(input)) {
formatter = StrictISODateTimeFormat.weekDate();
} else if ("strictWeekDateTime".equals(input) || "strict_week_date_time".equals(input)) {
formatter = StrictISODateTimeFormat.weekDateTime();
} else if ("strictWeekDateTimeNoMillis".equals(input) || "strict_week_date_time_no_millis".equals(input)) {
formatter = StrictISODateTimeFormat.weekDateTimeNoMillis();
} else if ("strictWeekyear".equals(input) || "strict_weekyear".equals(input)) {
formatter = StrictISODateTimeFormat.weekyear();
} else if ("strictWeekyearWeek".equals(input) || "strict_weekyear_week".equals(input)) {
formatter = StrictISODateTimeFormat.weekyearWeek();
} else if ("strictWeekyearWeekDay".equals(input) || "strict_weekyear_week_day".equals(input)) {
formatter = StrictISODateTimeFormat.weekyearWeekDay();
} else if ("strictYear".equals(input) || "strict_year".equals(input)) {
formatter = StrictISODateTimeFormat.year();
} else if ("strictYearMonth".equals(input) || "strict_year_month".equals(input)) {
formatter = StrictISODateTimeFormat.yearMonth();
} else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) {
formatter = StrictISODateTimeFormat.yearMonthDay();
} else if (Strings.hasLength(input) && input.contains("||")) {
String[] formats = Strings.delimitedListToStringArray(input, "||");
DateTimeParser[] parsers = new DateTimeParser[formats.length];
if (formats.length == 1) {
formatter = forPattern(input, locale).parser();
} else {
DateTimeFormatter dateTimeFormatter = null;
for (int i = 0; i < formats.length; i++) {
FormatDateTimeFormatter currentFormatter = forPattern(formats[i], locale);
DateTimeFormatter currentParser = currentFormatter.parser();
if (dateTimeFormatter == null) {
dateTimeFormatter = currentFormatter.printer();
}
parsers[i] = currentParser.getParser();
}
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(dateTimeFormatter.withZone(DateTimeZone.UTC).getPrinter(), parsers);
formatter = builder.toFormatter();
}
} else {
try {
formatter = DateTimeFormat.forPattern(input);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e);
}
}
return new FormatDateTimeFormatter(input, formatter.withZone(DateTimeZone.UTC), locale);
}
public static FormatDateTimeFormatter getStrictStandardDateFormatter() {
    // Date-only form, e.g. "2014/10/10". Fixed-width fields make this strict:
    // exactly 4 year digits and 2 each for month/day.
    final DateTimeFormatter dateOnly = new DateTimeFormatterBuilder()
        .appendFixedDecimal(DateTimeFieldType.year(), 4)
        .appendLiteral('/')
        .appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2)
        .appendLiteral('/')
        .appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2)
        .toFormatter()
        .withZoneUTC();

    // Date-and-time form, e.g. "2014/10/10 12:12:12".
    final DateTimeFormatter dateAndTime = new DateTimeFormatterBuilder()
        .appendFixedDecimal(DateTimeFieldType.year(), 4)
        .appendLiteral('/')
        .appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2)
        .appendLiteral('/')
        .appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2)
        .appendLiteral(' ')
        .appendFixedSignedDecimal(DateTimeFieldType.hourOfDay(), 2)
        .appendLiteral(':')
        .appendFixedSignedDecimal(DateTimeFieldType.minuteOfHour(), 2)
        .appendLiteral(':')
        .appendFixedSignedDecimal(DateTimeFieldType.secondOfMinute(), 2)
        .toFormatter()
        .withZoneUTC();

    // Printing always uses the long form; parsing accepts the long form, the
    // short form, or an epoch-millis number, tried in that order.
    final DateTimeParser[] parsers = new DateTimeParser[] {
        dateAndTime.getParser(),
        dateOnly.getParser(),
        new EpochTimeParser(true)
    };
    final DateTimeFormatterBuilder combined = new DateTimeFormatterBuilder()
        .append(dateAndTime.withZone(DateTimeZone.UTC).getPrinter(), parsers);

    return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis",
        combined.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
}
// Duration type for a quarter of a year: the chronology's months duration
// field scaled by a factor of 3.
public static final DurationFieldType Quarters = new DurationFieldType("quarters") {
@Override
public DurationField getField(Chronology chronology) {
return new ScaledDurationField(chronology.months(), Quarters, 3);
}
};
// Field type exposing the quarter-of-year, derived from monthOfYear and
// ranged over years (its duration unit is Quarters above).
public static final DateTimeFieldType QuarterOfYear = new DateTimeFieldType("quarterOfYear") {
@Override
public DurationFieldType getDurationType() {
return Quarters;
}
@Override
public DurationFieldType getRangeDurationType() {
return DurationFieldType.years();
}
@Override
public DateTimeField getField(Chronology chronology) {
// monthOfYear (1..12) is shifted to 0..11, integer-divided by 3 to get
// 0..3, then shifted back by +1 so quarters are numbered 1..4.
return new OffsetDateTimeField(new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
}
};
/**
 * Joda {@link DateTimeParser} that reads a numeric epoch timestamp
 * (seconds or milliseconds since 1970-01-01T00:00:00Z) and saves the
 * corresponding calendar fields into the parser bucket.
 */
public static class EpochTimeParser implements DateTimeParser {

    /** True when the input is interpreted as milliseconds, false for seconds. */
    private final boolean hasMilliSecondPrecision;

    public EpochTimeParser(boolean hasMilliSecondPrecision) {
        this.hasMilliSecondPrecision = hasMilliSecondPrecision;
    }

    @Override
    public int estimateParsedLength() {
        // Maximum accepted digit count: 19 for millisecond precision, 16 for seconds.
        return hasMilliSecondPrecision ? 19 : 16;
    }

    /**
     * Parses the whole of {@code text} as an epoch value.
     *
     * @return the number of characters consumed (the full text), or -1 when the
     *         input is rejected or cannot be parsed
     */
    @Override
    public int parseInto(DateTimeParserBucket bucket, String text, int position) {
        boolean isPositive = !text.startsWith("-");
        // NOTE(review): negative values skip the length check — the leading '-'
        // adds one character; this mirrors the original behavior.
        boolean isTooLong = text.length() > estimateParsedLength();
        if ((isPositive && isTooLong) ||
            // epoch timestamps are defined in UTC; refuse any other zone
            bucket.getZone() != DateTimeZone.UTC) {
            return -1;
        }

        int factor = hasMilliSecondPrecision ? 1 : 1000;
        try {
            // parseLong avoids the needless boxing of Long.valueOf.
            long millis = Long.parseLong(text) * factor;
            DateTime dt = new DateTime(millis, DateTimeZone.UTC);
            bucket.saveField(DateTimeFieldType.year(), dt.getYear());
            bucket.saveField(DateTimeFieldType.monthOfYear(), dt.getMonthOfYear());
            bucket.saveField(DateTimeFieldType.dayOfMonth(), dt.getDayOfMonth());
            bucket.saveField(DateTimeFieldType.hourOfDay(), dt.getHourOfDay());
            bucket.saveField(DateTimeFieldType.minuteOfHour(), dt.getMinuteOfHour());
            bucket.saveField(DateTimeFieldType.secondOfMinute(), dt.getSecondOfMinute());
            bucket.saveField(DateTimeFieldType.millisOfSecond(), dt.getMillisOfSecond());
            bucket.setZone(DateTimeZone.UTC);
        } catch (Exception e) {
            // Broad on purpose: any parse/overflow/field failure simply signals
            // "no match" to Joda rather than aborting the whole format attempt.
            return -1;
        }
        return text.length();
    }
}
/**
 * Joda {@link DateTimePrinter} that writes an instant as a numeric epoch
 * value (milliseconds or seconds since 1970-01-01T00:00:00Z).
 */
public static class EpochTimePrinter implements DateTimePrinter {

    // final for consistency with EpochTimeParser; the flag never changes after construction.
    private final boolean hasMilliSecondPrecision;

    public EpochTimePrinter(boolean hasMilliSecondPrecision) {
        this.hasMilliSecondPrecision = hasMilliSecondPrecision;
    }

    @Override
    public int estimatePrintedLength() {
        // Up to 19 digits for a signed long in millis, 16 when printing seconds.
        return hasMilliSecondPrecision ? 19 : 16;
    }

    /**
     * We adjust the instant by displayOffset to adjust for the offset that might have been added in
     * {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
     */
    @Override
    public void printTo(StringBuffer buf, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) {
        if (hasMilliSecondPrecision) {
            buf.append(instant - displayOffset);
        } else {
            buf.append((instant - displayOffset) / 1000);
        }
    }

    /**
     * We adjust the instant by displayOffset to adjust for the offset that might have been added in
     * {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
     */
    @Override
    public void printTo(Writer out, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) throws IOException {
        // write() in both branches (the original mixed write/append, which are equivalent here).
        if (hasMilliSecondPrecision) {
            out.write(String.valueOf(instant - displayOffset));
        } else {
            out.write(String.valueOf((instant - displayOffset) / 1000));
        }
    }

    @Override
    public void printTo(StringBuffer buf, ReadablePartial partial, Locale locale) {
        if (hasMilliSecondPrecision) {
            buf.append(String.valueOf(getDateTimeMillis(partial)));
        } else {
            buf.append(String.valueOf(getDateTimeMillis(partial) / 1000));
        }
    }

    @Override
    public void printTo(Writer out, ReadablePartial partial, Locale locale) throws IOException {
        if (hasMilliSecondPrecision) {
            out.append(String.valueOf(getDateTimeMillis(partial)));
        } else {
            out.append(String.valueOf(getDateTimeMillis(partial) / 1000));
        }
    }

    /**
     * Resolves a partial's calendar fields to epoch millis via its chronology.
     * Requires the partial to supply year through millisOfSecond.
     */
    private long getDateTimeMillis(ReadablePartial partial) {
        int year = partial.get(DateTimeFieldType.year());
        int monthOfYear = partial.get(DateTimeFieldType.monthOfYear());
        int dayOfMonth = partial.get(DateTimeFieldType.dayOfMonth());
        int hourOfDay = partial.get(DateTimeFieldType.hourOfDay());
        int minuteOfHour = partial.get(DateTimeFieldType.minuteOfHour());
        int secondOfMinute = partial.get(DateTimeFieldType.secondOfMinute());
        int millisOfSecond = partial.get(DateTimeFieldType.millisOfSecond());
        return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth, hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
    }
}
}
| |
package org.andengine.opengl.texture.region;
import org.andengine.opengl.texture.ITexture;
import org.andengine.opengl.texture.Texture;
/**
* (c) 2010 Nicolas Gramlich
* (c) 2011 Zynga Inc.
*
* @author Nicolas Gramlich
* @since 18:14:42 - 09.03.2010
*/
public class TiledTextureRegion extends BaseTextureRegion implements ITiledTextureRegion {
// ===========================================================
// Constants
// ===========================================================
// ===========================================================
// Fields
// ===========================================================
protected int mCurrentTileIndex;
protected final int mTileCount;
protected final ITextureRegion[] mTextureRegions;
// ===========================================================
// Constructors
// ===========================================================
public TiledTextureRegion(final ITexture pTexture, final ITextureRegion ... pTextureRegions) {
this(pTexture, true, pTextureRegions);
}
/**
* @param pTexture
* @param pPerformSameTextureSanityCheck checks whether all supplied {@link ITextureRegion} are on the same {@link Texture}
* @param pTextureRegions
*/
public TiledTextureRegion(final ITexture pTexture, final boolean pPerformSameTextureSanityCheck, final ITextureRegion ... pTextureRegions) {
super(pTexture);
this.mTextureRegions = pTextureRegions;
this.mTileCount = this.mTextureRegions.length;
if (pPerformSameTextureSanityCheck) {
for (int i = this.mTileCount - 1; i >= 0; i--) {
if (pTextureRegions[i].getTexture() != pTexture) {
throw new IllegalArgumentException("The " + ITextureRegion.class.getSimpleName() + ": '" + pTextureRegions[i].toString() + "' at index: '" + i + "' is not on the same " + ITexture.class.getSimpleName() + ": '" + pTextureRegions[i].getTexture().toString() + "' as the supplied " + ITexture.class.getSimpleName() + ": '" + pTexture.toString() + "'.");
}
}
}
}
public static TiledTextureRegion create(final ITexture pTexture, final int pTextureX, final int pTextureY, final int pTextureWidth, final int pTextureHeight, final int pTileColumns, final int pTileRows) {
return TiledTextureRegion.create(pTexture, pTextureX, pTextureY, pTextureWidth, pTextureHeight, pTileColumns, pTileRows, false);
}
public static TiledTextureRegion create(final ITexture pTexture, final int pTextureX, final int pTextureY, final int pTextureWidth, final int pTextureHeight, final int pTileColumns, final int pTileRows, final boolean pRotated) {
final ITextureRegion[] textureRegions = new ITextureRegion[pTileColumns * pTileRows];
final int tileWidth = pTextureWidth / pTileColumns;
final int tileHeight = pTextureHeight / pTileRows;
for (int tileColumn = 0; tileColumn < pTileColumns; tileColumn++) {
for (int tileRow = 0; tileRow < pTileRows; tileRow++) {
final int tileIndex = tileRow * pTileColumns + tileColumn;
final int x = pTextureX + tileColumn * tileWidth;
final int y = pTextureY + tileRow * tileHeight;
textureRegions[tileIndex] = new TextureRegion(pTexture, x, y, tileWidth, tileHeight, pRotated);
}
}
return new TiledTextureRegion(pTexture, false, textureRegions);
}
@Override
public TiledTextureRegion deepCopy() {
final int tileCount = this.mTileCount;
final ITextureRegion[] textureRegions = new ITextureRegion[tileCount];
for (int i = 0; i < tileCount; i++) {
textureRegions[i] = this.mTextureRegions[i].deepCopy();
}
return new TiledTextureRegion(this.mTexture, false, textureRegions);
}
// ===========================================================
// Getter & Setter
// ===========================================================
@Override
public int getCurrentTileIndex() {
return this.mCurrentTileIndex;
}
@Override
public void setCurrentTileIndex(final int pCurrentTileIndex) {
this.mCurrentTileIndex = pCurrentTileIndex;
}
@Override
public void nextTile() {
this.mCurrentTileIndex++;
if (this.mCurrentTileIndex >= this.mTileCount) {
this.mCurrentTileIndex = this.mCurrentTileIndex % this.mTileCount;
}
}
@Override
public ITextureRegion getTextureRegion(final int pTileIndex) {
return this.mTextureRegions[pTileIndex];
}
@Override
public int getTileCount() {
return this.mTileCount;
}
@Override
public float getTextureX() {
return this.mTextureRegions[this.mCurrentTileIndex].getTextureX();
}
@Override
public float getTextureX(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getTextureX();
}
@Override
public float getTextureY() {
return this.mTextureRegions[this.mCurrentTileIndex].getTextureY();
}
@Override
public float getTextureY(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getTextureY();
}
@Override
public void setTextureX(final float pTextureX) {
this.mTextureRegions[this.mCurrentTileIndex].setTextureX(pTextureX);
}
@Override
public void setTextureX(final int pTileIndex, final float pTextureX) {
this.mTextureRegions[pTileIndex].setTextureX(pTextureX);
}
@Override
public void setTextureY(final float pTextureY) {
this.mTextureRegions[this.mCurrentTileIndex].setTextureY(pTextureY);
}
@Override
public void setTextureY(final int pTileIndex, final float pTextureY) {
this.mTextureRegions[pTileIndex].setTextureY(pTextureY);
}
@Override
public void setTexturePosition(final float pTextureX, final float pTextureY) {
this.mTextureRegions[this.mCurrentTileIndex].setTexturePosition(pTextureX, pTextureY);
}
@Override
public void setTexturePosition(final int pTileIndex, final float pTextureX, final float pTextureY) {
this.mTextureRegions[pTileIndex].setTexturePosition(pTextureX, pTextureY);
}
@Override
public float getWidth() {
return this.mTextureRegions[this.mCurrentTileIndex].getWidth();
}
@Override
public float getWidth(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getWidth();
}
@Override
public float getHeight() {
return this.mTextureRegions[this.mCurrentTileIndex].getHeight();
}
@Override
public float getHeight(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getHeight();
}
@Override
public void setTextureWidth(final float pTextureWidth) {
this.mTextureRegions[this.mCurrentTileIndex].setTextureWidth(pTextureWidth);
}
@Override
public void setTextureWidth(final int pTileIndex, final float pTextureWidth) {
this.mTextureRegions[pTileIndex].setTextureWidth(pTextureWidth);
}
@Override
public void setTextureHeight(final float pTextureHeight) {
this.mTextureRegions[this.mCurrentTileIndex].setTextureHeight(pTextureHeight);
}
@Override
public void setTextureHeight(final int pTileIndex, final float pTextureHeight) {
this.mTextureRegions[pTileIndex].setTextureHeight(pTextureHeight);
}
@Override
public void setTextureSize(final float pTextureWidth, final float pTextureHeight) {
this.mTextureRegions[this.mCurrentTileIndex].setTextureSize(pTextureWidth, pTextureHeight);
}
@Override
public void setTextureSize(final int pTileIndex, final float pTextureWidth, final float pTextureHeight) {
this.mTextureRegions[pTileIndex].setTextureSize(pTextureWidth, pTextureHeight);
}
@Override
public void set(final float pTextureX, final float pTextureY, final float pTextureWidth, final float pTextureHeight) {
this.mTextureRegions[this.mCurrentTileIndex].set(pTextureX, pTextureY, pTextureWidth, pTextureHeight);
}
@Override
public void set(final int pTileIndex, final float pTextureX, final float pTextureY, final float pTextureWidth, final float pTextureHeight) {
this.mTextureRegions[pTileIndex].set(pTextureX, pTextureY, pTextureWidth, pTextureHeight);
}
@Override
public float getU() {
return this.mTextureRegions[this.mCurrentTileIndex].getU();
}
@Override
public float getU(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getU();
}
@Override
public float getV() {
return this.mTextureRegions[this.mCurrentTileIndex].getV();
}
@Override
public float getV(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getV();
}
@Override
public float getU2() {
return this.mTextureRegions[this.mCurrentTileIndex].getU2();
}
@Override
public float getU2(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getU2();
}
@Override
public float getV2() {
return this.mTextureRegions[this.mCurrentTileIndex].getV2();
}
@Override
public float getV2(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getV2();
}
@Override
public boolean isScaled() {
return this.mTextureRegions[this.mCurrentTileIndex].isScaled();
}
@Override
public boolean isScaled(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].isScaled();
}
@Override
public float getScale() {
return this.mTextureRegions[this.mCurrentTileIndex].getScale();
}
@Override
public float getScale(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].getScale();
}
@Override
public boolean isRotated() {
return this.mTextureRegions[this.mCurrentTileIndex].isRotated();
}
@Override
public boolean isRotated(final int pTileIndex) {
return this.mTextureRegions[pTileIndex].isRotated();
}
// ===========================================================
// Methods for/from SuperClass/Interfaces
// ===========================================================
// ===========================================================
// Methods
// ===========================================================
// ===========================================================
// Inner and Anonymous Classes
// ===========================================================
}
| |
/*
* Copyright 2007-2008 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kns.lookup;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.core.web.format.DateFormatter;
import org.kuali.rice.kns.document.authorization.BusinessObjectRestrictions;
import org.kuali.rice.kns.document.authorization.FieldRestriction;
import org.kuali.rice.kns.service.BusinessObjectAuthorizationService;
import org.kuali.rice.kns.service.KNSServiceLocator;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.ObjectUtils;
import java.io.Serializable;
import java.sql.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* This class holds details of html data for an action url.
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*
*/
@Deprecated
public abstract class HtmlData implements Serializable {
protected static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(HtmlData.class);
// Fully-qualified class names used to discriminate HtmlData subtypes.
public static final String ANCHOR_HTML_DATA_TYPE = AnchorHtmlData.class.getName();
public static final String INPUT_HTML_DATA_TYPE = InputHtmlData.class.getName();
// Attributes shared by all rendered tags; initialized to empty strings so
// constructCompleteHtmlTag implementations never have to null-check them.
protected String name = "";
protected String title = "";
protected String methodToCall = "";
protected String displayText = "";
protected String prependDisplayText = "";
protected String appendDisplayText = "";
// Optional nested HtmlData entries rendered alongside this one.
protected List<HtmlData> childUrlDataList;
protected String maxLength;
/**
 * Constructs the complete HTML tag for this element based on the
 * instance's attribute values.
 *
 * @return the rendered HTML tag as a string
 */
public abstract String constructCompleteHtmlTag();
/**
 * @return the text appended after the display text when the tag is rendered
 */
public String getAppendDisplayText() {
return this.appendDisplayText;
}
/**
 * @param appendDisplayText the text to append after the display text
 */
public void setAppendDisplayText(String appendDisplayText) {
this.appendDisplayText = appendDisplayText;
}
/**
 * @return the nested HtmlData entries associated with this one; may be null
 */
public List<HtmlData> getChildUrlDataList() {
return this.childUrlDataList;
}
/**
 * @param childUrlDataList the nested HtmlData entries to associate with this one
 */
public void setChildUrlDataList(List<HtmlData> childUrlDataList) {
this.childUrlDataList = childUrlDataList;
}
/**
 * @return the text prepended before the display text when the tag is rendered
 */
public String getPrependDisplayText() {
return this.prependDisplayText;
}
/**
 * @param prependDisplayText the text to prepend before the display text
 */
public void setPrependDisplayText(String prependDisplayText) {
this.prependDisplayText = prependDisplayText;
}
/**
 * @return the title attribute rendered on the tag
 */
public String getTitle() {
return this.title;
}
/**
 * @param title the title attribute to render on the tag
 */
public void setTitle(String title) {
this.title = title;
}
/**
 * @return the name attribute rendered on the tag
 */
public String getName() {
return this.name;
}
/**
 * @param name the name attribute to render on the tag
 */
public void setName(String name) {
this.name = name;
}
/**
 * @return the visible text of the rendered element
 */
public String getDisplayText() {
return this.displayText;
}
/**
 * @param displayText the visible text for the rendered element
 */
public void setDisplayText(String displayText) {
this.displayText = displayText;
}
/**
 * @return the controller method associated with this element
 */
public String getMethodToCall() {
return this.methodToCall;
}
/**
 * @param methodToCall the controller method to associate with this element
 */
public void setMethodToCall(String methodToCall) {
this.methodToCall = methodToCall;
}
/**
 * Hook for subclasses to build a title from a prepend text, a business object
 * class and a list of keys. This base implementation ignores its arguments and
 * returns the empty string.
 * NOTE(review): uses raw Class/List types — presumably legacy; confirm before
 * adding generics.
 */
public String getTitle(String prependText, Class bo, List keys) {
return KRADConstants.EMPTY_STRING;
}
/**
 * KFSMI-658 Builds the title text for a link/control from selected property
 * values of a business object, applying field masking where required.
 *
 * @param prependText text placed before the key=value pairs
 * @param dataObject the object whose property values are read; when null the
 *        empty string is returned
 * @param keys names of the properties to include in the title
 * @param businessObjectRestrictions optional restrictions used to mask
 *        sensitive values; may be null
 * @return the assembled title text
 */
public static String getTitleText(String prependText, Object dataObject, List<String> keys, BusinessObjectRestrictions businessObjectRestrictions) {
    if (dataObject == null) {
        return KRADConstants.EMPTY_STRING;
    }

    Map<String, String> keyValueMap = new HashMap<String, String>();
    // Enhanced for over the typed list replaces the original raw Iterator loop.
    for (String fieldNm : keys) {
        Object fieldVal = ObjectUtils.getPropertyValue(dataObject, fieldNm);

        FieldRestriction fieldRestriction = null;
        if (businessObjectRestrictions != null) {
            fieldRestriction = businessObjectRestrictions.getFieldRestriction(fieldNm);
        }

        if (fieldRestriction != null && (fieldRestriction.isMasked() || fieldRestriction.isPartiallyMasked())) {
            // Masked/partially-masked fields show the mask, never the raw value.
            fieldVal = fieldRestriction.getMaskFormatter().maskValue(fieldVal);
        } else if (fieldVal == null) {
            fieldVal = KRADConstants.EMPTY_STRING;
        } else if (fieldVal instanceof Date) {
            // Dates must be formatted for inclusion in the URL/title.
            DateFormatter dateFormatter = new DateFormatter();
            fieldVal = dateFormatter.format(fieldVal);
        }
        keyValueMap.put(fieldNm, fieldVal.toString());
    }
    return getTitleText(prependText, dataObject.getClass(), keyValueMap);
}
// Cached KNS authorization service, fetched lazily on first use.
private static BusinessObjectAuthorizationService businessObjectAuthorizationService;
/**
 * Lazily obtains the BusinessObjectAuthorizationService from the KNS service
 * locator and caches it.
 * NOTE(review): not synchronized — concurrent first calls may each hit the
 * locator; presumably it returns the same service instance, so the race is
 * benign. Confirm before relying on single initialization.
 */
private static BusinessObjectAuthorizationService getBusinessObjectAuthorizationService() {
if (businessObjectAuthorizationService == null) {
businessObjectAuthorizationService = KNSServiceLocator.getBusinessObjectAuthorizationService();
}
return businessObjectAuthorizationService;
}
/**
 * Builds title text of the form "{prependText}{label}={value} {label}={value} ..."
 * using the data dictionary to resolve each attribute's label.
 *
 * @param prependText text placed before the label=value pairs
 * @param dataObjectClass class whose attribute labels are looked up
 * @param keyValueMap attribute name to display value; iteration order follows
 *        the map's own ordering
 * @return the assembled title text (note: each pair is followed by a space)
 */
public static String getTitleText(String prependText, Class<?> dataObjectClass, Map<String, String> keyValueMap) {
    // StringBuilder over StringBuffer: no cross-thread sharing here, so the
    // synchronized buffer bought nothing. entrySet avoids a second lookup per key
    // and the redundant toString() on a value that is already a String (which
    // also NPE'd on null values; null now renders as "null").
    StringBuilder titleText = new StringBuilder(prependText);
    for (Map.Entry<String, String> entry : keyValueMap.entrySet()) {
        titleText.append(KRADServiceLocatorWeb.getDataDictionaryService()
                .getAttributeLabel(dataObjectClass, entry.getKey()))
                .append('=').append(entry.getValue()).append(' ');
    }
    return titleText.toString();
}
/**
 * An extension of HtmlData that represents an HTML anchor ({@code <a>}) tag.
 * Attribute fields default to the empty string, which suppresses the
 * corresponding attribute in the generated markup.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public static class AnchorHtmlData extends HtmlData {
    /** Conventional target value that opens the link in a new window/tab. */
    public static final String TARGET_BLANK = "_blank";
    protected String href = "";
    protected String target = "";
    protected String style = "";
    protected String styleClass ="";
    protected String onclick ="";
    /**
     * Needed by inquiry framework
     */
    public AnchorHtmlData() {
    }
    /**
     * @param href the anchor's href value
     * @param title the anchor's title attribute (field inherited from HtmlData)
     */
    public AnchorHtmlData(String href, String title) {
        this.href = href;
        this.title = title;
    }
    /**
     * @param href the anchor's href value
     * @param methodToCall the controller method associated with this link (inherited from HtmlData)
     * @param displayText the visible link text (inherited from HtmlData)
     */
    public AnchorHtmlData(String href, String methodToCall,
            String displayText) {
        this.href = href;
        this.methodToCall = methodToCall;
        this.displayText = displayText;
    }
    /**
     * @param href the href to set
     */
    public void setHref(String href) {
        this.href = href;
    }
    /**
     * Generates the anchor tag. When href is empty, only the display text is
     * returned and no tag is produced.
     *
     * NOTE(review): title and the other attribute values are not HTML-escaped
     * here — confirm callers pass pre-escaped values. Also, no space is emitted
     * between the closing quote of href and the style/styleClass fragments, so
     * those values presumably carry their own surrounding markup — TODO confirm.
     *
     * @see HtmlData#constructCompleteHtmlTag()
     */
    public String constructCompleteHtmlTag() {
        String completeHtmlTag;
        if (StringUtils.isEmpty(getHref()))
            completeHtmlTag = getDisplayText();
        else
            completeHtmlTag = getPrependDisplayText()
                    + "<a title=\""
                    + title
                    + "\""
                    + " href=\""
                    + getHref()
                    + "\""
                    + getStyle()
                    + " "
                    + getStyleClass()
                    + " "
                    + (StringUtils.isEmpty(getOnclick()) ? "" : " onClick=\""
                            + getOnclick() + "\" ")
                    + (StringUtils.isEmpty(getTarget()) ? "" : " target=\""
                            + getTarget() + "\" ") + ">" + getDisplayText()
                    + "</a>" + getAppendDisplayText();
        return completeHtmlTag;
    }
    /**
     * @return the target
     */
    public String getTarget() {
        return this.target;
    }
    /**
     * @param target
     *            the target to set
     */
    public void setTarget(String target) {
        this.target = target;
    }
    /**
     * @return the style
     */
    public String getStyle() {
        return this.style;
    }
    /**
     * @param style the style to set
     */
    public void setStyle(String style) {
        this.style = style;
    }
    /**
     * @return the styleClass
     */
    public String getStyleClass() {
        return this.styleClass;
    }
    /**
     * @param styleClass the styleClass to set
     */
    public void setStyleClass(String styleClass) {
        this.styleClass = styleClass;
    }
    /**
     * @return the onclick
     */
    public String getOnclick() {
        return this.onclick;
    }
    /**
     * @param onclick the onclick to set
     */
    public void setOnclick(String onclick) {
        this.onclick = onclick;
    }
    /**
     * @return the href
     */
    public String getHref() {
        return this.href;
    }
    /**
     * @return the methodToCall (field inherited from HtmlData)
     */
    public String getMethodToCall() {
        return this.methodToCall;
    }
}
/**
 * An extension of HtmlData that represents an HTML input tag.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public static class InputHtmlData extends HtmlData {
    public static final String CHECKBOX_INPUT_TYPE = "checkbox";
    public static final String CHECKBOX_CHECKED_VALUE = "checked";
    protected String inputType = "";
    protected String src = "";
    protected String styleClass = "";
    protected String border = "0";
    protected String checked = "";
    protected String value = "";
    /**
     * @param name the input's name attribute (field inherited from HtmlData)
     * @param inputType the input's type attribute, e.g. {@link #CHECKBOX_INPUT_TYPE}
     */
    public InputHtmlData(String name, String inputType) {
        this.name = name;
        this.inputType = inputType;
    }
    /**
     * @param name the input's name attribute (field inherited from HtmlData)
     * @param inputType the input's type attribute
     * @param src image source, emitted only when non-empty
     */
    public InputHtmlData(String name, String inputType, String src) {
        this.name = name;
        this.inputType = inputType;
        this.src = src;
    }
    /***********************************************************************
     *
     * This method constructs an input tag.
     *
     * NOTE(review): when the value field is set, the tag contains two
     * value="..." attributes (one from value, one from getDisplayText());
     * browsers honor the first occurrence — confirm this is intended.
     * "styleClass" is also emitted literally rather than as the standard
     * "class" attribute — presumably consumed by the KNS rendering layer;
     * verify. Attribute values are not HTML-escaped here.
     *
     * @see HtmlData#constructCompleteHtmlTag()
     */
    public String constructCompleteHtmlTag() {
        return getPrependDisplayText()
                + "<input title=\""
                + title
                + "\""
                + " name=\""
                + getName()
                + "\""
                + (StringUtils.isEmpty(src) ? ""
                        : " src=\"" + src + "\" ")
                + " type=\""
                + getInputType()
                + "\""
                + (StringUtils.isEmpty(value) ? ""
                        : " value=\"" + value + "\" ")
                + (StringUtils.isEmpty(checked) ? ""
                        : " checked=\"" + checked + "\" ")
                + (StringUtils.isEmpty(getStyleClass()) ? ""
                        : " styleClass=\"" + getStyleClass() + "\" ")
                + " border=\"" + getBorder() + "\"" + " value=\""
                + getDisplayText() + "\"" + "/>" + getAppendDisplayText();
    }
    /**
     * @return the inputType
     */
    public String getInputType() {
        return this.inputType;
    }
    /**
     * @return the src
     */
    public String getSrc() {
        return this.src;
    }
    /**
     * @return the border
     */
    public String getBorder() {
        return this.border;
    }
    /**
     * @param border
     *            the border to set
     */
    public void setBorder(String border) {
        this.border = border;
    }
    /**
     * @return the styleClass
     */
    public String getStyleClass() {
        return this.styleClass;
    }
    /**
     * @param styleClass
     *            the styleClass to set
     */
    public void setStyleClass(String styleClass) {
        this.styleClass = styleClass;
    }
    /**
     * @param checked the checked to set
     */
    public void setChecked(String checked) {
        this.checked = checked;
    }
    /**
     * @param value the value to set
     */
    public void setValue(String value) {
        this.value = value;
    }
}
public static class MultipleAnchorHtmlData extends AnchorHtmlData {
protected List<AnchorHtmlData> anchorHtmlData;
protected static final String ANCHORS_SEPARATOR = ", ";
/**
* Needed by inquiry framework
*/
public MultipleAnchorHtmlData(List<AnchorHtmlData> anchorHtmlData) {
this.anchorHtmlData = anchorHtmlData;
}
/**
*
* This method generates anchor tag.
*
* @see HtmlData#constructCompleteHtmlTag()
*/
public String constructCompleteHtmlTag() {
StringBuffer completeHtmlTag = new StringBuffer();
for(AnchorHtmlData anchor: anchorHtmlData){
completeHtmlTag.append(anchor.constructCompleteHtmlTag()+",");
}
if(completeHtmlTag.toString().endsWith(ANCHORS_SEPARATOR))
completeHtmlTag.delete(completeHtmlTag.length()-ANCHORS_SEPARATOR.length(), completeHtmlTag.length());
return completeHtmlTag.toString();
}
/**
* @return the anchorHtmlData
*/
public List<AnchorHtmlData> getAnchorHtmlData() {
return this.anchorHtmlData;
}
}
/**
 * @return the maxLength parsed as an int, or -1 when it is unset or not numeric
 */
public int getMaxLength() {
    int parsed;
    try {
        // Broad catch is deliberate: it covers both NumberFormatException for
        // malformed values and the NullPointerException when maxLength was never set.
        parsed = Integer.parseInt(this.maxLength);
    } catch (Exception parseFailure) {
        parsed = -1;
    }
    return parsed;
}
/**
 * @param maxLength the maxLength to set; stored as a string and parsed to an
 *        int on read (non-numeric or null values read back as -1)
 */
public void setMaxLength(String maxLength) {
    this.maxLength = maxLength;
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediastoredata.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result of a MediaStore Data GetObject call: the object's bytes plus the response
 * headers that describe them.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-data-2017-09-01/GetObject" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetObjectResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The bytes of the object. */
    private java.io.InputStream body;

    /**
     * Optional {@code CacheControl} header that allows the caller to control the object's cache behavior. Headers can
     * be passed in as specified in the HTTP spec (RFC 2616, section 14.9); custom user-defined values are also
     * accepted.
     */
    private String cacheControl;

    /** The range of bytes to retrieve. */
    private String contentRange;

    /** The length of the object in bytes. */
    private Long contentLength;

    /** The content type of the object. */
    private String contentType;

    /** The ETag that represents a unique instance of the object. */
    private String eTag;

    /** The date and time that the object was last modified. */
    private java.util.Date lastModified;

    /**
     * The HTTP status code of the request. Status codes from 200 to 299 indicate success; all other status codes
     * indicate the type of error that occurred.
     */
    private Integer statusCode;

    /** @param body the bytes of the object */
    public void setBody(java.io.InputStream body) {
        this.body = body;
    }

    /** @return the bytes of the object */
    public java.io.InputStream getBody() {
        return this.body;
    }

    /**
     * Fluent variant of {@link #setBody(java.io.InputStream)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withBody(java.io.InputStream body) {
        setBody(body);
        return this;
    }

    /** @param cacheControl the optional CacheControl header value (see class field doc) */
    public void setCacheControl(String cacheControl) {
        this.cacheControl = cacheControl;
    }

    /** @return the optional CacheControl header value */
    public String getCacheControl() {
        return this.cacheControl;
    }

    /**
     * Fluent variant of {@link #setCacheControl(String)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withCacheControl(String cacheControl) {
        setCacheControl(cacheControl);
        return this;
    }

    /** @param contentRange the range of bytes to retrieve */
    public void setContentRange(String contentRange) {
        this.contentRange = contentRange;
    }

    /** @return the range of bytes to retrieve */
    public String getContentRange() {
        return this.contentRange;
    }

    /**
     * Fluent variant of {@link #setContentRange(String)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withContentRange(String contentRange) {
        setContentRange(contentRange);
        return this;
    }

    /** @param contentLength the length of the object in bytes */
    public void setContentLength(Long contentLength) {
        this.contentLength = contentLength;
    }

    /** @return the length of the object in bytes */
    public Long getContentLength() {
        return this.contentLength;
    }

    /**
     * Fluent variant of {@link #setContentLength(Long)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withContentLength(Long contentLength) {
        setContentLength(contentLength);
        return this;
    }

    /** @param contentType the content type of the object */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    /** @return the content type of the object */
    public String getContentType() {
        return this.contentType;
    }

    /**
     * Fluent variant of {@link #setContentType(String)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withContentType(String contentType) {
        setContentType(contentType);
        return this;
    }

    /** @param eTag the ETag that represents a unique instance of the object */
    public void setETag(String eTag) {
        this.eTag = eTag;
    }

    /** @return the ETag that represents a unique instance of the object */
    public String getETag() {
        return this.eTag;
    }

    /**
     * Fluent variant of {@link #setETag(String)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withETag(String eTag) {
        setETag(eTag);
        return this;
    }

    /** @param lastModified the date and time that the object was last modified */
    public void setLastModified(java.util.Date lastModified) {
        this.lastModified = lastModified;
    }

    /** @return the date and time that the object was last modified */
    public java.util.Date getLastModified() {
        return this.lastModified;
    }

    /**
     * Fluent variant of {@link #setLastModified(java.util.Date)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withLastModified(java.util.Date lastModified) {
        setLastModified(lastModified);
        return this;
    }

    /** @param statusCode the HTTP status code of the request (200-299 indicate success) */
    public void setStatusCode(Integer statusCode) {
        this.statusCode = statusCode;
    }

    /** @return the HTTP status code of the request (200-299 indicate success) */
    public Integer getStatusCode() {
        return this.statusCode;
    }

    /**
     * Fluent variant of {@link #setStatusCode(Integer)}.
     *
     * @return this object, so calls can be chained
     */
    public GetObjectResult withStatusCode(Integer statusCode) {
        setStatusCode(statusCode);
        return this;
    }

    /**
     * Appends {@code label + ": " + value} when value is non-null, optionally followed by a comma.
     * Reproduces the SDK-generated toString layout exactly, including the trailing comma emitted
     * after every field except the final one.
     */
    private static void appendField(StringBuilder sb, String label, Object value, boolean trailingComma) {
        if (value != null) {
            sb.append(label).append(": ").append(value);
            if (trailingComma) {
                sb.append(",");
            }
        }
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        appendField(sb, "Body", getBody(), true);
        appendField(sb, "CacheControl", getCacheControl(), true);
        appendField(sb, "ContentRange", getContentRange(), true);
        appendField(sb, "ContentLength", getContentLength(), true);
        appendField(sb, "ContentType", getContentType(), true);
        appendField(sb, "ETag", getETag(), true);
        appendField(sb, "LastModified", getLastModified(), true);
        appendField(sb, "StatusCode", getStatusCode(), false);
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetObjectResult)) {
            return false;
        }
        GetObjectResult other = (GetObjectResult) obj;
        // Objects.equals matches the generated null-xor-then-equals pattern field by field.
        return java.util.Objects.equals(getBody(), other.getBody())
                && java.util.Objects.equals(getCacheControl(), other.getCacheControl())
                && java.util.Objects.equals(getContentRange(), other.getContentRange())
                && java.util.Objects.equals(getContentLength(), other.getContentLength())
                && java.util.Objects.equals(getContentType(), other.getContentType())
                && java.util.Objects.equals(getETag(), other.getETag())
                && java.util.Objects.equals(getLastModified(), other.getLastModified())
                && java.util.Objects.equals(getStatusCode(), other.getStatusCode());
    }

    @Override
    public int hashCode() {
        // Objects.hash implements the same 31-based accumulation (seed 1, null -> 0)
        // as the generated per-field loop, in the same field order.
        return java.util.Objects.hash(getBody(), getCacheControl(), getContentRange(), getContentLength(), getContentType(), getETag(), getLastModified(),
                getStatusCode());
    }

    @Override
    public GetObjectResult clone() {
        try {
            return (GetObjectResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.databasemigrationservice.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Describes an identifiable significant activity that affects a replication instance or task. This object can provide
 * the message, the available event categories, the date and source of the event, and the DMS resource type.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dms-2016-01-01/Event" target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Event implements Serializable, Cloneable, StructuredPojo {

    /** The identifier of an event source. */
    private String sourceIdentifier;

    /**
     * The type of DMS resource that generates events. Valid values: replication-instance | endpoint |
     * replication-task.
     */
    private String sourceType;

    /** The event message. */
    private String message;

    /** The event categories available for the specified source type. */
    private java.util.List<String> eventCategories;

    /** The date of the event. */
    private java.util.Date date;

    /** @param sourceIdentifier the identifier of an event source */
    public void setSourceIdentifier(String sourceIdentifier) {
        this.sourceIdentifier = sourceIdentifier;
    }

    /** @return the identifier of an event source */
    public String getSourceIdentifier() {
        return this.sourceIdentifier;
    }

    /**
     * Fluent variant of {@link #setSourceIdentifier(String)}.
     *
     * @return this object, so calls can be chained
     */
    public Event withSourceIdentifier(String sourceIdentifier) {
        setSourceIdentifier(sourceIdentifier);
        return this;
    }

    /**
     * @param sourceType the type of DMS resource that generates events (replication-instance | endpoint |
     *        replication-task)
     * @see SourceType
     */
    public void setSourceType(String sourceType) {
        this.sourceType = sourceType;
    }

    /**
     * @return the type of DMS resource that generates events
     * @see SourceType
     */
    public String getSourceType() {
        return this.sourceType;
    }

    /**
     * Fluent variant of {@link #setSourceType(String)}.
     *
     * @return this object, so calls can be chained
     * @see SourceType
     */
    public Event withSourceType(String sourceType) {
        setSourceType(sourceType);
        return this;
    }

    /**
     * Enum overload of {@link #setSourceType(String)}; stores the enum's string form.
     *
     * @see SourceType
     */
    public void setSourceType(SourceType sourceType) {
        withSourceType(sourceType);
    }

    /**
     * Enum overload of {@link #withSourceType(String)}; stores the enum's string form.
     *
     * @return this object, so calls can be chained
     * @see SourceType
     */
    public Event withSourceType(SourceType sourceType) {
        this.sourceType = sourceType.toString();
        return this;
    }

    /** @param message the event message */
    public void setMessage(String message) {
        this.message = message;
    }

    /** @return the event message */
    public String getMessage() {
        return this.message;
    }

    /**
     * Fluent variant of {@link #setMessage(String)}.
     *
     * @return this object, so calls can be chained
     */
    public Event withMessage(String message) {
        setMessage(message);
        return this;
    }

    /** @return the event categories available for the specified source type (live internal list, may be null) */
    public java.util.List<String> getEventCategories() {
        return eventCategories;
    }

    /**
     * Replaces the event categories with a defensive copy of the given collection.
     *
     * @param eventCategories the event categories, or null to clear
     */
    public void setEventCategories(java.util.Collection<String> eventCategories) {
        if (eventCategories == null) {
            this.eventCategories = null;
            return;
        }
        this.eventCategories = new java.util.ArrayList<String>(eventCategories);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEventCategories(java.util.Collection)} or {@link #withEventCategories(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param eventCategories event categories to append
     * @return this object, so calls can be chained
     */
    public Event withEventCategories(String... eventCategories) {
        if (this.eventCategories == null) {
            // Pre-size the backing list for the values about to be appended.
            setEventCategories(new java.util.ArrayList<String>(eventCategories.length));
        }
        for (String category : eventCategories) {
            this.eventCategories.add(category);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setEventCategories(java.util.Collection)} (replaces existing values).
     *
     * @return this object, so calls can be chained
     */
    public Event withEventCategories(java.util.Collection<String> eventCategories) {
        setEventCategories(eventCategories);
        return this;
    }

    /** @param date the date of the event */
    public void setDate(java.util.Date date) {
        this.date = date;
    }

    /** @return the date of the event */
    public java.util.Date getDate() {
        return this.date;
    }

    /**
     * Fluent variant of {@link #setDate(java.util.Date)}.
     *
     * @return this object, so calls can be chained
     */
    public Event withDate(java.util.Date date) {
        setDate(date);
        return this;
    }

    /**
     * Appends {@code label + ": " + value} when value is non-null, optionally followed by a comma.
     * Reproduces the SDK-generated toString layout exactly, including the trailing comma emitted
     * after every field except the final one.
     */
    private static void appendField(StringBuilder sb, String label, Object value, boolean trailingComma) {
        if (value != null) {
            sb.append(label).append(": ").append(value);
            if (trailingComma) {
                sb.append(",");
            }
        }
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        appendField(sb, "SourceIdentifier", getSourceIdentifier(), true);
        appendField(sb, "SourceType", getSourceType(), true);
        appendField(sb, "Message", getMessage(), true);
        appendField(sb, "EventCategories", getEventCategories(), true);
        appendField(sb, "Date", getDate(), false);
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Event)) {
            return false;
        }
        Event other = (Event) obj;
        // Objects.equals matches the generated null-xor-then-equals pattern field by field.
        return java.util.Objects.equals(getSourceIdentifier(), other.getSourceIdentifier())
                && java.util.Objects.equals(getSourceType(), other.getSourceType())
                && java.util.Objects.equals(getMessage(), other.getMessage())
                && java.util.Objects.equals(getEventCategories(), other.getEventCategories())
                && java.util.Objects.equals(getDate(), other.getDate());
    }

    @Override
    public int hashCode() {
        // Objects.hash implements the same 31-based accumulation (seed 1, null -> 0)
        // as the generated per-field loop, in the same field order.
        return java.util.Objects.hash(getSourceIdentifier(), getSourceType(), getMessage(), getEventCategories(), getDate());
    }

    @Override
    public Event clone() {
        try {
            return (Event) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.databasemigrationservice.model.transform.EventMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2007, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.content.impl.test;
import java.util.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.content.api.ContentCollection;
import org.sakaiproject.content.api.ContentEntity;
import org.sakaiproject.content.api.ContentHostingHandler;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.content.api.GroupAwareEdit;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.exception.InconsistentException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.time.cover.TimeService;
import org.sakaiproject.util.BaseResourcePropertiesEdit;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * MockContentEntity
 *
 * In-memory stand-in for a Sakai {@link ContentEntity} / {@link GroupAwareEdit},
 * used by content-hosting tests. All state is held in plain fields so tests can
 * set it directly; framework callbacks the tests never exercise return null.
 */
public class MockContentEntity implements ContentEntity, GroupAwareEdit
{
    private static final Log logger = LogFactory.getLog(MockContentEntity.class);

    /** Id of this entity. */
    protected String entityId;
    /** Id of the collection that contains this entity. */
    protected String containingCollectionId;
    /** Entity reference string. */
    protected String reference;
    /** Resource type id. */
    protected String resourceType;
    /** Properties of this entity (always a BaseResourcePropertiesEdit). */
    protected ResourceProperties resourceProperties;
    /** Access mode set directly on this entity. */
    protected AccessMode accessMode;
    /** Access mode inherited from the containing collection. */
    protected AccessMode inheritedAccess;
    /** Groups granted access directly on this entity, keyed by group id. */
    protected Map<String, Group> groupMap = new HashMap<String, Group>();
    /** Groups granted access by inheritance, keyed by group id. */
    protected Map<String, Group> inheritedGroupMap = new HashMap<String, Group>();
    protected boolean isAvailable;
    protected boolean isHidden;
    protected Time releaseDate;
    protected Time retractDate;
    protected boolean isPublic;
    protected boolean inheritsPubview;
    /** Child entities keyed by id (see {@link #getMember(String)}). */
    protected Map<String, MockContentEntity> memberMap = new HashMap<String, MockContentEntity>();
    protected boolean isActiveEdit;
    /** Role ids granted access directly on this entity, in insertion order. */
    protected Set<String> roleIds = new LinkedHashSet<String>();

    /** Creates a mock entity with empty, editable properties. */
    public MockContentEntity() {
        this.resourceProperties = new BaseResourcePropertiesEdit();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getContainingCollection()
     */
    public ContentCollection getContainingCollection()
    {
        // A fresh mock collection is created on every call; members are not populated.
        return new MockContentCollection(this.containingCollectionId);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getMember(java.lang.String)
     */
    // TODO: why is this in ContentEntity????
    public ContentEntity getMember(String nextId)
    {
        return this.memberMap.get(nextId);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getContentHandler()
     */
    public ContentHostingHandler getContentHandler()
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#setContentHandler(org.sakaiproject.content.api.ContentHostingHandler)
     */
    public void setContentHandler(ContentHostingHandler chh)
    {
        // Not needed by the tests.
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#setVirtualContentEntity(org.sakaiproject.content.api.ContentEntity)
     */
    public void setVirtualContentEntity(ContentEntity ce)
    {
        // Not needed by the tests.
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getUrl(boolean)
     */
    public String getUrl(boolean relative)
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getVirtualContentEntity()
     */
    public ContentEntity getVirtualContentEntity()
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getReference(java.lang.String)
     */
    public String getReference(String rootProperty)
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getUrl()
     */
    public String getUrl()
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getUrl(java.lang.String)
     */
    public String getUrl(String rootProperty)
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#toXml(org.w3c.dom.Document, java.util.Stack)
     */
    public Element toXml(Document doc, Stack stack)
    {
        // Not needed by the tests.
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setPublicAccess()
     */
    public void setPublicAccess() throws InconsistentException, PermissionException
    {
        // Not needed by the tests.
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#getResourceType()
     */
    public String getResourceType()
    {
        return this.resourceType;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#isCollection()
     */
    public boolean isCollection()
    {
        return (this instanceof ContentCollection);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.ContentEntity#isResource()
     */
    public boolean isResource()
    {
        return (this instanceof ContentResource);
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getAccess()
     */
    public AccessMode getAccess()
    {
        return this.accessMode;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getGroupObjects()
     */
    public Collection getGroupObjects()
    {
        return this.groupMap.values();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getGroups()
     */
    public Collection getGroups()
    {
        return this.groupMap.keySet();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getInheritedAccess()
     */
    public AccessMode getInheritedAccess()
    {
        return this.inheritedAccess;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getInheritedGroupObjects()
     */
    public Collection getInheritedGroupObjects()
    {
        return this.inheritedGroupMap.values();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getInheritedGroups()
     */
    public Collection getInheritedGroups()
    {
        return this.inheritedGroupMap.keySet();
    }

    /** Role ids granted access directly on this entity. */
    public Set<String> getRoleAccessIds() {
        return roleIds;
    }

    /** This mock never inherits role access, so always returns an empty set. */
    public Set<String> getInheritedRoleAccessIds() {
        return new LinkedHashSet<String>();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getReleaseDate()
     */
    public Time getReleaseDate()
    {
        return this.releaseDate;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#getRetractDate()
     */
    public Time getRetractDate()
    {
        return this.retractDate;
    }

    /** Release date as a java.util.Date; NPEs if no release date is set. */
    public Date getReleaseTime() {
        return new Date(this.releaseDate.getTime());
    }

    /** Retract date as a java.util.Date; NPEs if no retract date is set. */
    public Date getRetractTime() {
        return new Date(this.getRetractDate().getTime());
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#isAvailable()
     */
    public boolean isAvailable()
    {
        return this.isAvailable;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEntity#isHidden()
     */
    public boolean isHidden()
    {
        return this.isHidden;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getId()
     */
    public String getId()
    {
        return this.entityId;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getProperties()
     */
    public ResourceProperties getProperties()
    {
        return this.resourceProperties;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Entity#getReference()
     */
    public String getReference()
    {
        return this.reference;
    }

    /**
     * Clears group access and reverts to inherited access.
     * (The original dead <code>if(false) throw PermissionException</code> guard
     * was unreachable and has been removed; the throws clause is kept for the
     * GroupAwareEdit contract.)
     *
     * @throws InconsistentException if access is not currently GROUPED
     */
    public void clearGroupAccess() throws InconsistentException, PermissionException
    {
        if(this.accessMode != AccessMode.GROUPED)
        {
            throw new InconsistentException(entityId);
        }
        this.accessMode = AccessMode.INHERITED;
        this.groupMap.clear();
    }

    /**
     * Clears public access and reverts to inherited access.
     *
     * @throws PermissionException if the entity is not currently public
     */
    public void clearPublicAccess() throws PermissionException
    {
        if(! this.isPublic)
        {
            throw new PermissionException(null, null, entityId);
        }
        this.isPublic = false;
        this.accessMode = AccessMode.INHERITED;
        this.groupMap.clear();
    }

    /** Grants access to the given role id. */
    public void addRoleAccess(String roleId) throws InconsistentException, PermissionException {
        roleIds.add(roleId);
    }

    /** Revokes access from the given role id. */
    public void removeRoleAccess(String roleId) throws InconsistentException, PermissionException {
        roleIds.remove(roleId);
    }

    /** Revokes all role access. */
    public void clearRoleAccess() throws PermissionException {
        roleIds.clear();
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setAvailability(boolean, org.sakaiproject.time.api.Time, org.sakaiproject.time.api.Time)
     */
    public void setAvailability(boolean hidden, Time releaseDate, Time retractDate)
    {
        this.isHidden = hidden;
        if(hidden)
        {
            // Hidden entities carry no release/retract window.
            this.releaseDate = null;
            this.retractDate = null;
        }
        else
        {
            // Copy the times defensively through the TimeService.
            if(releaseDate != null)
            {
                this.releaseDate = TimeService.newTime(releaseDate.getTime());
            }
            if(retractDate != null)
            {
                this.retractDate = TimeService.newTime(retractDate.getTime());
            }
        }
    }

    /**
     * Replaces this entity's group access with the groups in the given collection
     * of group ids. Unknown group ids (no matching site group) are silently skipped.
     * (The original dead <code>if(false) throw PermissionException</code> guard
     * was unreachable and has been removed.)
     *
     * @param groups collection of group id strings
     * @throws InconsistentException if groups is null/empty or pubview is inherited
     */
    public void setGroupAccess(Collection groups) throws InconsistentException, PermissionException
    {
        if(groups == null || groups.isEmpty() || this.inheritsPubview)
        {
            throw new InconsistentException(entityId);
        }
        this.groupMap.clear();
        for(String groupId : (Collection<String>) groups)
        {
            Group group = SiteService.findGroup(groupId);
            if(group != null)
            {
                this.groupMap.put(groupId, group);
            }
        }
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setHidden()
     */
    public void setHidden()
    {
        this.isHidden = true;
        this.releaseDate = null;
        this.retractDate = null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setReleaseDate(org.sakaiproject.time.api.Time)
     */
    public void setReleaseDate(Time time)
    {
        this.releaseDate = TimeService.newTime(time.getTime());
    }

    /** Sets the release date from a java.util.Date. */
    public void setReleaseTime(Date time)
    {
        this.releaseDate = TimeService.newTime(time.getTime());
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setResourceType(java.lang.String)
     */
    public void setResourceType(String typeId)
    {
        this.resourceType = typeId;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.content.api.GroupAwareEdit#setRetractDate(org.sakaiproject.time.api.Time)
     */
    public void setRetractDate(Time time)
    {
        this.retractDate = TimeService.newTime(time.getTime());
    }

    /** Sets the retract date from a java.util.Date. */
    public void setRetractTime(Date time)
    {
        this.retractDate = TimeService.newTime(time.getTime());
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Edit#getPropertiesEdit()
     */
    public ResourcePropertiesEdit getPropertiesEdit()
    {
        // Safe: the constructor always installs a BaseResourcePropertiesEdit.
        return (ResourcePropertiesEdit) this.resourceProperties;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.Edit#isActiveEdit()
     */
    public boolean isActiveEdit()
    {
        return this.isActiveEdit;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.lookup.impl;
import com.intellij.codeInsight.completion.CodeCompletionFeatures;
import com.intellij.codeInsight.completion.CompletionProgressIndicator;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.lookup.CharFilter;
import com.intellij.codeInsight.lookup.LookupFocusDegree;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.Caret;
import com.intellij.openapi.editor.CaretAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorAction;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.actionSystem.EditorActionManager;
import com.intellij.openapi.project.Project;
import com.intellij.ui.ScrollingUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Base class for editor actions that behave differently while a completion
 * lookup popup is showing. When a lookup is active and visible the action is
 * routed to {@link #executeInLookup}; otherwise the original editor handler runs.
 */
public abstract class LookupActionHandler extends EditorActionHandler {
// Handler that performs the action when no lookup is involved.
protected final EditorActionHandler myOriginalHandler;
public LookupActionHandler(EditorActionHandler originalHandler) {
myOriginalHandler = originalHandler;
}
@Override
public boolean executeInCommand(@NotNull Editor editor, DataContext dataContext) {
// Wrap in a command only when there is no active lookup.
return LookupManager.getActiveLookup(editor) == null;
}
@Override
public void doExecute(@NotNull Editor editor, Caret caret, DataContext dataContext){
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(editor);
if (lookup == null || !lookup.isAvailableToUser()) {
// A lookup that exists but is not yet shown to the user is hidden before delegating.
Project project = editor.getProject();
if (project != null && lookup != null) {
LookupManager.getInstance(project).hideActiveLookup();
}
myOriginalHandler.execute(editor, caret, dataContext);
return;
}
// Mark the selection as user-touched before running the lookup-specific behavior.
lookup.markSelectionTouched();
executeInLookup(lookup, dataContext, caret);
}
// Implemented by subclasses: the action's behavior while the lookup is showing.
protected abstract void executeInLookup(LookupImpl lookup, DataContext context, @Nullable Caret caret);
@Override
public boolean isEnabledForCaret(@NotNull Editor editor, @NotNull Caret caret, DataContext dataContext) {
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(editor);
return lookup != null || myOriginalHandler.isEnabled(editor, caret, dataContext);
}
// Moves the lookup selection one row up or down, focusing the lookup first if needed.
private static void executeUpOrDown(LookupImpl lookup, boolean up) {
if (!lookup.isFocused()) {
boolean semiFocused = lookup.getLookupFocusDegree() == LookupFocusDegree.SEMI_FOCUSED;
lookup.setLookupFocusDegree(LookupFocusDegree.FOCUSED);
// Pressing Down on an unfocused (not semi-focused) lookup only focuses it; no move.
if (!up && !semiFocused) {
return;
}
}
if (up) {
ScrollingUtil.moveUp(lookup.getList(), 0);
} else {
ScrollingUtil.moveDown(lookup.getList(), 0);
}
lookup.markSelectionTouched();
lookup.refreshUi(false, true);
}
/** Down arrow while the lookup is showing: select the next item. */
public static class DownHandler extends LookupActionHandler {
public DownHandler(EditorActionHandler originalHandler){
super(originalHandler);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
executeUpOrDown(lookup, false);
}
}
/** Editor action that moves the caret up in the editor, closing the lookup first. */
public static class UpAction extends EditorAction {
public UpAction() {
super(new UpDownInEditorHandler(true));
}
}
/** Editor action that moves the caret down in the editor, closing the lookup first. */
public static class DownAction extends EditorAction {
public DownAction() {
super(new UpDownInEditorHandler(false));
}
}
// Hides the active lookup and forwards caret movement to the regular editor handler.
private static final class UpDownInEditorHandler extends EditorActionHandler {
private final boolean myUp;
private UpDownInEditorHandler(boolean up) {
myUp = up;
}
@Override
public boolean executeInCommand(@NotNull Editor editor, DataContext dataContext) {
return false;
}
@Override
protected boolean isEnabledForCaret(@NotNull Editor editor, @NotNull Caret caret, DataContext dataContext) {
// Only applicable while a lookup is active.
return LookupManager.getActiveLookup(editor) != null;
}
@Override
protected void doExecute(@NotNull Editor editor, @Nullable Caret caret, DataContext dataContext) {
FeatureUsageTracker.getInstance().triggerFeatureUsed(CodeCompletionFeatures.EDITING_COMPLETION_CONTROL_ARROWS);
LookupImpl lookup = (LookupImpl)LookupManager.getActiveLookup(editor);
assert lookup != null;
lookup.hideLookup(true);
// Delegate to the plain caret-movement action after the lookup is gone.
EditorActionManager.getInstance().getActionHandler(myUp ? IdeActions.ACTION_EDITOR_MOVE_CARET_UP
: IdeActions.ACTION_EDITOR_MOVE_CARET_DOWN)
.execute(editor, caret, dataContext);
}
}
/** Up arrow while the lookup is showing: select the previous item. */
public static class UpHandler extends LookupActionHandler {
public UpHandler(EditorActionHandler originalHandler){
super(originalHandler);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
// Without cycle scrolling, Up at the first row of an unfocused lookup goes to the editor.
if (!UISettings.getInstance().getCycleScrolling() && !lookup.isFocused() && lookup.getList().getSelectedIndex() == 0) {
myOriginalHandler.execute(lookup.getEditor(), caret, context);
return;
}
executeUpOrDown(lookup, true);
}
}
/** Page Down while the lookup is showing: page through the lookup list. */
public static class PageDownHandler extends LookupActionHandler {
public PageDownHandler(final EditorActionHandler originalHandler) {
super(originalHandler);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
lookup.setLookupFocusDegree(LookupFocusDegree.FOCUSED);
ScrollingUtil.movePageDown(lookup.getList());
}
}
/** Page Up while the lookup is showing: page through the lookup list. */
public static class PageUpHandler extends LookupActionHandler {
public PageUpHandler(EditorActionHandler originalHandler){
super(originalHandler);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
lookup.setLookupFocusDegree(LookupFocusDegree.FOCUSED);
ScrollingUtil.movePageUp(lookup.getList());
}
}
/** Left arrow: during completion, shrink the lookup prefix by one character. */
public static class LeftHandler extends LookupActionHandler {
public LeftHandler(EditorActionHandler originalHandler) {
super(originalHandler);
}
@Override
protected void executeInLookup(final LookupImpl lookup, DataContext context, Caret caret) {
if (!lookup.isCompletion()) {
myOriginalHandler.execute(lookup.getEditor(), caret, context);
return;
}
// Drop any selection first; abort if the lookup vetoes the document change.
if (!lookup.performGuardedChange(() -> lookup.getEditor().getSelectionModel().removeSelection())) {
return;
}
BackspaceHandler.truncatePrefix(context, lookup, myOriginalHandler, lookup.getLookupStart() - 1, caret);
}
}
/** Right arrow: during completion, extend the lookup prefix with the next document char. */
public static class RightHandler extends LookupActionHandler {
public RightHandler(EditorActionHandler originalHandler) {
super(originalHandler);
}
@Override
protected void executeInLookup(LookupImpl lookup, DataContext context, final Caret caret) {
final Editor editor = lookup.getEditor();
final int offset = editor.getCaretModel().getOffset();
final CharSequence seq = editor.getDocument().getCharsSequence();
if (seq.length() <= offset || !lookup.isCompletion()) {
myOriginalHandler.execute(editor, caret, context);
return;
}
char c = seq.charAt(offset);
CharFilter.Result lookupAction = LookupTypedHandler.getLookupAction(c, lookup);
// Only characters the filter would add to the prefix are consumed; others fall through.
if (lookupAction != CharFilter.Result.ADD_TO_PREFIX || Character.isWhitespace(c)) {
myOriginalHandler.execute(editor, caret, context);
return;
}
if (!lookup.performGuardedChange(() -> {
CaretAction action = lookupCaret -> {
lookupCaret.removeSelection();
int caretOffset = lookupCaret.getOffset();
if (caretOffset < seq.length()) {
lookupCaret.moveToOffset(caretOffset + 1);
}
};
if (caret == null) {
// No specific caret supplied: advance every caret.
editor.getCaretModel().runForEachCaret(action);
}
else {
action.perform(caret);
}
})) {
return;
}
lookup.fireBeforeAppendPrefix(c);
lookup.appendPrefix(c);
final CompletionProgressIndicator completion = CompletionServiceImpl.getCurrentCompletionProgressIndicator();
if (completion != null) {
completion.prefixUpdated();
}
}
}
}
| |
package com.jcumulus.server.rtmfp.flow;
/**
* jCumulus is a Java port of Cumulus OpenRTMP
*
* Copyright 2011 OpenRTMFP
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License received along this program for more
* details (or else see http://www.gnu.org/licenses/).
*
*
* This file is a part of jCumulus.
*/
import com.jcumulus.server.rtmfp.packet.*;
import com.jcumulus.server.rtmfp.publisher.E;
import com.google.common.base.Strings;
import java.util.LinkedList;
import org.apache.log4j.Logger;
// NOTE(review): obfuscated/decompiled AMF reader (AMF0 + AMF3) over a Packet.
// Single-letter names come from decompilation; the comments below are reviewer
// inferences -- confirm against the original Cumulus/OpenRTMFP sources.
public class B
{
private static final Logger D = Logger.getLogger(B.class);
// Underlying packet the AMF values are read from (field F; distinct from method F()).
private Packet F;
// When true, reference-table positions are recorded while reading (see K()/C()).
boolean C;
// Current encoding marker: 0 appears to mean AMF0, non-zero AMF3 (see G()).
int E;
// Saved packet position restored lazily by A().
int K;
// Saved position used by the AMF0 reference case in G().
int J;
// Stack of open object/array frames (D instances).
LinkedList A;
// Positions for the AMF3 string reference table.
LinkedList H;
// Positions for the AMF3 object reference table.
LinkedList B;
// Positions for the AMF0 reference table.
LinkedList I;
// Positions for the AMF3 class-definition reference table.
LinkedList G;
// Constructs a reader over the given packet.
public B(AudioPacket a)
{
A = new LinkedList();
H = new LinkedList();
B = new LinkedList();
I = new LinkedList();
G = new LinkedList();
F = a;
}
// Enables recording of reference-table positions.
public void K()
{
C = true;
}
// Disables recording of reference-table positions.
public void C()
{
C = false;
}
// Reads an AMF string; Null yields "", a non-string type logs an error and yields null.
public String E()
{
A();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
return "";
}
if(h != com.jcumulus.server.rtmfp.flow.H.String)
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF String type").toString());
return null;
}
F.D(1);
if(E != 0)
return J();
// AMF0: marker 12 appears to be the long-string form (32-bit length).
if(F() == 12)
return new String(F.F(F.C()));
else
return new String(F.F(F.E() & 0xffff));
}
// Consumes an AMF Null value; logs an error if the next value is not Null.
public void P()
{
A();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
return;
} else
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF Null type").toString());
return;
}
}
// Reads an AMF Number; Null or a wrong type yields 0.0.
public Double N()
{
A();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
return Double.valueOf(0.0D);
}
if(h != com.jcumulus.server.rtmfp.flow.H.Number)
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF Number type").toString());
return Double.valueOf(0.0D);
} else
{
F.D(1);
return Double.valueOf(F.B());
}
}
// Reads an AMF integer (accepts Number too); Null or a wrong type yields 0.
private int H()
{
A();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
return 0;
}
if(h != com.jcumulus.server.rtmfp.flow.H.Integer && h != com.jcumulus.server.rtmfp.flow.H.Number)
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF Integer type").toString());
return 0;
}
F.D(1);
if(h == com.jcumulus.server.rtmfp.flow.H.Number)
return (int)F.B();
int i = F.J();
// AMF3 integers are 29-bit; values above 0xFFFFFFF wrap to negative.
if(i > 0xfffffff)
i -= 0x20000000;
return i;
}
// Reads an AMF Boolean; Null or a wrong type yields false.
public boolean O()
{
A();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
return false;
}
if(h != com.jcumulus.server.rtmfp.flow.H.Boolean)
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF Boolean type").toString());
return false;
}
if(E != 0)
{
// AMF3: marker 2 is false, 3 is true.
return F.L() != 2;
} else
{
F.D(1);
return F.L() != 0;
}
}
// Begins reading an AMF Object, pushing a frame on the stack; the returned C
// carries success plus (for typed objects) the class name.
private C D()
{
A();
C c = new C();
H h = G();
if(h == com.jcumulus.server.rtmfp.flow.H.Null)
{
F.D(1);
c.A(false);
return c;
}
if(h != com.jcumulus.server.rtmfp.flow.H.Object)
{
D.error((new StringBuilder()).append("Type ").append(h).append(" is not a AMF Object type").toString());
c.A(false);
return c;
}
if(E == 0)
{
// AMF0 object: optionally record its position for back-references.
if(C)
I.push(Integer.valueOf(F.H()));
// Marker 16 appears to be the AMF0 typed-object form carrying a class name.
if(F() == 16)
{
F.D(1);
c.A(J());
} else
{
F.D(1);
}
D d = new D(E);
A.push(d);
if(J != 0)
d.B = J;
d.E = true;
c.A(true);
return c;
}
// AMF3 object: decode the reference/inline flags packed into the U29 header.
F.D(1);
int i = F.H();
int j = F.J();
boolean flag = (j & 1) != 0;
j >>= 1;
if(!flag && j > B.size())
{
D.error("AMF3 reference not found");
c.A(false);
return c;
}
D d1 = new D(E);
A.push(d1);
if(flag)
{
if(C)
B.push(Integer.valueOf(i));
} else
{
// Object reference: jump back to the referenced position.
d1.B = F.H();
F.E(((Integer)B.get(j)).intValue());
j = F.J() >> 1;
}
flag = (j & 1) != 0;
j >>= 1;
if(flag)
{
// Inline class definition: record it and read the class name.
G.push(Integer.valueOf(i));
c.A(J());
} else
if(j <= G.size())
{
// Class-definition reference: jump to it, remembering where to resume.
K = F.H();
F.E(((Integer)G.get(j)).intValue());
j = F.J() >> 2;
c.A(J());
} else
{
D.error("AMF3 classDef reference not found");
j = 2;
}
// Low bits of the remaining header select dynamic (C) vs externalizable-style (E) traits.
if((j & 1) != 0)
d1.C = true;
else
if((j & 2) != 0)
d1.E = true;
j >>= 2;
if(!d1.C)
{
// Sealed traits: read the j member names up front.
d1.F = new LinkedList();
for(int k = 0; k < j; k++)
{
String s = J();
d1.F.add(s);
}
}
A();
c.A(true);
return c;
}
// Reads the next item (key + value type) of the currently open object/array frame.
private G I()
{
G g = new G();
A();
if(A.size() == 0)
{
D.error("AMFReader::readItem called without a AMFReader::readObject or a AMFReader::readArray before");
g.A(com.jcumulus.server.rtmfp.flow.H.End);
return g;
}
D d = (D)A.getLast();
E = d.D;
boolean flag = false;
// Frame marker 17 appears to be a dictionary, which this path does not handle.
if(d.G == 17)
{
D.error("AMFReader::readItem on a dictionary, used AMFReader::readKey and AMFReader::readValue rather");
g.A(com.jcumulus.server.rtmfp.flow.H.End);
return g;
}
if(d.F.size() > 0)
{
// Sealed traits: member names were pre-read; pop the next one.
g.A((String)d.F.getFirst());
d.F.removeFirst();
} else
if(d.G == 10)
{
// Dense-array frame: A counts the remaining unnamed elements.
if(d.A == 0)
{
flag = true;
} else
{
d.A--;
g.A("");
}
} else
if(!d.E)
{
if(d.C)
{
d.C = false;
g.A(com.jcumulus.server.rtmfp.flow.H.RawObjectContent);
return g;
}
flag = true;
} else
{
// Dynamic members: an empty key name terminates the member list.
String s = J();
g.A(s);
if(Strings.isNullOrEmpty(s))
{
if(d.G == 9)
{
d.G = 10;
return I();
}
flag = true;
} else
if(d.G == 0);
// (empty statement above is a decompiler artifact)
}
if(flag)
{
// End of the frame: in AMF0, expect the end-object marker (9).
if(E == 0 && d.G != 10)
{
byte byte0 = F.L();
if(byte0 != 9)
D.error("AMF0 end marker object absent");
}
K = d.B;
A();
A.removeLast();
g.A(com.jcumulus.server.rtmfp.flow.H.End);
return g;
} else
{
g.A(G());
return g;
}
}
// Reads a raw string in the current encoding (AMF0 short string or AMF3
// string with reference-table support).
private String J()
{
if(E == 0)
return new String(F.A());
int i = F.H();
int j = F.J();
boolean flag = (j & 1) != 0;
j >>= 1;
String s;
if(flag)
{
// Inline string; non-empty strings are added to the reference table.
s = new String(F.F(j));
if(!Strings.isNullOrEmpty(s))
H.push(Integer.valueOf(i));
} else
{
if(j > H.size())
{
D.error("AMF3 string reference not found");
return null;
}
// String reference: jump to the stored position, read, then restore via A().
K = F.H();
F.E(((Integer)H.get(j)).intValue());
s = new String(F.F(F.J() >> 1));
A();
}
return s;
}
// Peeks the next value's AMF type without consuming it (except for
// encoding-switch and reference markers, which are resolved recursively).
public H G()
{
A();
if(E != F.H())
if(A.size() > 0)
E = ((D)A.getLast()).D;
else
E = 0;
if(!B())
return com.jcumulus.server.rtmfp.flow.H.End;
byte byte0 = F();
// AMF0 marker 17 switches the stream to AMF3.
if(E == 0 && byte0 == 17)
{
F.D(1);
E = F.H();
if(!B())
return com.jcumulus.server.rtmfp.flow.H.End;
byte0 = F();
}
if(E != 0)
{
// AMF3 marker table.
switch(byte0)
{
case 0: // '\0'
case 1: // '\001'
return com.jcumulus.server.rtmfp.flow.H.Null;
case 2: // '\002'
case 3: // '\003'
return com.jcumulus.server.rtmfp.flow.H.Boolean;
case 4: // '\004'
return com.jcumulus.server.rtmfp.flow.H.Integer;
case 5: // '\005'
return com.jcumulus.server.rtmfp.flow.H.Number;
case 6: // '\006'
return com.jcumulus.server.rtmfp.flow.H.String;
case 8: // '\b'
return com.jcumulus.server.rtmfp.flow.H.Date;
case 9: // '\t'
return com.jcumulus.server.rtmfp.flow.H.Array;
case 17: // '\021'
return com.jcumulus.server.rtmfp.flow.H.Dictionary;
case 10: // '\n'
return com.jcumulus.server.rtmfp.flow.H.Object;
case 12: // '\f'
return com.jcumulus.server.rtmfp.flow.H.ByteArray;
case 7: // '\007'
case 11: // '\013'
case 13: // '\r'
case 14: // '\016'
case 15: // '\017'
case 16: // '\020'
default:
D.error((new StringBuilder()).append("Unknown AMF3 type ").append(byte0).toString());
break;
}
// Unknown marker: skip it and retry.
F.D(1);
return G();
}
// AMF0 marker table.
switch(byte0)
{
case 5: // '\005'
case 6: // '\006'
return com.jcumulus.server.rtmfp.flow.H.Null;
case 1: // '\001'
return com.jcumulus.server.rtmfp.flow.H.Boolean;
case 0: // '\0'
return com.jcumulus.server.rtmfp.flow.H.Number;
case 2: // '\002'
case 12: // '\f'
return com.jcumulus.server.rtmfp.flow.H.String;
case 8: // '\b'
case 10: // '\n'
return com.jcumulus.server.rtmfp.flow.H.Array;
case 11: // '\013'
return com.jcumulus.server.rtmfp.flow.H.Date;
case 3: // '\003'
case 16: // '\020'
return com.jcumulus.server.rtmfp.flow.H.Object;
case 7: // '\007'
// AMF0 reference: jump to the referenced position and re-read the type.
F.D(1);
short word0 = F.E();
if(word0 > I.size())
{
D.error("AMF0 reference not found");
return G();
} else
{
J = F.H();
F.E(((Integer)I.get(word0)).intValue());
return G();
}
case 9: // '\t'
D.error("AMF end object type without begin object type before");
F.D(1);
return G();
case 13: // '\r'
D.warn("Unsupported type in AMF format");
F.D(1);
return G();
case 4: // '\004'
case 14: // '\016'
case 15: // '\017'
default:
D.error((new StringBuilder()).append("Unknown AMF type ").append(byte0).toString());
F.D(1);
return G();
}
}
// Reads a whole simple object into an E (AMF data object), converting each
// supported member type; unsupported types are logged and skipped.
public com.jcumulus.server.rtmfp.flow.E L()
{
com.jcumulus.server.rtmfp.flow.E e = new com.jcumulus.server.rtmfp.flow.E();
C c = D();
if(!c.A())
return null;
if(!Strings.isNullOrEmpty(c.C()))
D.warn((new StringBuilder()).append("Object seems not be a simple object because it has a ").append(c.C()).append(" type").toString());
do
{
G g;
if((g = I()).B() == com.jcumulus.server.rtmfp.flow.H.End)
break;
String s = g.A();
if(com.jcumulus.server.rtmfp.flow.H.Null == g.B())
{
P();
e.G(s);
continue;
}
if(com.jcumulus.server.rtmfp.flow.H.Boolean == g.B())
{
e.A(s, O());
continue;
}
if(com.jcumulus.server.rtmfp.flow.H.Integer == g.B())
{
e.A(s, H());
continue;
}
if(com.jcumulus.server.rtmfp.flow.H.String == g.B())
{
String s1 = E();
e.B(s, s1);
continue;
}
if(com.jcumulus.server.rtmfp.flow.H.Number == g.B())
{
e.A(s, N().doubleValue());
continue;
}
// Date members terminate the conversion loop.
if(com.jcumulus.server.rtmfp.flow.H.Date == g.B())
break;
D.error((new StringBuilder()).append("AMF ").append(g.B()).append(" type unsupported in an AMFDataObj conversion").toString());
F.D(1);
} while(true);
return e;
}
// Peeks the next byte of the packet without consuming it.
private byte F()
{
return F.G()[0];
}
// Returns true when more data is available to read.
public boolean B()
{
A();
return F.I() > 0;
}
// Restores the packet position saved in K (set by the reference-following paths).
private void A()
{
if(K > 0)
{
F.E(K);
K = 0;
}
}
// Exposes the underlying packet.
public Packet M()
{
return F;
}
}
| |
/*
* Copyright 2013 David Schreiber
* 2013 John Paul Nalog
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smartplace.alerta.coverflow;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.*;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Transformation;
import android.widget.Gallery;
import android.widget.SpinnerAdapter;
import com.smartplace.alerta.R;
public class FancyCoverFlow extends Gallery {
// =============================================================================
// Constants
// =============================================================================
/** Sentinel: derive the action distance automatically from the coverflow width. */
public static final int ACTION_DISTANCE_AUTO = Integer.MAX_VALUE;
public static final float SCALEDOWN_GRAVITY_TOP = 0.0f;
public static final float SCALEDOWN_GRAVITY_CENTER = 0.5f;
public static final float SCALEDOWN_GRAVITY_BOTTOM = 1.0f;
// =============================================================================
// Private members
// =============================================================================
/** Height of the reflection relative to the item height (fraction in (0, 0.5]). */
private float reflectionRatio = 0.4f;
/** Gap in pixels between an item and its reflection. */
private int reflectionGap = 20;
/** Whether items are drawn with a reflection below them. */
private boolean reflectionEnabled = false;
/**
 * Alpha applied to unselected items (mirrors Gallery's unselected alpha).
 */
private float unselectedAlpha;
/**
 * Camera used for view transformation.
 */
private Camera transformationCamera;
/**
 * Maximum rotation (degrees) applied to items left/right of the center.
 */
private int maxRotation = 75;
/**
 * Factor (0-1) that defines how much the unselected children should be scaled down. 1 means no scaledown.
 */
private float unselectedScale;
/**
 * Vertical anchor (0=top, 0.5=center, 1=bottom) for the scale-down effect.
 */
private float scaleDownGravity = SCALEDOWN_GRAVITY_CENTER;
/**
 * Distance in pixels between the transformation effects (alpha, rotation, zoom) are applied.
 */
private int actionDistance;
/**
 * Saturation factor (0-1) of items that reach the outer effects distance.
 */
private float unselectedSaturation;
/** Creates a coverflow programmatically (no XML attributes applied). */
public FancyCoverFlow(Context context) {
super(context);
this.initialize();
}
/** Creates a coverflow from XML, applying the FancyCoverFlow styleable attributes. */
public FancyCoverFlow(Context context, AttributeSet attrs) {
super(context, attrs);
this.initialize();
this.applyXmlAttributes(attrs);
}
/** Creates a coverflow from XML with a default style, applying the styleable attributes. */
public FancyCoverFlow(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
this.initialize();
this.applyXmlAttributes(attrs);
}
/** Common constructor setup: transformation camera and zero item spacing. */
private void initialize() {
this.transformationCamera = new Camera();
this.setSpacing(0);
}
/**
 * Reads the FancyCoverFlow styleable attributes (action distance, scale-down
 * gravity, max rotation, unselected alpha/saturation/scale) into the fields.
 *
 * @param attrs attribute set supplied by the layout inflater
 */
private void applyXmlAttributes(AttributeSet attrs) {
    TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.FancyCoverFlow);
    try {
        this.actionDistance = a.getInteger(R.styleable.FancyCoverFlow_actionDistance, ACTION_DISTANCE_AUTO);
        this.scaleDownGravity = a.getFloat(R.styleable.FancyCoverFlow_scaleDownGravity, 1.0f);
        this.maxRotation = a.getInteger(R.styleable.FancyCoverFlow_maxRotation, 45);
        this.unselectedAlpha = a.getFloat(R.styleable.FancyCoverFlow_unselectedAlpha, 0.3f);
        this.unselectedSaturation = a.getFloat(R.styleable.FancyCoverFlow_unselectedSaturation, 0.0f);
        this.unselectedScale = a.getFloat(R.styleable.FancyCoverFlow_unselectedScale, 0.75f);
    } finally {
        // Fix: TypedArray instances are pooled and must be released with recycle().
        a.recycle();
    }
}
// =============================================================================
// Getter / Setter
// =============================================================================
/** Returns the reflection height as a fraction of the item height. */
public float getReflectionRatio() {
return reflectionRatio;
}
/**
 * Sets the reflection height as a fraction of the item height and refreshes
 * the adapter so existing item views are re-rendered.
 *
 * @param reflectionRatio value in the interval (0, 0.5]
 * @throws IllegalArgumentException if the value lies outside (0, 0.5]
 */
public void setReflectionRatio(float reflectionRatio) {
    if (reflectionRatio <= 0 || reflectionRatio > 0.5f) {
        throw new IllegalArgumentException("reflectionRatio may only be in the interval (0, 0.5]");
    }

    this.reflectionRatio = reflectionRatio;

    final SpinnerAdapter adapter = this.getAdapter();
    if (adapter != null) {
        ((FancyCoverFlowAdapter) adapter).notifyDataSetChanged();
    }
}
/** Returns the gap in pixels between an item and its reflection. */
public int getReflectionGap() {
return reflectionGap;
}
/**
 * Sets the gap in pixels between an item and its reflection and refreshes the
 * adapter so existing item views are re-rendered.
 *
 * @param reflectionGap gap in pixels
 */
public void setReflectionGap(int reflectionGap) {
    this.reflectionGap = reflectionGap;

    final SpinnerAdapter adapter = this.getAdapter();
    if (adapter != null) {
        ((FancyCoverFlowAdapter) adapter).notifyDataSetChanged();
    }
}
/** Returns whether items are drawn with a reflection. */
public boolean isReflectionEnabled() {
return reflectionEnabled;
}
/**
 * Enables or disables the reflection effect and refreshes the adapter so
 * existing item views are re-rendered.
 *
 * @param reflectionEnabled true to draw reflections below items
 */
public void setReflectionEnabled(boolean reflectionEnabled) {
    this.reflectionEnabled = reflectionEnabled;

    final SpinnerAdapter adapter = this.getAdapter();
    if (adapter != null) {
        ((FancyCoverFlowAdapter) adapter).notifyDataSetChanged();
    }
}
/**
 * Use this to provide a {@link FancyCoverFlowAdapter} to the coverflow. This
 * method will throw an {@link ClassCastException} if the passed adapter does not
 * subclass {@link FancyCoverFlowAdapter}.
 *
 * @param adapter the adapter; must be a FancyCoverFlowAdapter
 */
@Override
public void setAdapter(SpinnerAdapter adapter) {
// Reject plain SpinnerAdapters early: the effects need FancyCoverFlowAdapter hooks.
if (!(adapter instanceof FancyCoverFlowAdapter)) {
throw new ClassCastException(FancyCoverFlow.class.getSimpleName() + " only works in conjunction with a " + FancyCoverFlowAdapter.class.getSimpleName());
}
super.setAdapter(adapter);
}
/**
 * Returns the maximum rotation that is applied to items left and right of the center of the coverflow.
 *
 * @return the maximum rotation in degrees
 */
public int getMaxRotation() {
return maxRotation;
}
/**
 * Sets the maximum rotation that is applied to items left and right of the center of the coverflow.
 * Takes effect on the next layout/transformation pass; does not trigger an adapter refresh.
 *
 * @param maxRotation the maximum rotation in degrees (0 disables rotation)
 */
public void setMaxRotation(int maxRotation) {
this.maxRotation = maxRotation;
}
/**
 * Returns the alpha applied to items at maximum distance from the center.
 *
 * @return the unselected alpha (1 = fully opaque)
 */
public float getUnselectedAlpha() {
return this.unselectedAlpha;
}
/**
 * Returns the scale applied to items at maximum distance from the center.
 *
 * @return the unselected scale (1 = no scaling)
 */
public float getUnselectedScale() {
return unselectedScale;
}
/**
 * Sets the scale applied to items at maximum distance from the center.
 * Takes effect on the next transformation pass.
 *
 * @param unselectedScale the unselected scale (1 = no scaling)
 */
public void setUnselectedScale(float unselectedScale) {
this.unselectedScale = unselectedScale;
}
/**
 * Returns the vertical anchor used when scaling items down
 * (0 = top, 0.5 = center, 1 = bottom).
 *
 * @return the scale-down gravity
 */
public float getScaleDownGravity() {
return scaleDownGravity;
}
/**
 * Sets the vertical anchor used when scaling items down
 * (0 = top, 0.5 = center, 1 = bottom).
 *
 * @param scaleDownGravity the scale-down gravity
 */
public void setScaleDownGravity(float scaleDownGravity) {
this.scaleDownGravity = scaleDownGravity;
}
/**
 * Returns the distance, in pixels, from the center at which the coverflow
 * effects reach full strength, or ACTION_DISTANCE_AUTO.
 *
 * @return the action distance in pixels, or ACTION_DISTANCE_AUTO
 */
public int getActionDistance() {
return actionDistance;
}
/**
 * Sets the distance, in pixels, from the center at which the coverflow
 * effects reach full strength; pass ACTION_DISTANCE_AUTO to derive it from
 * the widget width.
 *
 * @param actionDistance the action distance in pixels, or ACTION_DISTANCE_AUTO
 */
public void setActionDistance(int actionDistance) {
this.actionDistance = actionDistance;
}
/**
 * Sets the alpha applied to items at maximum distance from the center.
 * Delegates to the superclass first, then records the value locally for use
 * in getChildStaticTransformation (order preserved deliberately).
 *
 * @param unselectedAlpha the unselected alpha (1 = fully opaque)
 */
@Override
public void setUnselectedAlpha(float unselectedAlpha) {
super.setUnselectedAlpha(unselectedAlpha);
this.unselectedAlpha = unselectedAlpha;
}
/**
 * Returns the color saturation applied to items at maximum distance from the
 * center (1 = full color, 0 = grayscale).
 *
 * @return the unselected saturation
 */
public float getUnselectedSaturation() {
return unselectedSaturation;
}
/**
 * Sets the color saturation applied to items at maximum distance from the
 * center (1 = full color, 0 = grayscale). Takes effect on the next
 * transformation pass.
 *
 * @param unselectedSaturation the unselected saturation
 */
public void setUnselectedSaturation(float unselectedSaturation) {
this.unselectedSaturation = unselectedSaturation;
}
// =============================================================================
// Supertype overrides
// =============================================================================
/**
 * Computes the per-child transformation (rotation, zoom, alpha and
 * saturation fading) that produces the coverflow effect, based on the
 * child's horizontal distance from the widget center.
 *
 * @param child the child view; must be a FancyCoverFlowItemWrapper
 * @param t the transformation object to fill in
 * @return always true (a static transformation is always applied)
 */
@Override
protected boolean getChildStaticTransformation(View child, Transformation t) {
// We can cast here because FancyCoverFlowAdapter only creates wrappers.
FancyCoverFlowItemWrapper item = (FancyCoverFlowItemWrapper) child;
// Since Jelly Bean childs won't get invalidated automatically, needs to be added for the smooth coverflow animation
if (android.os.Build.VERSION.SDK_INT >= 16) {
item.invalidate();
}
final int coverFlowWidth = this.getWidth();
final int coverFlowCenter = coverFlowWidth / 2;
final int childWidth = item.getWidth();
final int childHeight = item.getHeight();
final int childCenter = item.getLeft() + childWidth / 2;
// Use coverflow width when its defined as automatic.
final int actionDistance = (this.actionDistance == ACTION_DISTANCE_AUTO) ? (int) ((coverFlowWidth + childWidth) / 2.0f) : this.actionDistance;
// Calculate the abstract amount for all effects: clamped to [-1, 1], 0 at center.
final float effectsAmount = Math.min(1.0f, Math.max(-1.0f, (1.0f / actionDistance) * (childCenter - coverFlowCenter)));
// Clear previous transformations and set transformation type (matrix + alpha).
t.clear();
t.setTransformationType(Transformation.TYPE_BOTH);
// Alpha: linear fade from 1 at center to unselectedAlpha at max distance.
if (this.unselectedAlpha != 1) {
final float alphaAmount = (this.unselectedAlpha - 1) * Math.abs(effectsAmount) + 1;
t.setAlpha(alphaAmount);
}
// Saturation: same linear fade, applied by the wrapper itself.
if (this.unselectedSaturation != 1) {
// Pass over saturation to the wrapper.
final float saturationAmount = (this.unselectedSaturation - 1) * Math.abs(effectsAmount) + 1;
item.setSaturation(saturationAmount);
}
final Matrix imageMatrix = t.getMatrix();
// Apply rotation about the Y axis via the shared Camera (overwrites the matrix).
if (this.maxRotation != 0) {
final int rotationAngle = (int) (-effectsAmount * this.maxRotation);
this.transformationCamera.save();
this.transformationCamera.rotateY(rotationAngle);
this.transformationCamera.getMatrix(imageMatrix);
this.transformationCamera.restore();
}
// Zoom: scale about an anchor point; pre/post translate order is significant.
if (this.unselectedScale != 1) {
final float zoomAmount = (this.unselectedScale - 1) * Math.abs(effectsAmount) + 1;
// Calculate the scale anchor (y anchor can be altered)
final float translateX = childWidth / 2.0f;
final float translateY = childHeight * this.scaleDownGravity;
imageMatrix.preTranslate(-translateX, -translateY);
imageMatrix.postScale(zoomAmount, zoomAmount);
imageMatrix.postTranslate(translateX, translateY);
}
return true;
}
// =============================================================================
// Public classes
// =============================================================================
/**
 * Layout parameters for children of a FancyCoverFlow; pure pass-through to
 * {@link Gallery.LayoutParams} with the standard three constructors.
 */
public static class LayoutParams extends Gallery.LayoutParams {
public LayoutParams(Context c, AttributeSet attrs) {
super(c, attrs);
}
public LayoutParams(int w, int h) {
super(w, h);
}
public LayoutParams(ViewGroup.LayoutParams source) {
super(source);
}
}
}
| |
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.store.Directory;
import org.junit.BeforeClass;
// TODO: cutover TestGeoUtils too?
public abstract class BaseGeoPointTestCase extends LuceneTestCase {
protected static final String FIELD_NAME = "point";
private static final double LON_SCALE = (0x1L<<GeoUtils.BITS)/360.0D;
private static final double LAT_SCALE = (0x1L<<GeoUtils.BITS)/180.0D;
private static double originLat;
private static double originLon;
private static double lonRange;
private static double latRange;
/**
 * Picks a random "small" test region once per test class: a modest lat/lon
 * range around a random origin, so tests can exercise tightly clustered
 * points. The origin is chosen so the range stays inside valid bounds,
 * then normalized.
 */
@BeforeClass
public static void beforeClassBase() throws Exception {
// Between 1.0 and 3.0:
lonRange = 2 * (random().nextDouble() + 0.5);
latRange = 2 * (random().nextDouble() + 0.5);
originLon = GeoUtils.normalizeLon(GeoUtils.MIN_LON_INCL + lonRange + (GeoUtils.MAX_LON_INCL - GeoUtils.MIN_LON_INCL - 2 * lonRange) * random().nextDouble());
originLat = GeoUtils.normalizeLat(GeoUtils.MIN_LAT_INCL + latRange + (GeoUtils.MAX_LAT_INCL - GeoUtils.MIN_LAT_INCL - 2 * latRange) * random().nextDouble());
}
// A particularly tricky adversary for BKD tree:
/**
 * Indexes many documents that all carry the exact same point — a
 * particularly tricky adversary for BKD-tree splitting — and verifies
 * query results against the brute-force oracle.
 */
@Nightly
public void testSamePointManyTimes() throws Exception {
    int docCount = atLeast(1000);
    // TODO: GeoUtils are potentially slow if we use small=false with heavy testing
    boolean useSmallRegion = random().nextBoolean();
    double sharedLat = randomLat(useSmallRegion);
    double sharedLon = randomLon(useSmallRegion);
    double[] allLats = new double[docCount];
    double[] allLons = new double[docCount];
    Arrays.fill(allLats, sharedLat);
    Arrays.fill(allLons, sharedLon);
    verify(useSmallRegion, allLats, allLons);
}
/**
 * Indexes many documents that all share one latitude (longitudes vary),
 * stressing degenerate splits along the latitude dimension. Some docs are
 * left without a point (NaN) and some duplicate an earlier doc's point.
 */
@Nightly
public void testAllLatEqual() throws Exception {
int numPoints = atLeast(10000);
// TODO: GeoUtils are potentially slow if we use small=false with heavy testing
// boolean small = random().nextBoolean();
boolean small = true;
double lat = randomLat(small);
double[] lats = new double[numPoints];
double[] lons = new double[numPoints];
boolean haveRealDoc = false;
for(int docID=0;docID<numPoints;docID++) {
int x = random().nextInt(20);
if (x == 17) {
// Some docs don't have a point:
lats[docID] = Double.NaN;
if (VERBOSE) {
System.out.println(" doc=" + docID + " is missing");
}
continue;
}
if (docID > 0 && x == 14 && haveRealDoc) {
// Duplicate an earlier real doc's point (skip missing docs):
int oldDocID;
while (true) {
oldDocID = random().nextInt(docID);
if (Double.isNaN(lats[oldDocID]) == false) {
break;
}
}
// Fully identical point:
lons[docID] = lons[oldDocID];
if (VERBOSE) {
System.out.println(" doc=" + docID + " lat=" + lat + " lon=" + lons[docID] + " (same lat/lon as doc=" + oldDocID + ")");
}
} else {
lons[docID] = randomLon(small);
haveRealDoc = true;
if (VERBOSE) {
System.out.println(" doc=" + docID + " lat=" + lat + " lon=" + lons[docID]);
}
}
lats[docID] = lat;
}
verify(small, lats, lons);
}
/**
 * Mirror of testAllLatEqual: all documents share one longitude while
 * latitudes vary, stressing degenerate splits along the longitude
 * dimension. Some docs are missing and some duplicate earlier points.
 */
@Nightly
public void testAllLonEqual() throws Exception {
int numPoints = atLeast(10000);
// TODO: GeoUtils are potentially slow if we use small=false with heavy testing
// boolean small = random().nextBoolean();
boolean small = true;
double theLon = randomLon(small);
double[] lats = new double[numPoints];
double[] lons = new double[numPoints];
boolean haveRealDoc = false;
//System.out.println("theLon=" + theLon);
for(int docID=0;docID<numPoints;docID++) {
int x = random().nextInt(20);
if (x == 17) {
// Some docs don't have a point:
lats[docID] = Double.NaN;
if (VERBOSE) {
System.out.println(" doc=" + docID + " is missing");
}
continue;
}
if (docID > 0 && x == 14 && haveRealDoc) {
// Duplicate an earlier real doc's point (skip missing docs):
int oldDocID;
while (true) {
oldDocID = random().nextInt(docID);
if (Double.isNaN(lats[oldDocID]) == false) {
break;
}
}
// Fully identical point:
lats[docID] = lats[oldDocID];
if (VERBOSE) {
System.out.println(" doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon + " (same lat/lon as doc=" + oldDocID + ")");
}
} else {
lats[docID] = randomLat(small);
haveRealDoc = true;
if (VERBOSE) {
System.out.println(" doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon);
}
}
lons[docID] = theLon;
}
verify(small, lats, lons);
}
/**
 * Indexes two points per document and checks bounding-box queries against a
 * brute-force oracle: a doc must match if either of its points matches.
 * Uses its own indexing path (not verify()) because it needs docID order to
 * line up with the lats/lons arrays.
 */
@Nightly
public void testMultiValued() throws Exception {
int numPoints = atLeast(10000);
// Every doc has 2 points:
double[] lats = new double[2*numPoints];
double[] lons = new double[2*numPoints];
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
initIndexWriterConfig(FIELD_NAME, iwc);
// We rely on docID order:
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
// TODO: GeoUtils are potentially slow if we use small=false with heavy testing
boolean small = random().nextBoolean();
//boolean small = true;
for (int id=0;id<numPoints;id++) {
Document doc = new Document();
lats[2*id] = randomLat(small);
lons[2*id] = randomLon(small);
doc.add(newStringField("id", ""+id, Field.Store.YES));
addPointToDoc(FIELD_NAME, doc, lats[2*id], lons[2*id]);
lats[2*id+1] = randomLat(small);
lons[2*id+1] = randomLon(small);
addPointToDoc(FIELD_NAME, doc, lats[2*id+1], lons[2*id+1]);
if (VERBOSE) {
System.out.println("id=" + id);
System.out.println(" lat=" + lats[2*id] + " lon=" + lons[2*id]);
System.out.println(" lat=" + lats[2*id+1] + " lon=" + lons[2*id+1]);
}
w.addDocument(doc);
}
if (random().nextBoolean()) {
w.forceMerge(1);
}
IndexReader r = w.getReader();
w.close();
// We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
IndexSearcher s = newSearcher(r, false);
int iters = atLeast(75);
for (int iter=0;iter<iters;iter++) {
GeoRect rect = randomRect(small, small == false);
if (VERBOSE) {
System.out.println("\nTEST: iter=" + iter + " bbox=" + rect);
}
Query query = newBBoxQuery(FIELD_NAME, rect);
// Collect all matching docIDs into a bitset for comparison with the oracle:
final FixedBitSet hits = new FixedBitSet(r.maxDoc());
s.search(query, new SimpleCollector() {
private int docBase;
@Override
public boolean needsScores() {
return false;
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
docBase = context.docBase;
}
@Override
public void collect(int doc) {
hits.set(docBase+doc);
}
});
boolean fail = false;
for(int docID=0;docID<lats.length/2;docID++) {
double latDoc1 = lats[2*docID];
double lonDoc1 = lons[2*docID];
double latDoc2 = lats[2*docID+1];
double lonDoc2 = lons[2*docID+1];
Boolean result1 = rectContainsPoint(rect, latDoc1, lonDoc1);
if (result1 == null) {
// borderline case: cannot test
continue;
}
Boolean result2 = rectContainsPoint(rect, latDoc2, lonDoc2);
if (result2 == null) {
// borderline case: cannot test
continue;
}
// Multi-valued semantics: doc matches if either point is inside the rect.
boolean expected = result1 == Boolean.TRUE || result2 == Boolean.TRUE;
if (hits.get(docID) != expected) {
String id = s.doc(docID).get("id");
if (expected) {
System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should match but did not");
} else {
System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should not match but did");
}
System.out.println(" rect=" + rect);
System.out.println(" lat=" + latDoc1 + " lon=" + lonDoc1 + "\n lat=" + latDoc2 + " lon=" + lonDoc2);
System.out.println(" result1=" + result1 + " result2=" + result2);
fail = true;
}
}
if (fail) {
fail("some hits were wrong");
}
}
r.close();
dir.close();
}
/** Tiny random test (~10 docs): exercises the single-leaf-node case. */
public void testRandomTiny() throws Exception {
// Make sure single-leaf-node case is OK:
doTestRandom(10);
}
/** Medium random test (~10K docs). */
public void testRandomMedium() throws Exception {
doTestRandom(10000);
}
/** Big random test (~200K docs); nightly only. */
@Nightly
public void testRandomBig() throws Exception {
doTestRandom(200000);
}
/**
 * Generates a random point set of roughly {@code count} docs — mixing
 * missing points (NaN), shared-lat, shared-lon and fully duplicated points
 * with fresh random points — then runs the full verify() harness on it.
 *
 * @param count lower bound on the number of generated documents
 */
private void doTestRandom(int count) throws Exception {
int numPoints = atLeast(count);
if (VERBOSE) {
System.out.println("TEST: numPoints=" + numPoints);
}
double[] lats = new double[numPoints];
double[] lons = new double[numPoints];
// TODO: GeoUtils are potentially slow if we use small=false with heavy testing
boolean small = random().nextBoolean();
boolean haveRealDoc = false;
for (int id=0;id<numPoints;id++) {
int x = random().nextInt(20);
if (x == 17) {
// Some docs don't have a point:
lats[id] = Double.NaN;
if (VERBOSE) {
System.out.println(" id=" + id + " is missing");
}
continue;
}
if (id > 0 && x < 3 && haveRealDoc) {
// Reuse coordinates from a random earlier non-missing doc:
int oldID;
while (true) {
oldID = random().nextInt(id);
if (Double.isNaN(lats[oldID]) == false) {
break;
}
}
if (x == 0) {
// Identical lat to old point
lats[id] = lats[oldID];
lons[id] = randomLon(small);
if (VERBOSE) {
System.out.println(" id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat as doc=" + oldID + ")");
}
} else if (x == 1) {
// Identical lon to old point
lats[id] = randomLat(small);
lons[id] = lons[oldID];
if (VERBOSE) {
System.out.println(" id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lon as doc=" + oldID + ")");
}
} else {
assert x == 2;
// Fully identical point:
lats[id] = lats[oldID];
lons[id] = lons[oldID];
if (VERBOSE) {
System.out.println(" id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat/lon as doc=" + oldID + ")");
}
}
} else {
lats[id] = randomLat(small);
lons[id] = randomLon(small);
haveRealDoc = true;
if (VERBOSE) {
System.out.println(" id=" + id + " lat=" + lats[id] + " lon=" + lons[id]);
}
}
}
verify(small, lats, lons);
}
/**
 * Quantizes a longitude in degrees to its long-encoded form.
 *
 * @param val longitude in degrees
 * @return the scaled (quantized) encoding
 */
public long scaleLon(final double val) {
    final double shifted = val - GeoUtils.MIN_LON_INCL;
    return (long) (shifted * LON_SCALE);
}
/**
 * Quantizes a latitude in degrees to its long-encoded form.
 *
 * @param val latitude in degrees
 * @return the scaled (quantized) encoding
 */
public long scaleLat(final double val) {
    final double shifted = val - GeoUtils.MIN_LAT_INCL;
    return (long) (shifted * LAT_SCALE);
}
/**
 * Converts a long-encoded longitude back to degrees.
 *
 * @param val the scaled encoding
 * @return longitude in degrees
 */
public double unscaleLon(final long val) {
    final double degrees = val / LON_SCALE;
    return degrees + GeoUtils.MIN_LON_INCL;
}
/**
 * Converts a long-encoded latitude back to degrees.
 *
 * @param val the scaled encoding
 * @return latitude in degrees
 */
public double unscaleLat(final long val) {
    final double degrees = val / LAT_SCALE;
    return degrees + GeoUtils.MIN_LAT_INCL;
}
/**
 * Produces a random latitude, either inside the class's small test region
 * or anywhere on the globe, snapped to a value exactly representable in the
 * long encoding (via a scale/unscale round trip).
 *
 * @param small true to restrict to the small region around originLat
 * @return a quantization-exact latitude in degrees
 */
public double randomLat(boolean small) {
    final double raw;
    if (small) {
        raw = GeoUtils.normalizeLat(originLat + latRange * (random().nextDouble() - 0.5));
    } else {
        raw = -90 + 180.0 * random().nextDouble();
    }
    // Round-trip through the encoding so the value survives indexing unchanged.
    return unscaleLat(scaleLat(raw));
}
/**
 * Produces a random longitude, either inside the class's small test region
 * or anywhere on the globe, snapped to a value exactly representable in the
 * long encoding (via a scale/unscale round trip).
 *
 * @param small true to restrict to the small region around originLon
 * @return a quantization-exact longitude in degrees
 */
public double randomLon(boolean small) {
    final double raw;
    if (small) {
        raw = GeoUtils.normalizeLon(originLon + lonRange * (random().nextDouble() - 0.5));
    } else {
        raw = -180 + 360.0 * random().nextDouble();
    }
    // Round-trip through the encoding so the value survives indexing unchanged.
    return unscaleLon(scaleLon(raw));
}
/**
 * Builds a random query rectangle with distinct, properly ordered latitude
 * bounds; longitude bounds are ordered only when {@code canCrossDateLine}
 * is false (an "inverted" lon range represents a dateline-crossing box).
 *
 * @param small true to draw coordinates from the small test region
 * @param canCrossDateLine true to allow lon0 > lon1 (dateline crossing)
 * @return the random rectangle
 */
protected GeoRect randomRect(boolean small, boolean canCrossDateLine) {
    double lat0 = randomLat(small);
    double lat1 = randomLat(small);
    double lon0 = randomLon(small);
    double lon1 = randomLon(small);
    if (lat1 < lat0) {
        double x = lat0;
        lat0 = lat1;
        lat1 = x;
    }
    if (lat0 == lat1) {
        lat1 = randomLat(small);
        // BUGFIX: the freshly drawn lat1 may fall below lat0; re-establish
        // ordering so we never return an inverted (minLat > maxLat) rect.
        if (lat1 < lat0) {
            double x = lat0;
            lat0 = lat1;
            lat1 = x;
        }
    }
    if (lon0 == lon1) {
        lon1 = randomLon(small);
    }
    // Lon ordering is enforced after the equality re-pick, so it has no
    // equivalent inversion problem.
    if (canCrossDateLine == false && lon1 < lon0) {
        double x = lon0;
        lon0 = lon1;
        lon1 = x;
    }
    return new GeoRect(lon0, lon1, lat0, lat1);
}
/**
 * Hook allowing subclasses to customize the IndexWriterConfig before
 * indexing begins; the default implementation is a no-op.
 */
protected void initIndexWriterConfig(String field, IndexWriterConfig iwc) {
}
/** Adds the given lat/lon point to the document using the geo impl under test. */
protected abstract void addPointToDoc(String field, Document doc, double lat, double lon);
/** Creates a bounding-box query for the impl under test. */
protected abstract Query newBBoxQuery(String field, GeoRect bbox);
/** Creates a point-distance (circle) query for the impl under test. */
protected abstract Query newDistanceQuery(String field, double centerLat, double centerLon, double radiusMeters);
/** Creates a distance-range (annulus) query for the impl under test. */
protected abstract Query newDistanceRangeQuery(String field, double centerLat, double centerLon, double minRadiusMeters, double radiusMeters);
/** Creates a polygon query for the impl under test. */
protected abstract Query newPolygonQuery(String field, double[] lats, double[] lons);
/** Oracle for bbox containment; returns null if it's borderline case */
protected abstract Boolean rectContainsPoint(GeoRect rect, double pointLat, double pointLon);
/** Oracle for polygon (as rect) containment; returns null if it's borderline case */
protected abstract Boolean polyRectContainsPoint(GeoRect rect, double pointLat, double pointLon);
/** Oracle for circle containment; returns null if it's borderline case */
protected abstract Boolean circleContainsPoint(double centerLat, double centerLon, double radiusMeters, double pointLat, double pointLon);
/** Oracle for annulus containment; returns null if it's a borderline case. */
protected abstract Boolean distanceRangeContainsPoint(double centerLat, double centerLon, double minRadiusMeters, double radiusMeters, double pointLat, double pointLon);
/**
 * Runs one query, collects all hits into a bitset, and compares every doc
 * against the subclass-provided oracle (shouldMatch). Borderline cases
 * (oracle returns null) are skipped; mismatches are printed and fail the
 * test at the end.
 */
private static abstract class VerifyHits {
public void test(boolean small, IndexSearcher s, NumericDocValues docIDToID, Set<Integer> deleted, Query query, double[] lats, double[] lons) throws Exception {
int maxDoc = s.getIndexReader().maxDoc();
final FixedBitSet hits = new FixedBitSet(maxDoc);
s.search(query, new SimpleCollector() {
private int docBase;
@Override
public boolean needsScores() {
return false;
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
docBase = context.docBase;
}
@Override
public void collect(int doc) {
hits.set(docBase+doc);
}
});
boolean fail = false;
for(int docID=0;docID<maxDoc;docID++) {
// Map docID back to the original id via doc values (survives merges):
int id = (int) docIDToID.get(docID);
Boolean expected;
if (deleted.contains(id)) {
expected = false;
} else if (Double.isNaN(lats[id])) {
expected = false;
} else {
expected = shouldMatch(lats[id], lons[id]);
}
// null means it's a borderline case which is allowed to be wrong:
if (expected != null && hits.get(docID) != expected) {
if (expected) {
System.out.println(Thread.currentThread().getName() + ": id=" + id + " should match but did not");
} else {
System.out.println(Thread.currentThread().getName() + ": id=" + id + " should not match but did");
}
System.out.println(" small=" + small + " query=" + query +
" docID=" + docID + "\n lat=" + lats[id] + " lon=" + lons[id] +
"\n deleted?=" + deleted.contains(id));
if (Double.isNaN(lats[id]) == false) {
describe(docID, lats[id], lons[id]);
}
fail = true;
}
}
if (fail) {
fail("some hits were wrong");
}
}
/** Return true if we definitely should match, false if we definitely
* should not match, and null if it's a borderline case which might
* go either way. */
protected abstract Boolean shouldMatch(double lat, double lon);
/** Prints query-specific debug detail for a mismatched doc. */
protected abstract void describe(int docID, double lat, double lon);
}
/**
 * The main verification harness: indexes the given point set (NaN lat =
 * missing point), randomly deletes some docs, then runs many random bbox /
 * distance / polygon queries from several concurrent threads, checking each
 * result against the matching brute-force oracle via VerifyHits.
 *
 * @param small whether the points came from the small test region
 * @param lats per-id latitudes (NaN = doc has no point)
 * @param lons per-id longitudes, parallel to lats
 */
protected void verify(final boolean small, final double[] lats, final double[] lons) throws Exception {
IndexWriterConfig iwc = newIndexWriterConfig();
// Else we can get O(N^2) merging:
int mbd = iwc.getMaxBufferedDocs();
if (mbd != -1 && mbd < lats.length/100) {
iwc.setMaxBufferedDocs(lats.length/100);
}
initIndexWriterConfig(FIELD_NAME, iwc);
Directory dir;
if (lats.length > 100000) {
dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
} else {
dir = newDirectory();
}
final Set<Integer> deleted = new HashSet<>();
// RandomIndexWriter is too slow here:
IndexWriter w = new IndexWriter(dir, iwc);
for(int id=0;id<lats.length;id++) {
Document doc = new Document();
doc.add(newStringField("id", ""+id, Field.Store.NO));
doc.add(new NumericDocValuesField("id", id));
if (Double.isNaN(lats[id]) == false) {
addPointToDoc(FIELD_NAME, doc, lats[id], lons[id]);
}
w.addDocument(doc);
// Occasionally delete a random earlier doc; record it so the oracle knows:
if (id > 0 && random().nextInt(100) == 42) {
int idToDelete = random().nextInt(id);
w.deleteDocuments(new Term("id", ""+idToDelete));
deleted.add(idToDelete);
if (VERBOSE) {
System.out.println(" delete id=" + idToDelete);
}
}
}
if (random().nextBoolean()) {
w.forceMerge(1);
}
final IndexReader r = DirectoryReader.open(w, true);
w.close();
// We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
final IndexSearcher s = newSearcher(r, false);
// Make sure queries are thread safe:
int numThreads = TestUtil.nextInt(random(), 2, 5);
List<Thread> threads = new ArrayList<>();
final int iters = atLeast(75);
final CountDownLatch startingGun = new CountDownLatch(1);
final AtomicBoolean failed = new AtomicBoolean();
for(int i=0;i<numThreads;i++) {
Thread thread = new Thread() {
@Override
public void run() {
try {
_run();
} catch (Exception e) {
// Flag failure so sibling threads stop early; rethrow to surface it.
failed.set(true);
throw new RuntimeException(e);
}
}
private void _run() throws Exception {
startingGun.await();
NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
for (int iter=0;iter<iters && failed.get() == false;iter++) {
if (VERBOSE) {
System.out.println("\nTEST: iter=" + iter + " s=" + s);
}
Query query;
VerifyHits verifyHits;
if (random().nextBoolean()) {
// BBox: don't allow dateline crossing when testing small:
final GeoRect bbox = randomRect(small, small == false);
query = newBBoxQuery(FIELD_NAME, bbox);
verifyHits = new VerifyHits() {
@Override
protected Boolean shouldMatch(double pointLat, double pointLon) {
return rectContainsPoint(bbox, pointLat, pointLon);
}
@Override
protected void describe(int docID, double lat, double lon) {
}
};
} else if (random().nextBoolean()) {
// Distance
final boolean rangeQuery = random().nextBoolean();
final double centerLat = randomLat(small);
final double centerLon = randomLon(small);
final double radiusMeters;
final double minRadiusMeters;
if (small) {
// Approx 3 degrees lon at the equator:
radiusMeters = random().nextDouble() * 333000 + 1.0;
} else {
// So the query can cover at most 50% of the earth's surface:
radiusMeters = random().nextDouble() * GeoProjectionUtils.SEMIMAJOR_AXIS * Math.PI / 2.0 + 1.0;
}
// generate a random minimum radius between 1% and 95% the max radius
minRadiusMeters = (0.01 + 0.94 * random().nextDouble()) * radiusMeters;
if (VERBOSE) {
final DecimalFormat df = new DecimalFormat("#,###.00", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
System.out.println(" radiusMeters = " + df.format(radiusMeters)
+ ((rangeQuery == true) ? " minRadiusMeters = " + df.format(minRadiusMeters) : ""));
}
try {
if (rangeQuery == true) {
query = newDistanceRangeQuery(FIELD_NAME, centerLat, centerLon, minRadiusMeters, radiusMeters);
} else {
query = newDistanceQuery(FIELD_NAME, centerLat, centerLon, radiusMeters);
}
} catch (IllegalArgumentException e) {
// Impl rejects oversized radii; just skip this iteration.
if (e.getMessage().contains("exceeds maxRadius")) {
continue;
}
throw e;
}
verifyHits = new VerifyHits() {
@Override
protected Boolean shouldMatch(double pointLat, double pointLon) {
final double radius = radiusMeters;
final double minRadius = minRadiusMeters;
if (rangeQuery == false) {
return circleContainsPoint(centerLat, centerLon, radius, pointLat, pointLon);
} else {
return distanceRangeContainsPoint(centerLat, centerLon, minRadius, radius, pointLat, pointLon);
}
}
@Override
protected void describe(int docID, double pointLat, double pointLon) {
double distanceKM = SloppyMath.haversin(centerLat, centerLon, pointLat, pointLon);
System.out.println(" docID=" + docID + " centerLon=" + centerLon + " centerLat=" + centerLat
+ " pointLon=" + pointLon + " pointLat=" + pointLat + " distanceMeters=" + (distanceKM * 1000)
+ " vs" + ((rangeQuery == true) ? " minRadiusMeters=" + minRadiusMeters : "") + " radiusMeters=" + radiusMeters);
}
};
// TODO: get poly query working with dateline crossing too (how?)!
} else {
// TODO: poly query can't handle dateline crossing yet:
final GeoRect bbox = randomRect(small, false);
// Polygon: a 5-vertex closed ring tracing the bbox (locals shadow the
// method's lats/lons parameters on purpose).
double[] lats = new double[5];
double[] lons = new double[5];
lats[0] = bbox.minLat;
lons[0] = bbox.minLon;
lats[1] = bbox.maxLat;
lons[1] = bbox.minLon;
lats[2] = bbox.maxLat;
lons[2] = bbox.maxLon;
lats[3] = bbox.minLat;
lons[3] = bbox.maxLon;
lats[4] = bbox.minLat;
lons[4] = bbox.minLon;
query = newPolygonQuery(FIELD_NAME, lats, lons);
verifyHits = new VerifyHits() {
@Override
protected Boolean shouldMatch(double pointLat, double pointLon) {
return polyRectContainsPoint(bbox, pointLat, pointLon);
}
@Override
protected void describe(int docID, double lat, double lon) {
}
};
}
if (query != null) {
if (VERBOSE) {
System.out.println(" query=" + query);
}
verifyHits.test(small, s, docIDToID, deleted, query, lats, lons);
}
}
}
};
thread.setName("T" + i);
thread.start();
threads.add(thread);
}
startingGun.countDown();
for(Thread thread : threads) {
thread.join();
}
IOUtils.close(r, dir);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: AbstractCodePointMapping.java 1296483 2012-03-02 21:34:30Z gadams $ */
package org.apache.fop.fonts;
import java.util.Arrays;
import org.apache.xmlgraphics.fonts.Glyphs;
import org.apache.fop.util.CharUtilities;
/**
* Abstract base class for code point mapping classes (1-byte character encodings).
*/
public class AbstractCodePointMapping implements SingleByteEncoding {
private final String name;
//fast path: maps Unicode values < 256 directly to a code point (0 = unmapped)
private char[] latin1Map;
//sorted Unicode values >= 256 that have a mapping (parallel to codepoints)
private char[] characters;
//code points for the entries in characters (parallel array)
private char[] codepoints;
private char[] unicodeMap; //code point to Unicode char
private String[] charNameMap; //all character names in the encoding
/**
* Main constructor.
* @param name the name of the encoding
* @param table the table ([code point, unicode scalar value]+) with the mapping
*/
public AbstractCodePointMapping(String name, int[] table) {
this(name, table, null);
}
/**
* Extended constructor.
* @param name the name of the encoding
* @param table the table ([code point, unicode scalar value]+) with the mapping
* @param charNameMap all character names in the encoding (a value of null will be converted
* to ".notdef")
*/
public AbstractCodePointMapping(String name, int[] table, String[] charNameMap) {
this.name = name;
buildFromTable(table);
if (charNameMap != null) {
//Defensive copy, substituting .notdef for missing names:
this.charNameMap = new String[256];
for (int i = 0; i < 256; i++) {
String charName = charNameMap[i];
if (charName == null) {
this.charNameMap[i] = Glyphs.NOTDEF;
} else {
this.charNameMap[i] = charName;
}
}
}
}
/**
* Builds the internal lookup structures based on a given table.
* For each mapping pair, the first occurrence wins in both directions
* (latin1Map and unicodeMap are only written if still unset).
* @param table the table ([code point, unicode scalar value]+) with the mapping
*/
protected void buildFromTable(int[] table) {
int nonLatin1 = 0;
latin1Map = new char[256];
unicodeMap = new char[256];
Arrays.fill(unicodeMap, CharUtilities.NOT_A_CHARACTER);
//First pass: fill the direct latin-1 map and count entries >= 256:
for (int i = 0; i < table.length; i += 2) {
char unicode = (char)table[i + 1];
if (unicode < 256) {
if (latin1Map[unicode] == 0) {
latin1Map[unicode] = (char) table[i];
}
} else {
++nonLatin1;
}
if (unicodeMap[table[i]] == CharUtilities.NOT_A_CHARACTER) {
unicodeMap[table[i]] = unicode;
}
}
//Second pass: insertion-sort the non-latin-1 entries into parallel arrays,
//sorted by Unicode value so mapChar can binary-search them:
characters = new char[nonLatin1];
codepoints = new char[nonLatin1];
int top = 0;
for (int i = 0; i < table.length; i += 2) {
char c = (char) table[i + 1];
if (c >= 256) {
++top;
for (int j = top - 1; j >= 0; --j) {
if (j > 0 && characters[j - 1] >= c) {
characters[j] = characters[j - 1];
codepoints[j] = codepoints[j - 1];
} else {
characters[j] = c;
codepoints[j] = (char) table[i];
break;
}
}
}
}
}
/** {@inheritDoc} */
public String getName() {
return this.name;
}
/**
* {@inheritDoc}
* Latin-1 characters are resolved via the direct map; others via binary
* search over the sorted characters array.
*/
public final char mapChar(char c) {
if (c < 256) {
char latin1 = latin1Map[c];
if (latin1 > 0) {
return latin1;
}
}
int bot = 0;
int top = characters.length - 1;
while (top >= bot) {
int mid = (bot + top) / 2;
char mc = characters[mid];
if (c == mc) {
return codepoints[mid];
} else if (c < mc) {
top = mid - 1;
} else {
bot = mid + 1;
}
}
return NOT_FOUND_CODE_POINT;
}
/**
* Returns the main Unicode value that is associated with the given code point in the encoding.
* Note that multiple Unicode values can theoretically be mapped to one code point in the
* encoding.
* @param idx the code point in the encoding
* @return the Unicode value (or \uFFFF (NOT A CHARACTER) if no Unicode value is at that point)
*/
public final char getUnicodeForIndex(int idx) {
return this.unicodeMap[idx];
}
/** {@inheritDoc} */
public final char[] getUnicodeCharMap() {
//Defensive copy so callers cannot mutate internal state:
char[] copy = new char[this.unicodeMap.length];
System.arraycopy(this.unicodeMap, 0, copy, 0, this.unicodeMap.length);
return copy;
}
/**
* Returns the index of a character/glyph with the given name. Note that this
* method is relatively slow (linear scan) and should only be used for fallback operations.
* @param charName the character name
* @return the index of the character in the encoding or -1 if it doesn't exist
*/
public short getCodePointForGlyph(String charName) {
String[] names = this.charNameMap;
if (names == null) {
names = getCharNameMap();
}
for (short i = 0, c = (short)names.length; i < c; i++) {
if (names[i].equals(charName)) {
return i;
}
}
return -1;
}
/** {@inheritDoc} */
public String[] getCharNameMap() {
if (this.charNameMap != null) {
String[] copy = new String[this.charNameMap.length];
System.arraycopy(this.charNameMap, 0, copy, 0, this.charNameMap.length);
return copy;
} else {
//Note: this is suboptimal but will probably never be used.
//Derive names from the Unicode map via the glyph list:
String[] derived = new String[256];
Arrays.fill(derived, Glyphs.NOTDEF);
for (int i = 0; i < 256; i++) {
char c = getUnicodeForIndex(i);
if (c != CharUtilities.NOT_A_CHARACTER) {
String charName = Glyphs.charToGlyphName(c);
if (charName.length() > 0) {
derived[i] = charName;
}
}
}
return derived;
}
}
/** {@inheritDoc} */
@Override
public String toString() {
return getName();
}
}
| |
/**
* Copyright 2011 - 2013 OpenCDS.org
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.opencds.vmr.v1_0.mappings.in;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import javax.xml.bind.JAXBElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.omg.dss.DSSRuntimeExceptionFault;
import org.omg.dss.EvaluationExceptionFault;
import org.omg.dss.InvalidDriDataFormatExceptionFault;
import org.omg.dss.InvalidTimeZoneOffsetExceptionFault;
import org.omg.dss.RequiredDataNotProvidedExceptionFault;
import org.omg.dss.UnrecognizedLanguageExceptionFault;
import org.omg.dss.UnrecognizedScopedEntityExceptionFault;
import org.omg.dss.UnsupportedLanguageExceptionFault;
import org.opencds.common.exceptions.DataFormatException;
import org.opencds.common.exceptions.ImproperUsageException;
import org.opencds.common.exceptions.InvalidDataException;
import org.opencds.common.structures.TimingDataKM;
import org.opencds.common.utilities.AbsoluteTimeDifference;
import org.opencds.common.utilities.DateUtility;
import org.opencds.vmr.v1_0.internal.AdministrableSubstance;
import org.opencds.vmr.v1_0.internal.AdverseEvent;
import org.opencds.vmr.v1_0.internal.AppointmentProposal;
import org.opencds.vmr.v1_0.internal.AppointmentRequest;
import org.opencds.vmr.v1_0.internal.CDSInput;
import org.opencds.vmr.v1_0.internal.DeniedAdverseEvent;
import org.opencds.vmr.v1_0.internal.DeniedProblem;
import org.opencds.vmr.v1_0.internal.EncounterEvent;
import org.opencds.vmr.v1_0.internal.Entity;
import org.opencds.vmr.v1_0.internal.EvalTime;
import org.opencds.vmr.v1_0.internal.EvaluatedPerson;
import org.opencds.vmr.v1_0.internal.EvaluatedPersonAgeAtEvalTime;
import org.opencds.vmr.v1_0.internal.EvaluatedPersonRelationship;
import org.opencds.vmr.v1_0.internal.Facility;
import org.opencds.vmr.v1_0.internal.FocalPersonId;
import org.opencds.vmr.v1_0.internal.Goal;
import org.opencds.vmr.v1_0.internal.GoalProposal;
import org.opencds.vmr.v1_0.internal.MissedAppointment;
import org.opencds.vmr.v1_0.internal.ObservationOrder;
import org.opencds.vmr.v1_0.internal.ObservationProposal;
import org.opencds.vmr.v1_0.internal.ObservationResult;
import org.opencds.vmr.v1_0.internal.Organization;
import org.opencds.vmr.v1_0.internal.Person;
import org.opencds.vmr.v1_0.internal.Problem;
import org.opencds.vmr.v1_0.internal.ProcedureEvent;
import org.opencds.vmr.v1_0.internal.ProcedureOrder;
import org.opencds.vmr.v1_0.internal.ProcedureProposal;
import org.opencds.vmr.v1_0.internal.ScheduledAppointment;
import org.opencds.vmr.v1_0.internal.ScheduledProcedure;
import org.opencds.vmr.v1_0.internal.Specimen;
import org.opencds.vmr.v1_0.internal.SubstanceAdministrationEvent;
import org.opencds.vmr.v1_0.internal.SubstanceAdministrationOrder;
import org.opencds.vmr.v1_0.internal.SubstanceAdministrationProposal;
import org.opencds.vmr.v1_0.internal.SubstanceDispensationEvent;
import org.opencds.vmr.v1_0.internal.SupplyEvent;
import org.opencds.vmr.v1_0.internal.SupplyOrder;
import org.opencds.vmr.v1_0.internal.SupplyProposal;
import org.opencds.vmr.v1_0.internal.UnconductedObservation;
import org.opencds.vmr.v1_0.internal.UndeliveredProcedure;
import org.opencds.vmr.v1_0.internal.UndeliveredSubstanceAdministration;
import org.opencds.vmr.v1_0.internal.UndeliveredSupply;
import org.opencds.vmr.v1_0.internal.VMR;
import org.opencds.vmr.v1_0.mappings.mappers.AdministrableSubstanceMapper;
import org.opencds.vmr.v1_0.mappings.mappers.CDSInputMapper;
import org.opencds.vmr.v1_0.mappings.mappers.ClinicalStatementRelationshipMapper;
import org.opencds.vmr.v1_0.mappings.mappers.EntityMapper;
import org.opencds.vmr.v1_0.mappings.mappers.EntityRelationshipMapper;
import org.opencds.vmr.v1_0.mappings.mappers.EvaluatedPersonMapper;
import org.opencds.vmr.v1_0.mappings.mappers.EvaluatedPersonRelationshipMapper;
import org.opencds.vmr.v1_0.mappings.mappers.FacilityMapper;
import org.opencds.vmr.v1_0.mappings.mappers.OneObjectMapper;
import org.opencds.vmr.v1_0.mappings.mappers.OrganizationMapper;
import org.opencds.vmr.v1_0.mappings.mappers.PersonMapper;
import org.opencds.vmr.v1_0.mappings.mappers.SpecimenMapper;
import org.opencds.vmr.v1_0.mappings.mappers.VMRMapper;
import org.opencds.vmr.v1_0.mappings.utilities.MappingUtility;
/**
* <p>structural mapper to go from external XML data described by vmr.xsd in project opencds-vmr-v1_0-schema
* to internal form based on javaBeans in the project opencds-vmr-v1_0-internal.
*
* Note that this class also accomplishes the following:
* - Based on DOB, populates EvaluatedPersonAgeAtEvalTime (this may be extended in the future to
* not rely on the presence of a DOB and accept, e.g., from Observations about age)
*
* - Populates Concept lists
*
* <p/>
* <p>Copyright: Copyright (c) 2010</p>
* <p>Company: OpenCDS</p>
*
* @author David Shields, Kensaku Kawamoto, Daryl Chertcoff (developed mappers library)
* @version 2.0
* @date 09-11-2011
*
*/
public class BuildCDSInputFactLists
{
private static Log log = LogFactory.getLog(BuildCDSInputFactLists.class);
/**
 * Unmarshals the external CDSInput payload into internal fact lists, derives the
 * OpenCDS concept lists, and populates the supplied {@code allFactLists} map.
 *
 * <p>The actual output of the mapping is in {@code allFactLists}, which is a live
 * input/output parameter; the return value only identifies the focal person.
 *
 * @param cdsInput     the unmarshalled external payload; its value must be non-null
 * @param evalTime     evaluation time, recorded as an EvalTime fact and used downstream
 * @param allFactLists live input/output map that receives the populated fact lists
 * @param timingDataKM receives timing checkpoints for performance measurement
 * @return the focal person id extracted from the input
 * @throws InvalidDriDataFormatExceptionFault if the payload is absent or cannot be mapped;
 *         the other declared faults are propagated from downstream processing
 */
public static String buildFactLists(
    JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput> cdsInput,
    Date evalTime,
    Map<String, List<?>> allFactLists,
    TimingDataKM timingDataKM
)
    throws
        InvalidDriDataFormatExceptionFault,
        UnrecognizedLanguageExceptionFault,
        RequiredDataNotProvidedExceptionFault,
        UnsupportedLanguageExceptionFault,
        UnrecognizedScopedEntityExceptionFault,
        EvaluationExceptionFault,
        InvalidTimeZoneOffsetExceptionFault,
        DSSRuntimeExceptionFault
{
    MappingUtility mu = new MappingUtility();
    String focalPersonId = null;
    log.debug("buildFactLists");
    try {
        String subjectPersonId = null;
        FactLists factLists = new FactLists();
        factLists.clearAllFactLists();
        // Record the evaluation time itself as a fact.
        EvalTime evalTimeFact = new EvalTime();
        evalTimeFact.setEvalTimeValue(evalTime);
        factLists.evalTimeList.add(evalTimeFact);
        if (cdsInput.getValue() == null) {
            throw new InvalidDriDataFormatExceptionFault( "Error: No payload within the CDSInput." );
        } else {
            try {
                CDSInput internalCDSInput = new CDSInput();
                CDSInputMapper.pullIn(cdsInput.getValue(), internalCDSInput, mu);
                factLists.internalCDSInputList.add( internalCDSInput );
                focalPersonId = internalCDSInput.getFocalPersonId(); //for later reference, never changes for the entire VMR...
                log.debug("BuildCDSInputFactLists for focalPersonId=" + focalPersonId);
                FocalPersonId focalPersonIdFact = new FocalPersonId();
                focalPersonIdFact.setId(focalPersonId);
                factLists.focalPersonIdList.add(focalPersonIdFact);
                subjectPersonId = focalPersonId; //same as focal person initially, but will change for each "otherEvaluatedPerson" entry
            } catch (Exception e) {
                String unknownError = e.getMessage();
                // Log through the configured logger (with full stack trace) instead of stderr.
                log.error("Unknown error initializing BuildCDSInputFactLists", e);
                throw new InvalidDriDataFormatExceptionFault("Unknown error initializing BuildCDSInputFactLists: "
                    + unknownError + ", therefore unable to complete unmarshalling input Semantic Payload: " + cdsInput.toString() );
            }
            org.opencds.vmr.v1_0.schema.VMR vmrInput = cdsInput.getValue().getVmrInput();
            VMR internalVMR = new VMR();
            VMRMapper.pullIn( vmrInput, internalVMR, mu);
            factLists.internalVMRList.add( internalVMR );
            // ================= Patient =======================//
            if ( vmrInput.getPatient() != null ) {
                org.opencds.vmr.v1_0.schema.EvaluatedPerson inputPatient = vmrInput.getPatient();
                oneEvaluatedPerson(inputPatient, evalTime, subjectPersonId, focalPersonId, factLists);
            }
            // ================= OtherEvaluatedPersons =======================//
            if ( (vmrInput.getOtherEvaluatedPersons() != null) && (vmrInput.getOtherEvaluatedPersons().getEvaluatedPerson() != null)
                    && (vmrInput.getOtherEvaluatedPersons().getEvaluatedPerson().size() > 0) ) {
                List<org.opencds.vmr.v1_0.schema.EvaluatedPerson> input = vmrInput.getOtherEvaluatedPersons().getEvaluatedPerson();
                for ( org.opencds.vmr.v1_0.schema.EvaluatedPerson eachOtherEvaluatedPerson : input ) {
                    // Each "other" evaluated person becomes the subject of their own statements.
                    subjectPersonId = MappingUtility.iI2FlatId(eachOtherEvaluatedPerson.getId());
                    oneEvaluatedPerson(eachOtherEvaluatedPerson, evalTime, subjectPersonId, focalPersonId, factLists);
                }
            }
            // ================= EvaluatedPersonRelationships (for family history relationships, where relatedPersons also have their own vMR) =======================//
            if ( (vmrInput.getEvaluatedPersonRelationships() != null) && (vmrInput.getEvaluatedPersonRelationships().getEvaluatedPersonRelationship() != null)
                    && (vmrInput.getEvaluatedPersonRelationships().getEvaluatedPersonRelationship().size() > 0) ) {
                List<org.opencds.vmr.v1_0.schema.EntityRelationship> input = vmrInput.getEvaluatedPersonRelationships().getEvaluatedPersonRelationship();
                for ( org.opencds.vmr.v1_0.schema.EntityRelationship each : input ) {
                    EvaluatedPersonRelationship internalEvaluatedPersonRelationship = new EvaluatedPersonRelationshipMapper().pullIn(each, mu);
                    factLists.internalEvaluatedPersonRelationshipList.add( internalEvaluatedPersonRelationship );
                }
            }
            // ================= End of Building all Internal FactLists =======================//
            // ================= Populate OpenCDS Concepts through Post-Processing of Internal VMR =======================//
            try {
                // Timing checkpoint: fact-list construction finished here.
                timingDataKM.setFinishBuildFactListsTime(new AtomicLong(System.nanoTime()));
                ConceptLists conceptLists = new ConceptLists();
                conceptLists.clearAllConceptLists();
                BuildOpenCDSConceptLists.buildConceptLists(factLists, conceptLists, allFactLists);
            } catch (Exception e) {
                String err = e.getMessage();
                log.error("BuildOpenCDSConceptLists threw error", e);
                throw new InvalidDriDataFormatExceptionFault("BuildOpenCDSConceptLists threw error: " + err + "; " + e);
            }
            // ================= End Populating Code Concepts =======================//
            // ================= Begin Populating AllFactLists object =======================//
            factLists.populateAllFactLists(allFactLists);
            // ================= End Populating AllFactLists object =======================//
        }
    } catch (ImproperUsageException e) {
        String err = e.getMessage();
        log.error("ImproperUsageException error in BuildCDSInputFactLists", e);
        throw new InvalidDriDataFormatExceptionFault("ImproperUsageException error in BuildCDSInputFactLists: "
            + err + ", therefore unable to complete unmarshalling input Semantic Payload: " + cdsInput.toString() );
    } catch (DataFormatException e) {
        String err = e.getMessage();
        log.error("DataFormatException error in BuildCDSInputFactLists", e);
        throw new InvalidDriDataFormatExceptionFault("DataFormatException error in BuildCDSInputFactLists: "
            + err + ", therefore unable to complete unmarshalling input Semantic Payload: " + cdsInput.toString() );
    } catch (InvalidDataException e) {
        String err = e.getMessage();
        log.error("InvalidDataException error in BuildCDSInputFactLists", e);
        throw new InvalidDriDataFormatExceptionFault("InvalidDataException error in BuildCDSInputFactLists: "
            + err + ", therefore unable to complete unmarshalling input Semantic Payload: " + cdsInput.toString() );
    }
    log.debug("buildFactLists completed for " + focalPersonId);
    return focalPersonId;
}
/**
* Process one evaluated person, either the primary patient, or other evaluated persons
*
* @param inputPatient
* @param evalTime
* @param subjectPersonId
* @param focalPersonId
* @param factLists
* @throws ImproperUsageException
* @throws DataFormatException
* @throws InvalidDataException
*/
private static synchronized void oneEvaluatedPerson (
    org.opencds.vmr.v1_0.schema.EvaluatedPerson inputPatient,
    Date evalTime,
    String subjectPersonId,
    String focalPersonId,
    FactLists factLists
) throws ImproperUsageException, DataFormatException, InvalidDataException
{
    // Map the person itself; the mapper also populates the basic demographic data.
    EvaluatedPerson internalPatient = new EvaluatedPerson();
    EvaluatedPersonMapper.pullIn(inputPatient, internalPatient, null, null, subjectPersonId, focalPersonId, factLists);
    // ================= Demographic Data =======================//
    if ( inputPatient.getDemographics() != null ) {
        // Note that the EvaluatedPersonMapper has already populated all the basic Demographic data
        // if Birth Time present, populate EvaluatedPersonAgeAtEvalTime entries
        Date birthTime = null;
        if (inputPatient.getDemographics().getBirthTime() != null) {
            birthTime = MappingUtility.tS2DateInternal(inputPatient.getDemographics().getBirthTime());
            // NOTE(review): ages are keyed to focalPersonId even when this method is called
            // for an "otherEvaluatedPerson" — confirm subjectPersonId is not expected here.
            populateEvaluatedPersonAgeAtEvalTime(birthTime, evalTime, focalPersonId, factLists.internalPersonAgeAtEvalTimeList);
        }
    }
    factLists.internalEvaluatedPersonList.add(internalPatient);
    // ================= End Demographic Data =======================//
    // ================= Clinical Statement Relationships =======================//
    //Load these first, so that we can check them when we are loading clinical statements
    if (inputPatient.getClinicalStatementRelationships() != null)
    {
        org.opencds.vmr.v1_0.schema.EvaluatedPerson.ClinicalStatementRelationships inputClinicalStatementRelationships = inputPatient.getClinicalStatementRelationships();
        // The null check is embedded in the loop condition: zero iterations when the
        // wrapper element is present but its child list is absent.
        for ( int i = 0; ((inputClinicalStatementRelationships.getClinicalStatementRelationship() != null)
            && (i < inputClinicalStatementRelationships.getClinicalStatementRelationship().size() )); i++ ) {
            ClinicalStatementRelationshipMapper.pullIn(inputClinicalStatementRelationships.getClinicalStatementRelationship().get(i), factLists);
        }
    }
    // ================= End Clinical Statement Relationships =======================//
    // ================= Clinical Statements =======================//
    // NOTE(review): getClinicalStatements() is dereferenced without a null check below;
    // this assumes the schema makes the element mandatory — confirm against vmr.xsd.
    org.opencds.vmr.v1_0.schema.EvaluatedPerson.ClinicalStatements inputClinicalStatements = inputPatient.getClinicalStatements();
    //following "for" statements must be in same order as given in the schema
    // Each loop uses the same pattern: the wrapper-null check lives in the loop
    // condition, so an absent statement group simply yields zero iterations.
    for ( int i = 0; ((inputClinicalStatements.getAdverseEvents() != null)
        && (i < inputClinicalStatements.getAdverseEvents().getAdverseEvent().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getAdverseEvents().getAdverseEvent().get(i), new AdverseEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getDeniedAdverseEvents() != null)
        && (i < inputClinicalStatements.getDeniedAdverseEvents().getDeniedAdverseEvent().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getDeniedAdverseEvents().getDeniedAdverseEvent().get(i), new DeniedAdverseEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getAppointmentProposals() != null)
        && (i < inputClinicalStatements.getAppointmentProposals().getAppointmentProposal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getAppointmentProposals().getAppointmentProposal().get(i), new AppointmentProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getAppointmentRequests() != null)
        && (i < inputClinicalStatements.getAppointmentRequests().getAppointmentRequest().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getAppointmentRequests().getAppointmentRequest().get(i), new AppointmentRequest(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getEncounterEvents() != null)
        && (i < inputClinicalStatements.getEncounterEvents().getEncounterEvent().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getEncounterEvents().getEncounterEvent().get(i), new EncounterEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getMissedAppointments() != null)
        && (i < inputClinicalStatements.getMissedAppointments().getMissedAppointment().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getMissedAppointments().getMissedAppointment().get(i), new MissedAppointment(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getScheduledAppointments() != null)
        && (i < inputClinicalStatements.getScheduledAppointments().getScheduledAppointment().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getScheduledAppointments().getScheduledAppointment().get(i), new ScheduledAppointment(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getGoals() != null)
        && (i < inputClinicalStatements.getGoals().getGoal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getGoals().getGoal().get(i), new Goal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getGoalProposals() != null)
        && (i < inputClinicalStatements.getGoalProposals().getGoalProposal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getGoalProposals().getGoalProposal().get(i), new GoalProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getObservationOrders() != null)
        && (i < inputClinicalStatements.getObservationOrders().getObservationOrder().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getObservationOrders().getObservationOrder().get(i), new ObservationOrder(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getObservationProposals() != null)
        && (i < inputClinicalStatements.getObservationProposals().getObservationProposal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getObservationProposals().getObservationProposal().get(i), new ObservationProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getObservationResults() != null)
        && (i < inputClinicalStatements.getObservationResults().getObservationResult().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getObservationResults().getObservationResult().get(i), new ObservationResult(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getUnconductedObservations() != null)
        && (i < inputClinicalStatements.getUnconductedObservations().getUnconductedObservation().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getUnconductedObservations().getUnconductedObservation().get(i), new UnconductedObservation(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getDeniedProblems() != null)
        && (i < inputClinicalStatements.getDeniedProblems().getDeniedProblem().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getDeniedProblems().getDeniedProblem().get(i), new DeniedProblem(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getProblems() != null)
        && (i < inputClinicalStatements.getProblems().getProblem().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getProblems().getProblem().get(i), new Problem(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getProcedureEvents() != null)
        && (i < inputClinicalStatements.getProcedureEvents().getProcedureEvent().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getProcedureEvents().getProcedureEvent().get(i), new ProcedureEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getProcedureOrders() != null)
        && (i < inputClinicalStatements.getProcedureOrders().getProcedureOrder().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getProcedureOrders().getProcedureOrder().get(i), new ProcedureOrder(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getProcedureProposals() != null)
        && (i < inputClinicalStatements.getProcedureProposals().getProcedureProposal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getProcedureProposals().getProcedureProposal().get(i), new ProcedureProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getScheduledProcedures() != null)
        && (i < inputClinicalStatements.getScheduledProcedures().getScheduledProcedure().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getScheduledProcedures().getScheduledProcedure().get(i), new ScheduledProcedure(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getUndeliveredProcedures() != null)
        && (i < inputClinicalStatements.getUndeliveredProcedures().getUndeliveredProcedure().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getUndeliveredProcedures().getUndeliveredProcedure().get(i), new UndeliveredProcedure(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSubstanceAdministrationEvents() != null)
        && (i < inputClinicalStatements.getSubstanceAdministrationEvents().getSubstanceAdministrationEvent().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSubstanceAdministrationEvents().getSubstanceAdministrationEvent().get(i), new SubstanceAdministrationEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSubstanceAdministrationOrders() != null)
        && (i < inputClinicalStatements.getSubstanceAdministrationOrders().getSubstanceAdministrationOrder().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSubstanceAdministrationOrders().getSubstanceAdministrationOrder().get(i), new SubstanceAdministrationOrder(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSubstanceAdministrationProposals() != null)
        && (i < inputClinicalStatements.getSubstanceAdministrationProposals().getSubstanceAdministrationProposal().size() )); i++ )
    {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSubstanceAdministrationProposals().getSubstanceAdministrationProposal().get(i), new SubstanceAdministrationProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSubstanceDispensationEvents() != null)
        && (i < inputClinicalStatements.getSubstanceDispensationEvents().getSubstanceDispensationEvent().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSubstanceDispensationEvents().getSubstanceDispensationEvent().get(i), new SubstanceDispensationEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getUndeliveredSubstanceAdministrations() != null)
        && (i < inputClinicalStatements.getUndeliveredSubstanceAdministrations().getUndeliveredSubstanceAdministration().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getUndeliveredSubstanceAdministrations().getUndeliveredSubstanceAdministration().get(i), new UndeliveredSubstanceAdministration(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSupplyEvents() != null)
        && (i < inputClinicalStatements.getSupplyEvents().getSupplyEvent().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSupplyEvents().getSupplyEvent().get(i), new SupplyEvent(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSupplyOrders() != null)
        && (i < inputClinicalStatements.getSupplyOrders().getSupplyOrder().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSupplyOrders().getSupplyOrder().get(i), new SupplyOrder(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getSupplyProposals() != null)
        && (i < inputClinicalStatements.getSupplyProposals().getSupplyProposal().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getSupplyProposals().getSupplyProposal().get(i), new SupplyProposal(), subjectPersonId, focalPersonId, factLists);
    }
    for ( int i = 0; ((inputClinicalStatements.getUndeliveredSupplies() != null)
        && (i < inputClinicalStatements.getUndeliveredSupplies().getUndeliveredSupply().size() )); i++ ) {
        OneObjectMapper.pullInClinicalStatement(inputClinicalStatements.getUndeliveredSupplies().getUndeliveredSupply().get(i), new UndeliveredSupply(), subjectPersonId, focalPersonId, factLists);
    }
    // ================= End Clinical Statements =======================//
    // ================= Entity Lists (related to person's clinical statements) =======================//
    if (inputPatient.getEntityLists() != null)
    {
        org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists inputEntityLists = inputPatient.getEntityLists();
        if (inputEntityLists.getAdministrableSubstances() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.AdministrableSubstances administrableSubstances = inputEntityLists.getAdministrableSubstances();
            for ( int i = 0; ((administrableSubstances.getAdministrableSubstance() != null)
                && (i < administrableSubstances.getAdministrableSubstance().size() )); i++ ) {
                AdministrableSubstanceMapper.pullIn(administrableSubstances.getAdministrableSubstance().get(i), new AdministrableSubstance(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
        if (inputEntityLists.getEntities() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.Entities oneGroup = inputEntityLists.getEntities();
            for ( int i = 0; ((oneGroup.getEntity() != null)
                && (i < oneGroup.getEntity().size() )); i++ ) {
                EntityMapper.pullIn(oneGroup.getEntity().get(i), new Entity(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
        if (inputEntityLists.getFacilities() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.Facilities oneGroup = inputEntityLists.getFacilities();
            for ( int i = 0; ((oneGroup.getFacility() != null)
                && (i < oneGroup.getFacility().size() )); i++ ) {
                FacilityMapper.pullIn(oneGroup.getFacility().get(i), new Facility(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
        if (inputEntityLists.getOrganizations() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.Organizations oneGroup = inputEntityLists.getOrganizations();
            for ( int i = 0; ((oneGroup.getOrganization() != null)
                && (i < oneGroup.getOrganization().size() )); i++ ) {
                OrganizationMapper.pullIn(oneGroup.getOrganization().get(i), new Organization(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
        if (inputEntityLists.getPersons() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.Persons oneGroup = inputEntityLists.getPersons();
            for ( int i = 0; ((oneGroup.getPerson() != null)
                && (i < oneGroup.getPerson().size() )); i++ ) {
                PersonMapper.pullIn(oneGroup.getPerson().get(i), new Person(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
        if (inputEntityLists.getSpecimens() != null)
        {
            org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityLists.Specimens oneGroup = inputEntityLists.getSpecimens();
            for ( int i = 0; ((oneGroup.getSpecimen() != null)
                && (i < oneGroup.getSpecimen().size() )); i++ ) {
                SpecimenMapper.pullIn(oneGroup.getSpecimen().get(i), new Specimen(), null, null, subjectPersonId, focalPersonId, factLists);
            }
        }
    }
    // ================= End Entities =======================//
    // ================= Entity to Entity Relationships =======================//
    if (inputPatient.getEntityRelationships() != null)
    {
        org.opencds.vmr.v1_0.schema.EvaluatedPerson.EntityRelationships inputEntityRelationships = inputPatient.getEntityRelationships();
        for ( int i = 0; ((inputEntityRelationships.getEntityRelationship() != null)
            && (i < inputEntityRelationships.getEntityRelationship().size() )); i++ ) {
            org.opencds.vmr.v1_0.schema.EntityRelationship inputEntityRelationship = inputEntityRelationships.getEntityRelationship().get(i);
            EntityRelationshipMapper.pullIn(inputEntityRelationship.getSourceId(), inputEntityRelationship.getTargetEntityId(), inputEntityRelationship.getTargetRole(), inputEntityRelationship.getRelationshipTimeInterval(), factLists);
        }
    }
    // ================= End Entity to Entity Relationships =======================//
    // ================= Clinical Statement Entity In Role Relationships =======================//
    if (inputPatient.getClinicalStatementEntityInRoleRelationships() != null)
    {
        org.opencds.vmr.v1_0.schema.EvaluatedPerson.ClinicalStatementEntityInRoleRelationships inputClinicalStatementEntityInRoleRelationships = inputPatient.getClinicalStatementEntityInRoleRelationships();
        for ( int i = 0; ((inputClinicalStatementEntityInRoleRelationships.getClinicalStatementEntityInRoleRelationship() != null)
            && (i < inputClinicalStatementEntityInRoleRelationships.getClinicalStatementEntityInRoleRelationship().size() )); i++ ) {
            org.opencds.vmr.v1_0.schema.EntityRelationship inputEntityRelationship = inputClinicalStatementEntityInRoleRelationships.getClinicalStatementEntityInRoleRelationship().get(i);
            // ClinicalStatementEntityInRoleRelationshipMapper has been deprecated in favor of EntityRelationshipMapper des 2012-02-12
            // ClinicalStatementEntityInRoleRelationshipMapper.pullIn(inputClinicalStatementEntityInRoleRelationship, subjectPersonId, focalPersonId, factLists);
            EntityRelationshipMapper.pullIn(inputEntityRelationship.getSourceId(), inputEntityRelationship.getTargetEntityId(), inputEntityRelationship.getTargetRole(), inputEntityRelationship.getRelationshipTimeInterval(), factLists);
        }
    }
    // ================= End Clinical Statement Entity Relationships =======================//
    // ================= End of Building FactLists from One Evaluated Person =======================//
}
/**
 * Populates internalPersonAgeAtEvalTimeList with the subject person's age at evalTime,
 * expressed once per supported unit (year, month, week, day, hour, minute, second).
 * No entries are added unless all input values are non-null and evalTime is strictly
 * after birthTime.
 *
 * NOTE: precision of calculation of age is determined by the last parameter of the call to AbsoluteTimeDifference.
 * The following ages are calculated based on whole dates, ignoring all submitted time values (hours and lower):
 * year, month, week, day
 *
 * The following ages are calculated with the full submitted precision of the dateTime value:
 * hour, minute, second
 *
 * @param birthTime person's birth date/time; if null, the method is a no-op
 * @param evalTime evaluation date/time; must be strictly after birthTime
 * @param internalSubjectPersonId person id stamped on every generated age entry
 * @param internalPersonAgeAtEvalTimeList output list that receives one entry per age unit
 */
private static synchronized void populateEvaluatedPersonAgeAtEvalTime(Date birthTime, Date evalTime, String internalSubjectPersonId, List<EvaluatedPersonAgeAtEvalTime> internalPersonAgeAtEvalTimeList)
{
    // Guard clause: silently skip when any input is missing or evalTime is not after birthTime.
    if ((birthTime == null) || (evalTime == null) || (internalSubjectPersonId == null)
            || (internalPersonAgeAtEvalTimeList == null) || !evalTime.after(birthTime)) {
        return;
    }
    DateUtility dateUtility = DateUtility.getInstance();
    // Whole-date precision (time-of-day ignored) for year/month/day differences.
    AbsoluteTimeDifference tdYear = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.YEAR, true, Calendar.HOUR);
    AbsoluteTimeDifference tdMonth = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.MONTH, true, Calendar.HOUR);
    // NOTE: the week difference is derived from the day difference as days / 7.
    AbsoluteTimeDifference tdDay = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.DAY_OF_YEAR, true, Calendar.HOUR);
    // Full submitted precision for hour/minute/second differences.
    AbsoluteTimeDifference tdHour = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.HOUR, false, -1);
    AbsoluteTimeDifference tdMinute = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.MINUTE, false, -1);
    AbsoluteTimeDifference tdSecond = dateUtility.getAbsoluteTimeDifference(evalTime, birthTime, Calendar.SECOND, false, -1);
    // One entry per age unit, grouped so each unit's setup reads as a single step.
    // Integer.valueOf is used instead of the deprecated new Integer(...) constructor.
    EvaluatedPersonAgeAtEvalTime personAgeInYears = new EvaluatedPersonAgeAtEvalTime();
    personAgeInYears.setAge(Integer.valueOf((int) tdYear.getYearDifference()));
    personAgeInYears.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_YEAR);
    personAgeInYears.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInYears);
    EvaluatedPersonAgeAtEvalTime personAgeInMonths = new EvaluatedPersonAgeAtEvalTime();
    personAgeInMonths.setAge(Integer.valueOf((int) tdMonth.getMonthDifference()));
    personAgeInMonths.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_MONTH);
    personAgeInMonths.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInMonths);
    EvaluatedPersonAgeAtEvalTime personAgeInWeeks = new EvaluatedPersonAgeAtEvalTime();
    personAgeInWeeks.setAge(Integer.valueOf((int) (tdDay.getDayDifference() / 7))); // NOTE: week is expressed as days / 7
    personAgeInWeeks.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_WEEK);
    personAgeInWeeks.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInWeeks);
    EvaluatedPersonAgeAtEvalTime personAgeInDays = new EvaluatedPersonAgeAtEvalTime();
    personAgeInDays.setAge(Integer.valueOf((int) tdDay.getDayDifference()));
    personAgeInDays.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_DAY);
    personAgeInDays.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInDays);
    EvaluatedPersonAgeAtEvalTime personAgeInHours = new EvaluatedPersonAgeAtEvalTime();
    personAgeInHours.setAge(Integer.valueOf((int) tdHour.getHourDifference()));
    personAgeInHours.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_HOUR);
    personAgeInHours.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInHours);
    EvaluatedPersonAgeAtEvalTime personAgeInMinutes = new EvaluatedPersonAgeAtEvalTime();
    personAgeInMinutes.setAge(Integer.valueOf((int) tdMinute.getMinuteDifference()));
    personAgeInMinutes.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_MINUTE);
    personAgeInMinutes.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInMinutes);
    EvaluatedPersonAgeAtEvalTime personAgeInSeconds = new EvaluatedPersonAgeAtEvalTime();
    personAgeInSeconds.setAge(Integer.valueOf((int) tdSecond.getSecondDifference()));
    personAgeInSeconds.setAgeUnit(EvaluatedPersonAgeAtEvalTime.AGE_UNIT_SECOND);
    personAgeInSeconds.setPersonId(internalSubjectPersonId);
    internalPersonAgeAtEvalTimeList.add(personAgeInSeconds);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ql.execution.search.extractor;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.ql.QlIllegalArgumentException;
import org.elasticsearch.xpack.ql.type.DataType;
import org.elasticsearch.xpack.ql.type.DataTypes;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Extractor for ES fields. Works for both 'normal' fields but also nested ones (which require hitName to be set).
* The latter is used as metadata in assembling the results in the tabular response.
*/
/**
 * Extractor for ES fields. Works for both 'normal' fields but also nested ones (which require hitName to be set).
 * The latter is used as metadata in assembling the results in the tabular response.
 */
public abstract class AbstractFieldHitExtractor implements HitExtractor {

    // full dotted path of the field to extract; never null
    private final String fieldName;
    // dotted path of the enclosing nested document, or null for top-level fields
    private final String hitName;
    private final DataType dataType;
    private final ZoneId zoneId;
    // when true, a multi-valued field yields its first value instead of failing
    private final boolean arrayLeniency;

    protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId) {
        this(name, dataType, zoneId, null, false);
    }

    protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean arrayLeniency) {
        this(name, dataType, zoneId, null, arrayLeniency);
    }

    protected AbstractFieldHitExtractor(String name, DataType dataType, ZoneId zoneId, String hitName, boolean arrayLeniency) {
        this.fieldName = name;
        this.dataType = dataType;
        this.zoneId = zoneId;
        this.arrayLeniency = arrayLeniency;
        this.hitName = hitName;
        if (hitName != null) {
            // a nested hit name only makes sense as part of the full field path
            if (name.contains(hitName) == false) {
                throw new QlIllegalArgumentException("Hitname [{}] specified but not part of the name [{}]", hitName, name);
            }
        }
    }

    protected AbstractFieldHitExtractor(StreamInput in) throws IOException {
        fieldName = in.readString();
        String typeName = in.readOptionalString();
        dataType = typeName != null ? loadTypeFromName(typeName) : null;
        hitName = in.readOptionalString();
        arrayLeniency = in.readBoolean();
        zoneId = readZoneId(in);
    }

    protected DataType loadTypeFromName(String typeName) {
        return DataTypes.fromTypeName(typeName);
    }

    // Subclasses decide how (or whether) the zone id travels over the wire.
    protected abstract ZoneId readZoneId(StreamInput in) throws IOException;

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(fieldName);
        out.writeOptionalString(dataType == null ? null : dataType.typeName());
        out.writeOptionalString(hitName);
        out.writeBoolean(arrayLeniency);
        // NOTE: zoneId serialization is left to subclasses, mirroring readZoneId(StreamInput)
    }

    @Override
    public Object extract(SearchHit hit) {
        Object value = null;
        if (hitName != null) {
            // nested field: locate the enclosing nested document first
            value = unwrapFieldsMultiValue(extractNestedField(hit));
        } else {
            DocumentField field = hit.field(fieldName);
            if (field != null) {
                value = unwrapFieldsMultiValue(field.getValues());
            }
        }
        return value;
    }

    /*
     * For a path of fields like root.nested1.nested2.leaf where nested1 and nested2 are nested field types,
     * fieldName is root.nested1.nested2.leaf, while hitName is root.nested1.nested2
     * We first look for root.nested1.nested2 or root.nested1 or root in the SearchHit until we find something.
     * If the DocumentField lives under "root.nested1" the remaining path to search for (in the DocumentField itself) is nested2.
     * After this step is done, what remains to be done is just getting the leaf values.
     */
    @SuppressWarnings("unchecked")
    private Object extractNestedField(SearchHit hit) {
        Object value;
        DocumentField field;
        String tempHitname = hitName;
        List<String> remainingPath = new ArrayList<>();
        // first, search for the "root" DocumentField under which the remaining path of nested document values is
        while ((field = hit.field(tempHitname)) == null) {
            int indexOfDot = tempHitname.lastIndexOf(".");
            if (indexOfDot < 0) {// there is no such field in the hit
                return null;
            }
            remainingPath.add(0, tempHitname.substring(indexOfDot + 1));
            tempHitname = tempHitname.substring(0, indexOfDot);
        }
        // then dig into DocumentField's structure until we reach the field we are interested into
        if (remainingPath.size() > 0) {
            List<Object> values = field.getValues();
            Iterator<String> pathIterator = remainingPath.iterator();
            while (pathIterator.hasNext()) {
                String pathElement = pathIterator.next();
                Map<String, List<Object>> elements = (Map<String, List<Object>>) values.get(0);
                values = elements.get(pathElement);
                /*
                 * if this path is not found it means we hit another nested document (inner_root_1.inner_root_2.nested_field_2)
                 * something like this
                 * "root_field_1.root_field_2.nested_field_1" : [
                 * {
                 * "inner_root_1.inner_root_2.nested_field_2" : [
                 * {
                 * "leaf_field" : [
                 * "abc2"
                 * ]
                 * So, start re-building the path until the right one is found, ie inner_root_1.inner_root_2......
                 */
                while (values == null) {
                    pathElement += "." + pathIterator.next();
                    values = elements.get(pathElement);
                }
            }
            // strip the hitName prefix (plus the dot) to obtain the leaf sub-field name
            value = ((Map<String, Object>) values.get(0)).get(fieldName.substring(hitName.length() + 1));
        } else {
            value = field.getValues();
        }
        return value;
    }

    /**
     * Recursively unwraps lists/maps returned for a field until a scalar (or
     * custom-typed) value remains. Multi-valued fields either yield their first
     * element (arrayLeniency) or fail.
     */
    protected Object unwrapFieldsMultiValue(Object values) {
        if (values == null) {
            return null;
        }
        if (values instanceof Map && hitName != null) {
            // extract the sub-field from a nested field (dep.dep_name -> dep_name)
            return unwrapFieldsMultiValue(((Map<?, ?>) values).get(fieldName.substring(hitName.length() + 1)));
        }
        if (values instanceof List<?> list) {
            if (list.isEmpty()) {
                return null;
            } else {
                if (isPrimitive(list) == false) {
                    if (list.size() == 1 || arrayLeniency) {
                        return unwrapFieldsMultiValue(list.get(0));
                    } else {
                        throw new QlIllegalArgumentException("Arrays (returned by [{}]) are not supported", fieldName);
                    }
                }
            }
        }
        Object unwrapped = unwrapCustomValue(values);
        if (unwrapped != null) {
            return unwrapped;
        }
        return values;
    }

    protected abstract Object unwrapCustomValue(Object values);

    protected abstract boolean isPrimitive(List<?> list);

    @Override
    public String hitName() {
        return hitName;
    }

    public String fieldName() {
        return fieldName;
    }

    public ZoneId zoneId() {
        return zoneId;
    }

    public DataType dataType() {
        return dataType;
    }

    public boolean arrayLeniency() {
        return arrayLeniency;
    }

    @Override
    public String toString() {
        return fieldName + "@" + hitName + "@" + zoneId;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        AbstractFieldHitExtractor other = (AbstractFieldHitExtractor) obj;
        // hitName is nullable (see the two-/three-arg constructors), so the
        // null-safe Objects.equals must be used to avoid an NPE here.
        return fieldName.equals(other.fieldName)
            && Objects.equals(hitName, other.hitName)
            && arrayLeniency == other.arrayLeniency;
    }

    @Override
    public int hashCode() {
        // Objects.hash is null-tolerant, keeping hashCode consistent with equals
        return Objects.hash(fieldName, hitName, arrayLeniency);
    }
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.ResolvedTargets;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.events.StoredEventHandler;
import com.google.devtools.build.lib.packages.util.PackageLoadingTestCase;
import com.google.devtools.build.lib.pkgcache.LoadingOptions;
import com.google.devtools.build.lib.pkgcache.TargetProvider;
import com.google.devtools.build.lib.pkgcache.TestFilter;
import com.google.devtools.build.lib.skyframe.TestSuiteExpansionValue;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.EvaluationResult;
import com.google.devtools.build.skyframe.SkyKey;
import java.util.Collection;
import java.util.EnumSet;
import java.util.function.Predicate;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
/** Tests for test-target filtering (size, language, timeout) and test_suite expansion. */
@RunWith(JUnit4.class)
public class TestTargetUtilsTest extends PackageLoadingTestCase {
    private Target test1;
    private Target test2;
    private Target test1b;
    private Target suite;

    /** Sets up a scratch package with two small tests, one large test and a test_suite. */
    @Before
    public final void createTargets() throws Exception {
        scratch.file(
            "tests/BUILD",
            "py_test(name = 'small_test_1',",
            "        srcs = ['small_test_1.py'],",
            "        data = [':xUnit'],",
            "        size = 'small',",
            "        tags = ['tag1'])",
            "",
            "sh_test(name = 'small_test_2',",
            "        srcs = ['small_test_2.sh'],",
            "        data = ['//testing/shbase:googletest.sh'],",
            "        size = 'small',",
            "        tags = ['tag2'])",
            "",
            "sh_test(name = 'large_test_1',",
            "        srcs = ['large_test_1.sh'],",
            "        data = ['//testing/shbase:googletest.sh', ':xUnit'],",
            "        size = 'large',",
            "        tags = ['tag1'])",
            "",
            "py_binary(name = 'notest',",
            "        srcs = ['notest.py'])",
            "cc_library(name = 'xUnit', data = ['//tools:test_sharding_compliant'])",
            "",
            "test_suite( name = 'smallTests', tags=['small'])");
        test1 = getTarget("//tests:small_test_1");
        test2 = getTarget("//tests:small_test_2");
        test1b = getTarget("//tests:large_test_1");
        suite = getTarget("//tests:smallTests");
    }

    @Test
    public void testFilterBySize() throws Exception {
        // with both sizes allowed, all three tests pass the filter
        Predicate<Target> sizeFilter =
            TestFilter.testSizeFilter(EnumSet.of(TestSize.SMALL, TestSize.LARGE));
        assertThat(sizeFilter.test(test1)).isTrue();
        assertThat(sizeFilter.test(test2)).isTrue();
        assertThat(sizeFilter.test(test1b)).isTrue();
        // restricting to SMALL excludes only the large test
        sizeFilter = TestFilter.testSizeFilter(EnumSet.of(TestSize.SMALL));
        assertThat(sizeFilter.test(test1)).isTrue();
        assertThat(sizeFilter.test(test2)).isTrue();
        assertThat(sizeFilter.test(test1b)).isFalse();
    }

    @Test
    public void testFilterByLang() throws Exception {
        StoredEventHandler eventHandler = new StoredEventHandler();
        LoadingOptions options = new LoadingOptions();
        // two valid languages plus two unknown ones ('nonexistent', 'noexist')
        // that should each produce a warning event
        options.testLangFilterList = ImmutableList.of("nonexistent", "existent", "-noexist", "-exist");
        options.testSizeFilterSet = ImmutableSet.of();
        options.testTimeoutFilterSet = ImmutableSet.of();
        options.testTagFilterList = ImmutableList.of();
        TestFilter filter =
            TestFilter.forOptions(
                options, eventHandler, ImmutableSet.of("existent_test", "exist_test"));
        assertThat(eventHandler.getEvents()).hasSize(2);
        Package pkg = Mockito.mock(Package.class);
        RuleClass ruleClass = Mockito.mock(RuleClass.class);
        Rule mockRule =
            new Rule(
                pkg,
                null,
                ruleClass,
                Location.fromPathFragment(PathFragment.EMPTY_FRAGMENT),
                new AttributeContainer(ruleClass));
        // 'existent' is a positive filter, 'exist' a negative one
        Mockito.when(ruleClass.getName()).thenReturn("existent_library");
        assertThat(filter.apply(mockRule)).isTrue();
        Mockito.when(ruleClass.getName()).thenReturn("exist_library");
        assertThat(filter.apply(mockRule)).isFalse();
        assertThat(eventHandler.getEvents())
            .contains(Event.warn("Unknown language 'nonexistent' in --test_lang_filters option"));
        assertThat(eventHandler.getEvents())
            .contains(Event.warn("Unknown language 'noexist' in --test_lang_filters option"));
    }

    @Test
    public void testFilterByTimeout() throws Exception {
        // a test without an explicit timeout gets the default for its size
        // (small -> short), hence 'short_timeout' passes the SHORT filter below
        scratch.file(
            "timeouts/BUILD",
            "sh_test(name = 'long_timeout',",
            "        srcs = ['a.sh'],",
            "        size = 'small',",
            "        timeout = 'long')",
            "sh_test(name = 'short_timeout',",
            "        srcs = ['b.sh'],",
            "        size = 'small')",
            "sh_test(name = 'moderate_timeout',",
            "        srcs = ['c.sh'],",
            "        size = 'small',",
            "        timeout = 'moderate')");
        Target longTest = getTarget("//timeouts:long_timeout");
        Target shortTest = getTarget("//timeouts:short_timeout");
        Target moderateTest = getTarget("//timeouts:moderate_timeout");
        Predicate<Target> timeoutFilter =
            TestFilter.testTimeoutFilter(EnumSet.of(TestTimeout.SHORT, TestTimeout.LONG));
        assertThat(timeoutFilter.test(longTest)).isTrue();
        assertThat(timeoutFilter.test(shortTest)).isTrue();
        assertThat(timeoutFilter.test(moderateTest)).isFalse();
    }

    @Test
    public void testExpandTestSuites() throws Exception {
        assertExpandedSuites(Sets.newHashSet(test1, test2), Sets.newHashSet(test1, test2));
        assertExpandedSuites(Sets.newHashSet(test1, test2), Sets.newHashSet(suite));
        assertExpandedSuites(
            Sets.newHashSet(test1, test2, test1b), Sets.newHashSet(test1, suite, test1b));
        // The large test is returned as filtered from the test_suite rule, but should still be in
        // the result set as it's explicitly added.
        assertExpandedSuites(
            Sets.newHashSet(test1, test2, test1b), ImmutableSet.<Target>of(test1b, suite));
    }

    @Test
    public void testSkyframeExpandTestSuites() throws Exception {
        assertExpandedSuitesSkyframe(
            Sets.newHashSet(test1, test2), ImmutableSet.<Target>of(test1, test2));
        assertExpandedSuitesSkyframe(Sets.newHashSet(test1, test2), ImmutableSet.<Target>of(suite));
        assertExpandedSuitesSkyframe(
            Sets.newHashSet(test1, test2, test1b), ImmutableSet.<Target>of(test1, suite, test1b));
        // The large test is returned as filtered from the test_suite rule, but should still be in
        // the result set as it's explicitly added.
        assertExpandedSuitesSkyframe(
            Sets.newHashSet(test1, test2, test1b), ImmutableSet.<Target>of(test1b, suite));
    }

    @Test
    public void testExpandTestSuitesKeepGoing() throws Exception {
        reporter.removeHandler(failFastHandler);
        scratch.file("broken/BUILD", "test_suite(name = 'broken', tests = ['//missing:missing_test'])");
        ResolvedTargets<Target> actual =
            TestTargetUtils.expandTestSuites(
                getPackageManager(),
                reporter,
                Sets.newHashSet(getTarget("//broken")),
                /*strict=*/ false,
                /* keepGoing= */ true);
        assertThat(actual.hasError()).isTrue();
        assertThat(actual.getTargets()).isEmpty();
    }

    /** Expands {@code suites} eagerly and checks the result matches {@code expected}. */
    private void assertExpandedSuites(Iterable<Target> expected, Collection<Target> suites)
        throws Exception {
        ResolvedTargets<Target> actual =
            TestTargetUtils.expandTestSuites(
                getPackageManager(), reporter, suites, /*strict=*/ false, /* keepGoing= */ true);
        assertThat(actual.hasError()).isFalse();
        assertThat(actual.getTargets()).containsExactlyElementsIn(expected);
    }

    // maps a Target to its Label, for comparing skyframe results by label
    private static final Function<Target, Label> TO_LABEL =
        new Function<Target, Label>() {
            @Override
            public Label apply(Target input) {
                return input.getLabel();
            }
        };

    /** Expands {@code suites} through Skyframe and checks the resulting labels. */
    private void assertExpandedSuitesSkyframe(Iterable<Target> expected, Collection<Target> suites)
        throws Exception {
        ImmutableSet<Label> expectedLabels =
            ImmutableSet.copyOf(Iterables.transform(expected, TO_LABEL));
        ImmutableSet<Label> suiteLabels = ImmutableSet.copyOf(Iterables.transform(suites, TO_LABEL));
        SkyKey key = TestSuiteExpansionValue.key(suiteLabels);
        EvaluationResult<TestSuiteExpansionValue> result =
            getSkyframeExecutor()
                .getDriverForTesting()
                .evaluate(ImmutableList.of(key), false, 1, reporter);
        ResolvedTargets<Label> actual = result.get(key).getLabels();
        assertThat(actual.hasError()).isFalse();
        assertThat(actual.getTargets()).containsExactlyElementsIn(expectedLabels);
    }

    @Test
    public void testExpandTestSuitesInterrupted() throws Exception {
        reporter.removeHandler(failFastHandler);
        scratch.file("broken/BUILD", "test_suite(name = 'broken', tests = ['//missing:missing_test'])");
        // NOTE(review): if expandTestSuites returns normally no assertion inside the
        // try fires — consider adding fail() after the call to catch that case.
        try {
            TestTargetUtils.expandTestSuites(
                new TargetProvider() {
                    @Override
                    public Target getTarget(ExtendedEventHandler eventHandler, Label label)
                        throws InterruptedException {
                        // simulate an interrupt during target loading
                        throw new InterruptedException();
                    }
                },
                reporter,
                Sets.newHashSet(getTarget("//broken")),
                /*strict=*/ false,
                /* keepGoing= */ true);
        } catch (TargetParsingException e) {
            assertThat(e).hasMessageThat().isNotNull();
        }
        // the interrupt flag must be preserved for callers
        assertThat(Thread.currentThread().isInterrupted()).isTrue();
    }
}
| |
/*
* Copyright 2013 Tomasz Konopka.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package thesaurus.make;
import java.util.ArrayList;
import java.util.Comparator;
/**
 * A class storing a parsed psl entry from BLAT.
 *
 * This code is adapted from code from the Bamformatics project
 *
 *
 * @author tkonopka
 * @deprecated no longer needed because ThesaurusBlat is deprecated
 *
 */
@Deprecated
class PslEntry {

    // alignment statistics, straight from psl columns 1-8
    int match = 0;
    int mismatch = 0;
    int repmatch = 0;
    int Ns = 0;
    int Qgapcount = 0;
    int Qgapbases = 0;
    int Tgapcount = 0;
    int Tgapbases = 0;
    // columns 9-17: strand, query description, target description
    char strand = '+';
    String Qname = null;
    int Qsize = 0;
    int Qstart = 0;
    int Qend = 0;
    String Tname = null;
    int Tsize = 0;
    int Tstart = 0;
    int Tend = 0;
    // columns 18-21: block structure of the alignment
    int blockcount = 0;
    ArrayList<Integer> blockSizes = new ArrayList<>(8);
    ArrayList<Integer> qStarts = new ArrayList<>(8);
    ArrayList<Integer> tStarts = new ArrayList<>(8);
    // not strictly in the psl format, but can be included here
    byte[] sequence;
    byte[] qualities;

    /**
     * generic constructor that doesn't change any of the default values
     */
    public PslEntry() {
    }

    /**
     * Parses one line of BLAT psl output into this entry. On malformed input
     * (fewer than 21 tab-separated columns) a message is printed and the entry
     * keeps its default values.
     *
     * @param entry
     *
     * A long string, tab separated, output from blat
     *
     */
    public PslEntry(String entry) {
        // parse the long string into tab separated tokens
        String[] tokens = entry.split("\t");
        if (tokens.length < 21) {
            System.out.println("Invalid psl entry. Less than 21 columns");
            return;
        }
        // convert the tokens into values for the PslEntry
        match = Integer.parseInt(tokens[0]);
        mismatch = Integer.parseInt(tokens[1]);
        repmatch = Integer.parseInt(tokens[2]);
        Ns = Integer.parseInt(tokens[3]);
        Qgapcount = Integer.parseInt(tokens[4]);
        Qgapbases = Integer.parseInt(tokens[5]);
        Tgapcount = Integer.parseInt(tokens[6]);
        Tgapbases = Integer.parseInt(tokens[7]);
        strand = tokens[8].charAt(0);
        Qname = tokens[9];
        Qsize = Integer.parseInt(tokens[10]);
        Qstart = Integer.parseInt(tokens[11]);
        Qend = Integer.parseInt(tokens[12]);
        Tname = tokens[13];
        Tsize = Integer.parseInt(tokens[14]);
        Tstart = Integer.parseInt(tokens[15]);
        Tend = Integer.parseInt(tokens[16]);
        blockcount = Integer.parseInt(tokens[17]);
        String[] bstokens = tokens[18].split(",");
        String[] qstokens = tokens[19].split(",");
        String[] tstokens = tokens[20].split(",");
        // bound by the shortest list so ragged columns cannot cause an
        // ArrayIndexOutOfBoundsException; well-formed psl has equal lengths
        int numBlocks = Math.min(bstokens.length, Math.min(qstokens.length, tstokens.length));
        for (int i = 0; i < numBlocks; i++) {
            blockSizes.add(Integer.parseInt(bstokens[i]));
            qStarts.add(Integer.parseInt(qstokens[i]));
            tStarts.add(Integer.parseInt(tstokens[i]));
        }
        // trim the read-pair suffix /1 or /2 from the query name;
        // endsWith() is safe for names shorter than two characters
        // (the old substring-based check threw StringIndexOutOfBoundsException)
        if (Qname.endsWith("/1") || Qname.endsWith("/2")) {
            Qname = Qname.substring(0, Qname.length() - 2);
        }
    }

    /**
     * Re-serializes this entry into a tab-separated psl line
     * (block lists with trailing commas, as produced by BLAT).
     *
     * @return the psl-formatted representation of this entry
     */
    public String getEntryString() {
        StringBuilder sb = new StringBuilder(256);
        sb.append(match).append("\t");
        sb.append(mismatch).append("\t");
        sb.append(repmatch).append("\t");
        sb.append(Ns).append("\t");
        sb.append(Qgapcount).append("\t");
        sb.append(Qgapbases).append("\t");
        sb.append(Tgapcount).append("\t");
        sb.append(Tgapbases).append("\t");
        sb.append(strand).append("\t");
        sb.append(Qname).append("\t");
        sb.append(Qsize).append("\t");
        sb.append(Qstart).append("\t");
        sb.append(Qend).append("\t");
        sb.append(Tname).append("\t");
        sb.append(Tsize).append("\t");
        sb.append(Tstart).append("\t");
        sb.append(Tend).append("\t");
        sb.append(blockcount).append("\t");
        for (int i = 0; i < blockSizes.size(); i++) {
            sb.append(blockSizes.get(i)).append(",");
        }
        sb.append("\t");
        for (int i = 0; i < qStarts.size(); i++) {
            sb.append(qStarts.get(i)).append(",");
        }
        sb.append("\t");
        for (int i = 0; i < tStarts.size(); i++) {
            sb.append(tStarts.get(i)).append(",");
        }
        return sb.toString();
    }

    /**
     * Builds a SAM-style cigar string (M/I/D/S operations) from the psl block
     * structure. Assumes at least one alignment block is present.
     *
     * @return the cigar string, e.g. "5S10M2I8M"
     */
    public String makeCigar() {
        StringBuilder cigar = new StringBuilder(8);
        // keep track of the current position along the query
        int querypos = this.qStarts.get(0);
        int numblocks = this.blockSizes.size();
        // check if there is initial soft-clipping before the first block
        if (querypos > 0) {
            cigar.append(querypos).append("S");
        }
        // append the middle blocks; gaps between consecutive blocks become I/D ops
        for (int i = 1; i < numblocks; i++) {
            int nowqpos = this.qStarts.get(i);
            int nowtpos = this.tStarts.get(i);
            int prevtpos = this.tStarts.get(i - 1);
            int prevqpos = this.qStarts.get(i - 1);
            int prevblock = this.blockSizes.get(i - 1);
            // extra query bases -> insertion; extra target bases -> deletion
            int nowinsert = nowqpos - prevblock - prevqpos;
            int nowdelete = nowtpos - prevtpos - prevblock;
            // append the match cigar, which is the same always
            cigar.append(prevblock).append("M");
            // then append the insert and delete cigars
            if (nowinsert > 0) {
                // it's an insertion
                cigar.append(nowinsert).append("I");
            }
            if (nowdelete > 0) {
                // there is a deletion
                cigar.append(nowdelete).append("D");
            }
            querypos = nowqpos;
        }
        // append the last matching subsequence cigar
        cigar.append(this.blockSizes.get(numblocks - 1)).append("M");
        querypos += this.blockSizes.get(numblocks - 1);
        // check if there is final position clipping
        if (querypos < this.Qsize) {
            cigar.append(this.Qsize - querypos).append("S");
        }
        return cigar.toString();
    }
}
/**
 * comparator for Psl, used when sorting Psl entries, in decreasing score order.
 *
 * Typed as {@code Comparator<PslEntry>} (avoids the raw type and the casts);
 * the compiler-generated bridge method keeps raw-type callers working.
 *
 * @author tomasz
 */
class PslComparator implements Comparator<PslEntry> {

    @Override
    public int compare(PslEntry o1, PslEntry o2) {
        // reversed argument order gives a descending sort by match count;
        // Integer.compare avoids the subtraction-overflow pitfall
        return Integer.compare(o2.match, o1.match);
    }
}
| |
/*
* Copyright (C) 2012 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.mamasdelrio.android.widgets;
import org.javarosa.core.model.data.IAnswerData;
import org.javarosa.form.api.FormEntryPrompt;
import org.mamasdelrio.android.R;
import org.mamasdelrio.android.application.Collect;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.View;
import android.widget.Button;
import android.widget.TableLayout;
import android.widget.Toast;
/**
* <p>Use the ODK Sensors framework to print data to a connected printer.</p>
*
* <p>The default button text is "Print Label"
*
* <p>You may override the button text and the error text that is
* displayed when the app is missing by using jr:itext() values. The
* special itext form values are 'buttonText' and 'noPrinterErrorString',
* respectively.</p>
*
* <p>To use via XLSForm, specify a 'note' type with a 'calculation' that defines
* the data to be printed and with an 'appearance' as described below.
*
* <p>Within the XForms XML, to use this widget, define an appearance on the
* <input/> tag that begins "printer:" and then contains the intent
* action to launch. That intent starts the printer app. The data to print
* is sent via a broadcast intent to intentname.data The printer then pops
* a UI to initiate the actual printing (or change the destination printer).
* </p>
*
* <p>Implementation-wise, this widget is an ExStringWidget that is read-only.</p>
*
* <p>The ODK Sensors Zebra printer uses this appearance (intent):</p>
* <pre>
* "printer:org.opendatakit.sensors.ZebraPrinter"
* </pre>
*
* <p>The data that is printed should be defined in the calculate attribute
* of the bind. The structure of that string is a <br> separated list
* of values consisting of:</p>
* <ul><li>numeric barcode to emit (optional)</li>
* <li>string qrcode to emit (optional)</li>
* <li>text line 1 (optional)</li>
* <li>additional text line (repeat as needed)</li></ul>
*
* <p>E.g., if you wanted to emit a barcode of 123, a qrcode of "mycode" and
* two text lines of "line 1" and "line 2", you would define the calculate
* as:</p>
*
* <pre>
* <bind nodeset="/printerForm/printme" type="string" readonly="true()"
* calculate="concat('123','<br>','mycode','<br>','line 1','<br>','line 2')" />
* </pre>
*
* <p>Depending upon what you supply, the printer may print just a
* barcode, just a qrcode, just text, or some combination of all 3.</p>
*
* <p>Despite using <br> as a separator, the supplied Zebra
* printer does not recognize html.</p>
*
* <pre>
* <input appearance="ex:change.uw.android.TEXTANSWER" ref="/printerForm/printme" >
* </pre>
* <p>or, to customize the button text and error strings with itext:
* <pre>
* ...
* <bind nodeset="/printerForm/printme" type="string" readonly="true()" calculate="concat('<br>',
* /printerForm/some_text ,'<br>Text: ', /printerForm/shortened_text ,'<br>Integer: ',
* /printerForm/a_integer ,'<br>Decimal: ', /printerForm/a_decimal )"/>
* ...
* <itext>
* <translation lang="English">
* <text id="printAnswer">
* <value form="short">Print your label</value>
* <value form="long">Print your label</value>
* <value form="buttonText">Print now</value>
* <value form="noPrinterErrorString">ODK Sensors Zebra Printer is not installed!
* Please install ODK Sensors Framework and ODK Sensors Zebra Printer from Google Play.</value>
* </text>
* </translation>
* </itext>
* ...
* <input appearance="printer:org.opendatakit.sensors.ZebraPrinter" ref="/form/printme">
* <label ref="jr:itext('printAnswer')"/>
* </input>
* </pre>
*
* @author mitchellsundt@gmail.com
*
*/
public class ExPrinterWidget extends QuestionWidget implements IBinaryWidget {
private Button mLaunchIntentButton;
public ExPrinterWidget(Context context, FormEntryPrompt prompt) {
super(context, prompt);
TableLayout.LayoutParams params = new TableLayout.LayoutParams();
params.setMargins(7, 5, 7, 5);
String appearance = prompt.getAppearanceHint();
String[] attrs = appearance.split(":");
final String intentName = (attrs.length < 2 || attrs[1].length() == 0) ? "org.opendatakit.sensors.ZebraPrinter" : attrs[1];
final String buttonText;
final String errorString;
String v = mPrompt.getSpecialFormQuestionText("buttonText");
buttonText = (v != null) ? v : context.getString(R.string.launch_printer);
v = mPrompt.getSpecialFormQuestionText("noPrinterErrorString");
errorString = (v != null) ? v : context.getString(R.string.no_printer);
// set button formatting
mLaunchIntentButton = new Button(getContext());
mLaunchIntentButton.setId(QuestionWidget.newUniqueId());
mLaunchIntentButton.setText(buttonText);
mLaunchIntentButton.setTextSize(TypedValue.COMPLEX_UNIT_DIP, mAnswerFontsize);
mLaunchIntentButton.setPadding(20, 20, 20, 20);
mLaunchIntentButton.setLayoutParams(params);
mLaunchIntentButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
Collect.getInstance().getFormController().setIndexWaitingForData(mPrompt.getIndex());
firePrintingActivity(intentName);
} catch (ActivityNotFoundException e) {
Collect.getInstance().getFormController().setIndexWaitingForData(null);
Toast.makeText(getContext(),
errorString, Toast.LENGTH_SHORT)
.show();
}
}
});
// finish complex layout
addView(mLaunchIntentButton);
}
protected void firePrintingActivity(String intentName) throws ActivityNotFoundException {
String s = mPrompt.getAnswerText();
Collect.getInstance().getActivityLogger().logInstanceAction(this, "launchPrinter",
intentName, mPrompt.getIndex());
Intent i = new Intent(intentName);
((Activity) getContext()).startActivity(i);
String[] splits;
if ( s != null ) {
splits = s.split("<br>");
} else {
splits = null;
}
Bundle printDataBundle = new Bundle();
String e;
if (splits != null) {
if ( splits.length >= 1 ) {
e = splits[0];
if ( e.length() > 0) {
printDataBundle.putString("BARCODE", e);
}
}
if ( splits.length >= 2 ) {
e = splits[1];
if ( e.length() > 0) {
printDataBundle.putString("QRCODE", e);
}
}
if ( splits.length > 2 ) {
String[] text = new String[splits.length-2];
for ( int j = 2 ; j < splits.length ; ++j ) {
e = splits[j];
text[j-2] = e;
}
printDataBundle.putStringArray("TEXT-STRINGS", text);
}
}
//send the printDataBundle to the activity via broadcast intent
Intent bcastIntent = new Intent(intentName + ".data");
bcastIntent.putExtra("DATA", printDataBundle);
((Activity) getContext()).sendBroadcast(bcastIntent);
}
/**
 * Intentional no-op: this widget does not hold an editable answer of its
 * own (see {@code getAnswer()}, which just echoes the prompt's value), so
 * there is nothing to clear.
 */
@Override
public void clearAnswer() {
}
/**
 * Returns the prompt's current answer unchanged — this widget never edits
 * the answer itself; it only prints it.
 */
@Override
public IAnswerData getAnswer() {
return mPrompt.getAnswerValue();
}
/**
 * Allows answer to be set externally in {@link FormEntryActivity}.
 *
 * The supplied {@code answer} object is deliberately ignored here; this
 * widget only clears the form controller's waiting-for-data index so that
 * form entry can resume after the external printing activity returns.
 */
@Override
public void setBinaryData(Object answer) {
Collect.getInstance().getFormController().setIndexWaitingForData(null);
}
/**
 * Moves focus to this widget's single interactive element, the launch
 * button. Does not open the soft keyboard — there is no text input here.
 */
@Override
public void setFocus(Context context) {
// focus on launch button
mLaunchIntentButton.requestFocus();
}
/**
 * Reports whether this widget's prompt is the one the form controller is
 * currently waiting on for an external activity result.
 */
@Override
public boolean isWaitingForBinaryData() {
    Object pendingIndex = Collect.getInstance().getFormController().getIndexWaitingForData();
    return mPrompt.getIndex().equals(pendingIndex);
}
/**
 * Cancels the pending external-activity handshake by clearing the form
 * controller's waiting-for-data index.
 */
@Override
public void cancelWaitingForBinaryData() {
Collect.getInstance().getFormController().setIndexWaitingForData(null);
}
/**
 * Lets the framework handle Alt-modified key presses (used for system
 * shortcuts) instead of consuming them; all other keys go to the
 * superclass.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    // Redundant "== true" removed; the boolean reads directly.
    if (event.isAltPressed()) {
        return false;
    }
    return super.onKeyDown(keyCode, event);
}
/**
 * Forwards the long-click listener to the launch button, the widget's only
 * interactive element, so long-press menus work as on other widgets.
 */
@Override
public void setOnLongClickListener(OnLongClickListener l) {
mLaunchIntentButton.setOnLongClickListener(l);
}
/**
 * Cancels any in-progress long press on both the widget itself and the
 * launch button, keeping their gesture state consistent.
 */
@Override
public void cancelLongPress() {
super.cancelLongPress();
mLaunchIntentButton.cancelLongPress();
}
}
| |
/*
* Copyright (C) the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.jini.print.attribute.standard;
import javax.print.attribute.Attribute;
import javax.print.attribute.EnumSyntax;
/**
* <p>
* The "media-key" member attribute contains the name of the media represented
* as a keyword or name. Values MUST be the same as the keyword and name values
* for the "media" Job Template attribute and represent the same media, except
* for media size and input tray keywords (see section 6.3 in this document and
* [RFC2911] Appendix C) which MUST NOT be "media-key" values.
* </p><p>
* The value of this member attribute MUST be unique for each media supported by
* an IPP Printer instance, i.e., no two media instances can have the same
* "media-key" value on the same IPP Printer instance. However, the same
* "media-key" value can represent the same or different media on different IPP
* Printer instances. For example, the 'iso-a4-white' keyword might represent
* recycled 80 gm/mm on two Printer instances and non-recycled, 72 gm/mm on a
* third Printer instance. An administrator or a number of administrators within
* an organization MAY choose to have "media-key" values represent the same
* media instances across a set of Printers.
* </p><p>
* Note: Since the above requires that each media instance have a unique
* "media-key" value (if "media-key" attribute is supported), then the Printer
* automatically meets the requirement (see section 3.13) that each media
* instance have a unique combination of member attribute values.
* </p><p>
* Note: As with any combination of supported "media-col" member attributes, if
* a client supplies the "media-key" member attribute and other member
* attributes, the Printer will attempt to match all of the supplied member
* attributes, including the "media-key" value, following the algorithm defined
* in section 3.13. So if the supplied collection value does not match any
* supported "media-col" value, the Printer treats the "media-col" attribute as
* having an undefined attribute value. Thus, a client can ensure that the
* Printer maps a standard media name keyword to certain expected member
* attribute values.
* </p><p>
* The "media-key-supported" (1setOf (type3 keyword | name(MAX))) Printer
* attribute identifies the values of this "media-key" member attribute that the
* Printer supports.
* </p><p>
* For Printers that support a large number of media (and the "media-key"
* attribute), the burden of an administrator to define unique "media-key"
* values for each media instance could be quite large. Therefore, it is
* RECOMMENDED that such a Printer assign a unique "media-key" value in an
* IMPLEMENTATION-DEFINED manner for each media instance for which the
* administrator has not defined a "media-key" value, rather than refusing the
* media definition. The Printer also adds such generated values to its
* "media-key-supported" attribute. A client can supply such a Printer-generated
* value with either (1) the "media-key" member attribute or (2) the "media" Job
* Template attribute.
* </p>
*
* @see MediaCollection
* @see MediaKeySupported
*/
public class MediaKey extends EnumSyntax
        implements Attribute {

    private static final long serialVersionUID = 1L;

    // Standard media name keywords ([RFC2911] Appendix C unless noted).
    public static final MediaKey DEFAULT = new MediaKey(0);
    public static final MediaKey ISO_A4_WHITE = new MediaKey(1);
    public static final MediaKey ISO_A4_COLORED = new MediaKey(2);
    public static final MediaKey ISO_A4_TRANSPARENT = new MediaKey(3);
    public static final MediaKey ISO_A3_WHITE = new MediaKey(4);
    public static final MediaKey ISO_A3_COLORED = new MediaKey(5);
    public static final MediaKey ISO_A5_WHITE = new MediaKey(6);
    public static final MediaKey ISO_A5_COLORED = new MediaKey(7);
    public static final MediaKey ISO_B4_WHITE = new MediaKey(8);
    public static final MediaKey ISO_B4_COLORED = new MediaKey(9);
    public static final MediaKey ISO_B5_WHITE = new MediaKey(10);
    public static final MediaKey ISO_B5_COLORED = new MediaKey(11);
    public static final MediaKey JIS_B4_WHITE = new MediaKey(12);
    public static final MediaKey JIS_B4_COLORED = new MediaKey(13);
    public static final MediaKey JIS_B5_WHITE = new MediaKey(14);
    public static final MediaKey JIS_B5_COLORED = new MediaKey(15);
    public static final MediaKey NA_LETTER_WHITE = new MediaKey(16);
    public static final MediaKey NA_LETTER_COLORED = new MediaKey(17);
    public static final MediaKey NA_LETTER_TRANSPARENT = new MediaKey(18);
    public static final MediaKey NA_LEGAL_WHITE = new MediaKey(19);
    public static final MediaKey NA_LEGAL_COLORED = new MediaKey(20);
    public static final MediaKey ISO_B4_ENVELOPE = new MediaKey(21);
    public static final MediaKey ISO_B5_ENVELOPE = new MediaKey(22);
    public static final MediaKey ISO_C3_ENVELOPE = new MediaKey(23);
    public static final MediaKey ISO_C4_ENVELOPE = new MediaKey(24);
    public static final MediaKey ISO_C5_ENVELOPE = new MediaKey(25);
    public static final MediaKey ISO_C6_ENVELOPE = new MediaKey(26);
    public static final MediaKey ISO_DESIGNATED_LONG_ENVELOPE = new MediaKey(27);
    public static final MediaKey NA_10X13_ENVELOPE = new MediaKey(28);
    public static final MediaKey NA_9X12_ENVELOPE = new MediaKey(29);
    public static final MediaKey MONARCH_ENVELOPE = new MediaKey(30);
    public static final MediaKey NA_NUMBER_10_ENVELOPE = new MediaKey(31);
    public static final MediaKey NA_7X9_ENVELOPE = new MediaKey(32);
    public static final MediaKey NA_9X11_ENVELOPE = new MediaKey(33);
    public static final MediaKey NA_10X14_ENVELOPE = new MediaKey(34);
    public static final MediaKey NA_NUMBER_9_ENVELOPE = new MediaKey(35);
    public static final MediaKey NA_6X9_ENVELOPE = new MediaKey(36);
    public static final MediaKey NA_10X15_ENVELOPE = new MediaKey(37);
    public static final MediaKey EXECUTIVE_WHITE = new MediaKey(38);
    public static final MediaKey FOLIO_WHITE = new MediaKey(39);
    public static final MediaKey INVOICE_WHITE = new MediaKey(40);
    public static final MediaKey LEDGER_WHITE = new MediaKey(41);
    public static final MediaKey QUARTO_WHITE = new MediaKey(42);
    public static final MediaKey ISO_A0_WHITE = new MediaKey(43);
    public static final MediaKey ISO_A0_TRANSPARENT = new MediaKey(44);
    public static final MediaKey ISO_A0_TRANSLUCENT = new MediaKey(45);
    public static final MediaKey ISO_A1_WHITE = new MediaKey(46);
    public static final MediaKey ISO_A1_TRANSPARENT = new MediaKey(47);
    public static final MediaKey ISO_A1_TRANSLUCENT = new MediaKey(48);
    public static final MediaKey ISO_A2_WHITE = new MediaKey(49);
    public static final MediaKey ISO_A2_TRANSPARENT = new MediaKey(50);
    public static final MediaKey ISO_A2_TRANSLUCENT = new MediaKey(51);
    public static final MediaKey ISO_A3_TRANSPARENT = new MediaKey(52);
    public static final MediaKey ISO_A3_TRANSLUCENT = new MediaKey(53);
    public static final MediaKey ISO_A4_TRANSLUCENT = new MediaKey(54);
    public static final MediaKey ISO_A5_TRANSPARENT = new MediaKey(55);
    public static final MediaKey ISO_A5_TRANSLUCENT = new MediaKey(56);
    public static final MediaKey ISO_A6_WHITE = new MediaKey(57);
    public static final MediaKey ISO_A7_WHITE = new MediaKey(58);
    public static final MediaKey ISO_A8_WHITE = new MediaKey(59);
    public static final MediaKey ISO_A9_WHITE = new MediaKey(60);
    public static final MediaKey ISO_A10_WHITE = new MediaKey(61);
    public static final MediaKey ISO_B0_WHITE = new MediaKey(62);
    public static final MediaKey ISO_B1_WHITE = new MediaKey(63);
    public static final MediaKey ISO_B2_WHITE = new MediaKey(64);
    public static final MediaKey ISO_B3_WHITE = new MediaKey(65);
    public static final MediaKey ISO_B6_WHITE = new MediaKey(66);
    public static final MediaKey ISO_B7_WHITE = new MediaKey(67);
    public static final MediaKey ISO_B8_WHITE = new MediaKey(68);
    public static final MediaKey ISO_B9_WHITE = new MediaKey(69);
    public static final MediaKey ISO_B10_WHITE = new MediaKey(70);
    public static final MediaKey JIS_B0_WHITE = new MediaKey(71);
    public static final MediaKey JIS_B0_TRANSPARENT = new MediaKey(72);
    public static final MediaKey JIS_B0_TRANSLUCENT = new MediaKey(73);
    public static final MediaKey JIS_B1_WHITE = new MediaKey(74);
    public static final MediaKey JIS_B1_TRANSPARENT = new MediaKey(75);
    public static final MediaKey JIS_B1_TRANSLUCENT = new MediaKey(76);
    public static final MediaKey JIS_B2_WHITE = new MediaKey(77);
    public static final MediaKey JIS_B2_TRANSPARENT = new MediaKey(78);
    public static final MediaKey JIS_B2_TRANSLUCENT = new MediaKey(79);
    public static final MediaKey JIS_B3_WHITE = new MediaKey(80);
    public static final MediaKey JIS_B3_TRANSPARENT = new MediaKey(81);
    public static final MediaKey JIS_B3_TRANSLUCENT = new MediaKey(82);
    public static final MediaKey JIS_B4_TRANSPARENT = new MediaKey(83);
    public static final MediaKey JIS_B4_TRANSLUCENT = new MediaKey(84);
    public static final MediaKey JIS_B5_TRANSPARENT = new MediaKey(85);
    public static final MediaKey JIS_B5_TRANSLUCENT = new MediaKey(86);
    public static final MediaKey JIS_B6_WHITE = new MediaKey(87);
    public static final MediaKey JIS_B7_WHITE = new MediaKey(88);
    public static final MediaKey JIS_B8_WHITE = new MediaKey(89);
    public static final MediaKey JIS_B9_WHITE = new MediaKey(90);
    public static final MediaKey JIS_B10_WHITE = new MediaKey(91);
    public static final MediaKey A_WHITE = new MediaKey(92);
    public static final MediaKey A_TRANSPARENT = new MediaKey(93);
    public static final MediaKey A_TRANSLUCENT = new MediaKey(94);
    public static final MediaKey B_WHITE = new MediaKey(95);
    public static final MediaKey B_TRANSPARENT = new MediaKey(96);
    public static final MediaKey B_TRANSLUCENT = new MediaKey(97);
    public static final MediaKey C_WHITE = new MediaKey(98);
    public static final MediaKey C_TRANSPARENT = new MediaKey(99);
    public static final MediaKey C_TRANSLUCENT = new MediaKey(100);
    public static final MediaKey D_WHITE = new MediaKey(101);
    public static final MediaKey D_TRANSPARENT = new MediaKey(102);
    public static final MediaKey D_TRANSLUCENT = new MediaKey(103);
    public static final MediaKey E_WHITE = new MediaKey(104);
    public static final MediaKey E_TRANSPARENT = new MediaKey(105);
    public static final MediaKey E_TRANSLUCENT = new MediaKey(106);
    public static final MediaKey AXSYNCHRO_WHITE = new MediaKey(107);
    public static final MediaKey AXSYNCHRO_TRANSPARENT = new MediaKey(108);
    public static final MediaKey AXSYNCHRO_TRANSLUCENT = new MediaKey(109);
    public static final MediaKey BXSYNCHRO_WHITE = new MediaKey(110);
    public static final MediaKey BXSYNCHRO_TRANSPARENT = new MediaKey(111);
    public static final MediaKey BXSYNCHRO_TRANSLUCENT = new MediaKey(112);
    public static final MediaKey CXSYNCHRO_WHITE = new MediaKey(113);
    public static final MediaKey CXSYNCHRO_TRANSPARENT = new MediaKey(114);
    public static final MediaKey CXSYNCHRO_TRANSLUCENT = new MediaKey(115);
    public static final MediaKey DXSYNCHRO_WHITE = new MediaKey(116);
    public static final MediaKey DXSYNCHRO_TRANSPARENT = new MediaKey(117);
    public static final MediaKey DXSYNCHRO_TRANSLUCENT = new MediaKey(118);
    public static final MediaKey EXSYNCHRO_WHITE = new MediaKey(119);
    public static final MediaKey EXSYNCHRO_TRANSPARENT = new MediaKey(120);
    public static final MediaKey EXSYNCHRO_TRANSLUCENT = new MediaKey(121);
    public static final MediaKey ARCH_A_WHITE = new MediaKey(122);
    public static final MediaKey ARCH_A_TRANSPARENT = new MediaKey(123);
    public static final MediaKey ARCH_A_TRANSLUCENT = new MediaKey(124);
    public static final MediaKey ARCH_B_WHITE = new MediaKey(125);
    public static final MediaKey ARCH_B_TRANSPARENT = new MediaKey(126);
    public static final MediaKey ARCH_B_TRANSLUCENT = new MediaKey(127);
    public static final MediaKey ARCH_C_WHITE = new MediaKey(128);
    public static final MediaKey ARCH_C_TRANSPARENT = new MediaKey(129);
    public static final MediaKey ARCH_C_TRANSLUCENT = new MediaKey(130);
    public static final MediaKey ARCH_D_WHITE = new MediaKey(131);
    public static final MediaKey ARCH_D_TRANSPARENT = new MediaKey(132);
    public static final MediaKey ARCH_D_TRANSLUCENT = new MediaKey(133);
    public static final MediaKey ARCH_E_WHITE = new MediaKey(134);
    public static final MediaKey ARCH_E_TRANSPARENT = new MediaKey(135);
    public static final MediaKey ARCH_E_TRANSLUCENT = new MediaKey(136);
    public static final MediaKey ARCH_AXSYNCHRO_WHITE = new MediaKey(137);
    public static final MediaKey ARCH_AXSYNCHRO_TRANSPARENT = new MediaKey(138);
    public static final MediaKey ARCH_AXSYNCHRO_TRANSLUCENT = new MediaKey(139);
    public static final MediaKey ARCH_BXSYNCHRO_WHITE = new MediaKey(140);
    public static final MediaKey ARCH_BXSYNCHRO_TRANSPARENT = new MediaKey(141);
    public static final MediaKey ARCH_BXSYNCHRO_TRANSLUCENT = new MediaKey(142);
    public static final MediaKey ARCH_CXSYNCHRO_WHITE = new MediaKey(143);
    public static final MediaKey ARCH_CXSYNCHRO_TRANSPARENT = new MediaKey(144);
    public static final MediaKey ARCH_CXSYNCHRO_TRANSLUCENT = new MediaKey(145);
    public static final MediaKey ARCH_DXSYNCHRO_WHITE = new MediaKey(146);
    public static final MediaKey ARCH_DXSYNCHRO_TRANSPARENT = new MediaKey(147);
    public static final MediaKey ARCH_DXSYNCHRO_TRANSLUCENT = new MediaKey(148);
    public static final MediaKey ARCH_EXSYNCHRO_WHITE = new MediaKey(149);
    public static final MediaKey ARCH_EXSYNCHRO_TRANSPARENT = new MediaKey(150);
    public static final MediaKey ARCH_EXSYNCHRO_TRANSLUCENT = new MediaKey(151);
    public static final MediaKey ISO_A1X3_WHITE = new MediaKey(152);
    public static final MediaKey ISO_A1X3_TRANSPARENT = new MediaKey(153);
    public static final MediaKey ISO_A1X3_TRANSLUCENT = new MediaKey(154);
    public static final MediaKey ISO_A1X4_WHITE = new MediaKey(155);
    public static final MediaKey ISO_A1X4_TRANSPARENT = new MediaKey(156);
    public static final MediaKey ISO_A1X4_TRANSLUCENT = new MediaKey(157);
    public static final MediaKey ISO_A2X3_WHITE = new MediaKey(158);
    public static final MediaKey ISO_A2X3_TRANSPARENT = new MediaKey(159);
    public static final MediaKey ISO_A2X3_TRANSLUCENT = new MediaKey(160);
    public static final MediaKey ISO_A2X4_WHITE = new MediaKey(161);
    public static final MediaKey ISO_A2X4_TRANSPARENT = new MediaKey(162);
    public static final MediaKey ISO_A2X4_TRANSLUCENT = new MediaKey(163);
    public static final MediaKey ISO_A2X5_WHITE = new MediaKey(164);
    public static final MediaKey ISO_A2X5_TRANSPARENT = new MediaKey(165);
    public static final MediaKey ISO_A2X5_TRANSLUCENT = new MediaKey(166);
    public static final MediaKey ISO_A3X3_WHITE = new MediaKey(167);
    public static final MediaKey ISO_A3X3_TRANSPARENT = new MediaKey(168);
    public static final MediaKey ISO_A3X3_TRANSLUCENT = new MediaKey(169);
    public static final MediaKey ISO_A3X4_WHITE = new MediaKey(170);
    public static final MediaKey ISO_A3X4_TRANSPARENT = new MediaKey(171);
    public static final MediaKey ISO_A3X4_TRANSLUCENT = new MediaKey(172);
    public static final MediaKey ISO_A3X5_WHITE = new MediaKey(173);
    public static final MediaKey ISO_A3X5_TRANSPARENT = new MediaKey(174);
    public static final MediaKey ISO_A3X5_TRANSLUCENT = new MediaKey(175);
    public static final MediaKey ISO_A3X6_WHITE = new MediaKey(176);
    public static final MediaKey ISO_A3X6_TRANSPARENT = new MediaKey(177);
    public static final MediaKey ISO_A3X6_TRANSLUCENT = new MediaKey(178);
    public static final MediaKey ISO_A3X7_WHITE = new MediaKey(179);
    public static final MediaKey ISO_A3X7_TRANSPARENT = new MediaKey(180);
    public static final MediaKey ISO_A3X7_TRANSLUCENT = new MediaKey(181);
    public static final MediaKey ISO_A4X3_WHITE = new MediaKey(182);
    public static final MediaKey ISO_A4X3_TRANSPARENT = new MediaKey(183);
    public static final MediaKey ISO_A4X3_TRANSLUCENT = new MediaKey(184);
    public static final MediaKey ISO_A4X4_WHITE = new MediaKey(185);
    public static final MediaKey ISO_A4X4_TRANSPARENT = new MediaKey(186);
    public static final MediaKey ISO_A4X4_TRANSLUCENT = new MediaKey(187);
    public static final MediaKey ISO_A4X5_WHITE = new MediaKey(188);
    public static final MediaKey ISO_A4X5_TRANSPARENT = new MediaKey(189);
    public static final MediaKey ISO_A4X5_TRANSLUCENT = new MediaKey(190);
    public static final MediaKey ISO_A4X6_WHITE = new MediaKey(191);
    public static final MediaKey ISO_A4X6_TRANSPARENT = new MediaKey(192);
    public static final MediaKey ISO_A4X6_TRANSLUCENT = new MediaKey(193);
    public static final MediaKey ISO_A4X7_WHITE = new MediaKey(194);
    public static final MediaKey ISO_A4X7_TRANSPARENT = new MediaKey(195);
    public static final MediaKey ISO_A4X7_TRANSLUCENT = new MediaKey(196);
    public static final MediaKey ISO_A4X8_WHITE = new MediaKey(197);
    public static final MediaKey ISO_A4X8_TRANSPARENT = new MediaKey(198);
    public static final MediaKey ISO_A4X8_TRANSLUCENT = new MediaKey(199);
    public static final MediaKey ISO_A4X9_WHITE = new MediaKey(200);
    public static final MediaKey ISO_A4X9_TRANSPARENT = new MediaKey(201);
    public static final MediaKey ISO_A4X9_TRANSLUCENT = new MediaKey(202);
    public static final MediaKey ISO_A0XSYNCHRO_WHITE = new MediaKey(203);
    public static final MediaKey ISO_A0XSYNCHRO_TRANSPARENT = new MediaKey(204);
    public static final MediaKey ISO_A0XSYNCHRO_TRANSLUCENT = new MediaKey(205);
    public static final MediaKey ISO_A1XSYNCHRO_WHITE = new MediaKey(206);
    public static final MediaKey ISO_A1XSYNCHRO_TRANSPARENT = new MediaKey(207);
    public static final MediaKey ISO_A1XSYNCHRO_TRANSLUCENT = new MediaKey(208);
    public static final MediaKey ISO_A2XSYNCHRO_WHITE = new MediaKey(209);
    public static final MediaKey ISO_A2XSYNCHRO_TRANSPARENT = new MediaKey(210);
    public static final MediaKey ISO_A2XSYNCHRO_TRANSLUCENT = new MediaKey(211);
    public static final MediaKey ISO_A3XSYNCHRO_WHITE = new MediaKey(212);
    public static final MediaKey ISO_A3XSYNCHRO_TRANSPARENT = new MediaKey(213);
    public static final MediaKey ISO_A3XSYNCHRO_TRANSLUCENT = new MediaKey(214);
    public static final MediaKey ISO_A4XSYNCHRO_WHITE = new MediaKey(215);
    public static final MediaKey ISO_A4XSYNCHRO_TRANSPARENT = new MediaKey(216);
    public static final MediaKey ISO_A4XSYNCHRO_TRANSLUCENT = new MediaKey(217);
    public static final MediaKey AUTO_WHITE = new MediaKey(218);
    public static final MediaKey AUTO_TRANSPARENT = new MediaKey(219);
    public static final MediaKey AUTO_TRANSLUCENT = new MediaKey(220);
    public static final MediaKey AUTO_FIXED_SIZE_WHITE = new MediaKey(221);
    public static final MediaKey AUTO_FIXED_SIZE_TRANSPARENT = new MediaKey(222);
    public static final MediaKey AUTO_FIXED_SIZE_TRANSLUCENT = new MediaKey(223);
    public static final MediaKey AUTO_SYNCHRO_WHITE = new MediaKey(224);
    public static final MediaKey AUTO_SYNCHRO_TRANSPARENT = new MediaKey(225);
    public static final MediaKey AUTO_SYNCHRO_TRANSLUCENT = new MediaKey(226);
    public static final MediaKey CUSTOM1 = new MediaKey(227);//[PWG5100.3]
    public static final MediaKey CUSTOM2 = new MediaKey(228);//[PWG5100.3]
    public static final MediaKey CUSTOM3 = new MediaKey(229);//[PWG5100.3]
    public static final MediaKey CUSTOM4 = new MediaKey(230);//[PWG5100.3]
    public static final MediaKey CUSTOM5 = new MediaKey(231);//[PWG5100.3]
    public static final MediaKey CUSTOM6 = new MediaKey(232);//[PWG5100.3]
    public static final MediaKey CUSTOM7 = new MediaKey(233);//[PWG5100.3]
    public static final MediaKey CUSTOM8 = new MediaKey(234);//[PWG5100.3]
    public static final MediaKey CUSTOM9 = new MediaKey(235);//[PWG5100.3]
    public static final MediaKey CUSTOM10 = new MediaKey(236);//[PWG5100.3]
    public static final MediaKey BOND = new MediaKey(237);//[PWG5100.3]
    public static final MediaKey HEAVYWEIGHT = new MediaKey(238);//[PWG5100.3]
    public static final MediaKey LABELS = new MediaKey(239);//[PWG5100.3]
    public static final MediaKey LETTERHEAD = new MediaKey(240);//[PWG5100.3]
    public static final MediaKey PLAIN = new MediaKey(241);//[PWG5100.3]
    public static final MediaKey PRE_PRINTED = new MediaKey(242);//[PWG5100.3]
    public static final MediaKey PRE_PUNCHED = new MediaKey(243);//[PWG5100.3]
    public static final MediaKey RECYCLED = new MediaKey(244);//[PWG5100.3]
    public static final MediaKey TRANSPARENCY = new MediaKey(245);//[PWG5100.3]

    /**
     * Constructs a new media-key enumeration value with the given integer
     * value.
     *
     * @param value the integer value of this enumeration
     */
    protected MediaKey(int value) {
        super(value);
    }

    /**
     * Gets the printing attribute class to be used as the "category" for
     * this printing attribute value.
     *
     * @return the printing attribute class (category), {@code MediaKey.class}
     */
    @Override
    public Class<? extends Attribute> getCategory() {
        return MediaKey.class;
    }

    /**
     * Gets the name of the category of which this attribute value is an
     * instance.
     *
     * @return the IPP attribute name, {@code "media-key"}
     */
    @Override
    public String getName() {
        return "media-key";
    }

    /**
     * Returns the keyword string table for this enumeration. Entries are
     * positionally aligned with {@link #getEnumValueTable()}.
     *
     * Fix: entry 157 was "iso-a1x4- translucent" (stray space), which did
     * not match the IPP keyword format used by every other entry; it is
     * now "iso-a1x4-translucent".
     */
    @Override
    protected String[] getStringTable() {
        return new String[]{
            "default",
            "iso-a4-white",
            "iso-a4-colored",
            "iso-a4-transparent",
            "iso-a3-white",
            "iso-a3-colored",
            "iso-a5-white",
            "iso-a5-colored",
            "iso-b4-white",
            "iso-b4-colored",
            "iso-b5-white",
            "iso-b5-colored",
            "jis-b4-white",
            "jis-b4-colored",
            "jis-b5-white",
            "jis-b5-colored",
            "na-letter-white",
            "na-letter-colored",
            "na-letter-transparent",
            "na-legal-white",
            "na-legal-colored",
            "iso-b4-envelope",
            "iso-b5-envelope",
            "iso-c3-envelope",
            "iso-c4-envelope",
            "iso-c5-envelope",
            "iso-c6-envelope",
            "iso-designated-long-envelope",
            "na-10x13-envelope",
            "na-9x12-envelope",
            "monarch-envelope",
            "na-number-10-envelope",
            "na-7x9-envelope",
            "na-9x11-envelope",
            "na-10x14-envelope",
            "na-number-9-envelope",
            "na-6x9-envelope",
            "na-10x15-envelope",
            "executive-white",
            "folio-white",
            "invoice-white",
            "ledger-white",
            "quarto-white",
            "iso-a0-white",
            "iso-a0-transparent",
            "iso-a0-translucent",
            "iso-a1-white",
            "iso-a1-transparent",
            "iso-a1-translucent",
            "iso-a2-white",
            "iso-a2-transparent",
            "iso-a2-translucent",
            "iso-a3-transparent",
            "iso-a3-translucent",
            "iso-a4-translucent",
            "iso-a5-transparent",
            "iso-a5-translucent",
            "iso-a6-white",
            "iso-a7-white",
            "iso-a8-white",
            "iso-a9-white",
            "iso-a10-white",
            "iso-b0-white",
            "iso-b1-white",
            "iso-b2-white",
            "iso-b3-white",
            "iso-b6-white",
            "iso-b7-white",
            "iso-b8-white",
            "iso-b9-white",
            "iso-b10-white",
            "jis-b0-white",
            "jis-b0-transparent",
            "jis-b0-translucent",
            "jis-b1-white",
            "jis-b1-transparent",
            "jis-b1-translucent",
            "jis-b2-white",
            "jis-b2-transparent",
            "jis-b2-translucent",
            "jis-b3-white",
            "jis-b3-transparent",
            "jis-b3-translucent",
            "jis-b4-transparent",
            "jis-b4-translucent",
            "jis-b5-transparent",
            "jis-b5-translucent",
            "jis-b6-white",
            "jis-b7-white",
            "jis-b8-white",
            "jis-b9-white",
            "jis-b10-white",
            "a-white",
            "a-transparent",
            "a-translucent",
            "b-white",
            "b-transparent",
            "b-translucent",
            "c-white",
            "c-transparent",
            "c-translucent",
            "d-white",
            "d-transparent",
            "d-translucent",
            "e-white",
            "e-transparent",
            "e-translucent",
            "axsynchro-white",
            "axsynchro-transparent",
            "axsynchro-translucent",
            "bxsynchro-white",
            "bxsynchro-transparent",
            "bxsynchro-translucent",
            "cxsynchro-white",
            "cxsynchro-transparent",
            "cxsynchro-translucent",
            "dxsynchro-white",
            "dxsynchro-transparent",
            "dxsynchro-translucent",
            "exsynchro-white",
            "exsynchro-transparent",
            "exsynchro-translucent",
            "arch-a-white",
            "arch-a-transparent",
            "arch-a-translucent",
            "arch-b-white",
            "arch-b-transparent",
            "arch-b-translucent",
            "arch-c-white",
            "arch-c-transparent",
            "arch-c-translucent",
            "arch-d-white",
            "arch-d-transparent",
            "arch-d-translucent",
            "arch-e-white",
            "arch-e-transparent",
            "arch-e-translucent",
            "arch-axsynchro-white",
            "arch-axsynchro-transparent",
            "arch-axsynchro-translucent",
            "arch-bxsynchro-white",
            "arch-bxsynchro-transparent",
            "arch-bxsynchro-translucent",
            "arch-cxsynchro-white",
            "arch-cxsynchro-transparent",
            "arch-cxsynchro-translucent",
            "arch-dxsynchro-white",
            "arch-dxsynchro-transparent",
            "arch-dxsynchro-translucent",
            "arch-exsynchro-white",
            "arch-exsynchro-transparent",
            "arch-exsynchro-translucent",
            "iso-a1x3-white",
            "iso-a1x3-transparent",
            "iso-a1x3-translucent",
            "iso-a1x4-white",
            "iso-a1x4-transparent",
            "iso-a1x4-translucent",
            "iso-a2x3-white",
            "iso-a2x3-transparent",
            "iso-a2x3-translucent",
            "iso-a2x4-white",
            "iso-a2x4-transparent",
            "iso-a2x4-translucent",
            "iso-a2x5-white",
            "iso-a2x5-transparent",
            "iso-a2x5-translucent",
            "iso-a3x3-white",
            "iso-a3x3-transparent",
            "iso-a3x3-translucent",
            "iso-a3x4-white",
            "iso-a3x4-transparent",
            "iso-a3x4-translucent",
            "iso-a3x5-white",
            "iso-a3x5-transparent",
            "iso-a3x5-translucent",
            "iso-a3x6-white",
            "iso-a3x6-transparent",
            "iso-a3x6-translucent",
            "iso-a3x7-white",
            "iso-a3x7-transparent",
            "iso-a3x7-translucent",
            "iso-a4x3-white",
            "iso-a4x3-transparent",
            "iso-a4x3-translucent",
            "iso-a4x4-white",
            "iso-a4x4-transparent",
            "iso-a4x4-translucent",
            "iso-a4x5-white",
            "iso-a4x5-transparent",
            "iso-a4x5-translucent",
            "iso-a4x6-white",
            "iso-a4x6-transparent",
            "iso-a4x6-translucent",
            "iso-a4x7-white",
            "iso-a4x7-transparent",
            "iso-a4x7-translucent",
            "iso-a4x8-white",
            "iso-a4x8-transparent",
            "iso-a4x8-translucent",
            "iso-a4x9-white",
            "iso-a4x9-transparent",
            "iso-a4x9-translucent",
            "iso-a0xsynchro-white",
            "iso-a0xsynchro-transparent",
            "iso-a0xsynchro-translucent",
            "iso-a1xsynchro-white",
            "iso-a1xsynchro-transparent",
            "iso-a1xsynchro-translucent",
            "iso-a2xsynchro-white",
            "iso-a2xsynchro-transparent",
            "iso-a2xsynchro-translucent",
            "iso-a3xsynchro-white",
            "iso-a3xsynchro-transparent",
            "iso-a3xsynchro-translucent",
            "iso-a4xsynchro-white",
            "iso-a4xsynchro-transparent",
            "iso-a4xsynchro-translucent",
            "auto-white",
            "auto-transparent",
            "auto-translucent",
            "auto-fixed-size-white",
            "auto-fixed-size-transparent",
            "auto-fixed-size-translucent",
            "auto-synchro-white",
            "auto-synchro-transparent",
            "auto-synchro-translucent",
            "custom1",
            "custom2",
            "custom3",
            "custom4",
            "custom5",
            "custom6",
            "custom7",
            "custom8",
            "custom9",
            "custom10",
            "bond",
            "heavyweight",
            "labels",
            "letterhead",
            "plain",
            "pre-printed",
            "pre-punched",
            "recycled",
            "transparency",};
    }

    /**
     * Returns the enumeration value table, positionally aligned with
     * {@link #getStringTable()}.
     */
    @Override
    protected EnumSyntax[] getEnumValueTable() {
        return new EnumSyntax[]{
            DEFAULT,
            ISO_A4_WHITE,
            ISO_A4_COLORED,
            ISO_A4_TRANSPARENT,
            ISO_A3_WHITE,
            ISO_A3_COLORED,
            ISO_A5_WHITE,
            ISO_A5_COLORED,
            ISO_B4_WHITE,
            ISO_B4_COLORED,
            ISO_B5_WHITE,
            ISO_B5_COLORED,
            JIS_B4_WHITE,
            JIS_B4_COLORED,
            JIS_B5_WHITE,
            JIS_B5_COLORED,
            NA_LETTER_WHITE,
            NA_LETTER_COLORED,
            NA_LETTER_TRANSPARENT,
            NA_LEGAL_WHITE,
            NA_LEGAL_COLORED,
            ISO_B4_ENVELOPE,
            ISO_B5_ENVELOPE,
            ISO_C3_ENVELOPE,
            ISO_C4_ENVELOPE,
            ISO_C5_ENVELOPE,
            ISO_C6_ENVELOPE,
            ISO_DESIGNATED_LONG_ENVELOPE,
            NA_10X13_ENVELOPE,
            NA_9X12_ENVELOPE,
            MONARCH_ENVELOPE,
            NA_NUMBER_10_ENVELOPE,
            NA_7X9_ENVELOPE,
            NA_9X11_ENVELOPE,
            NA_10X14_ENVELOPE,
            NA_NUMBER_9_ENVELOPE,
            NA_6X9_ENVELOPE,
            NA_10X15_ENVELOPE,
            EXECUTIVE_WHITE,
            FOLIO_WHITE,
            INVOICE_WHITE,
            LEDGER_WHITE,
            QUARTO_WHITE,
            ISO_A0_WHITE,
            ISO_A0_TRANSPARENT,
            ISO_A0_TRANSLUCENT,
            ISO_A1_WHITE,
            ISO_A1_TRANSPARENT,
            ISO_A1_TRANSLUCENT,
            ISO_A2_WHITE,
            ISO_A2_TRANSPARENT,
            ISO_A2_TRANSLUCENT,
            ISO_A3_TRANSPARENT,
            ISO_A3_TRANSLUCENT,
            ISO_A4_TRANSLUCENT,
            ISO_A5_TRANSPARENT,
            ISO_A5_TRANSLUCENT,
            ISO_A6_WHITE,
            ISO_A7_WHITE,
            ISO_A8_WHITE,
            ISO_A9_WHITE,
            ISO_A10_WHITE,
            ISO_B0_WHITE,
            ISO_B1_WHITE,
            ISO_B2_WHITE,
            ISO_B3_WHITE,
            ISO_B6_WHITE,
            ISO_B7_WHITE,
            ISO_B8_WHITE,
            ISO_B9_WHITE,
            ISO_B10_WHITE,
            JIS_B0_WHITE,
            JIS_B0_TRANSPARENT,
            JIS_B0_TRANSLUCENT,
            JIS_B1_WHITE,
            JIS_B1_TRANSPARENT,
            JIS_B1_TRANSLUCENT,
            JIS_B2_WHITE,
            JIS_B2_TRANSPARENT,
            JIS_B2_TRANSLUCENT,
            JIS_B3_WHITE,
            JIS_B3_TRANSPARENT,
            JIS_B3_TRANSLUCENT,
            JIS_B4_TRANSPARENT,
            JIS_B4_TRANSLUCENT,
            JIS_B5_TRANSPARENT,
            JIS_B5_TRANSLUCENT,
            JIS_B6_WHITE,
            JIS_B7_WHITE,
            JIS_B8_WHITE,
            JIS_B9_WHITE,
            JIS_B10_WHITE,
            A_WHITE,
            A_TRANSPARENT,
            A_TRANSLUCENT,
            B_WHITE,
            B_TRANSPARENT,
            B_TRANSLUCENT,
            C_WHITE,
            C_TRANSPARENT,
            C_TRANSLUCENT,
            D_WHITE,
            D_TRANSPARENT,
            D_TRANSLUCENT,
            E_WHITE,
            E_TRANSPARENT,
            E_TRANSLUCENT,
            AXSYNCHRO_WHITE,
            AXSYNCHRO_TRANSPARENT,
            AXSYNCHRO_TRANSLUCENT,
            BXSYNCHRO_WHITE,
            BXSYNCHRO_TRANSPARENT,
            BXSYNCHRO_TRANSLUCENT,
            CXSYNCHRO_WHITE,
            CXSYNCHRO_TRANSPARENT,
            CXSYNCHRO_TRANSLUCENT,
            DXSYNCHRO_WHITE,
            DXSYNCHRO_TRANSPARENT,
            DXSYNCHRO_TRANSLUCENT,
            EXSYNCHRO_WHITE,
            EXSYNCHRO_TRANSPARENT,
            EXSYNCHRO_TRANSLUCENT,
            ARCH_A_WHITE,
            ARCH_A_TRANSPARENT,
            ARCH_A_TRANSLUCENT,
            ARCH_B_WHITE,
            ARCH_B_TRANSPARENT,
            ARCH_B_TRANSLUCENT,
            ARCH_C_WHITE,
            ARCH_C_TRANSPARENT,
            ARCH_C_TRANSLUCENT,
            ARCH_D_WHITE,
            ARCH_D_TRANSPARENT,
            ARCH_D_TRANSLUCENT,
            ARCH_E_WHITE,
            ARCH_E_TRANSPARENT,
            ARCH_E_TRANSLUCENT,
            ARCH_AXSYNCHRO_WHITE,
            ARCH_AXSYNCHRO_TRANSPARENT,
            ARCH_AXSYNCHRO_TRANSLUCENT,
            ARCH_BXSYNCHRO_WHITE,
            ARCH_BXSYNCHRO_TRANSPARENT,
            ARCH_BXSYNCHRO_TRANSLUCENT,
            ARCH_CXSYNCHRO_WHITE,
            ARCH_CXSYNCHRO_TRANSPARENT,
            ARCH_CXSYNCHRO_TRANSLUCENT,
            ARCH_DXSYNCHRO_WHITE,
            ARCH_DXSYNCHRO_TRANSPARENT,
            ARCH_DXSYNCHRO_TRANSLUCENT,
            ARCH_EXSYNCHRO_WHITE,
            ARCH_EXSYNCHRO_TRANSPARENT,
            ARCH_EXSYNCHRO_TRANSLUCENT,
            ISO_A1X3_WHITE,
            ISO_A1X3_TRANSPARENT,
            ISO_A1X3_TRANSLUCENT,
            ISO_A1X4_WHITE,
            ISO_A1X4_TRANSPARENT,
            ISO_A1X4_TRANSLUCENT,
            ISO_A2X3_WHITE,
            ISO_A2X3_TRANSPARENT,
            ISO_A2X3_TRANSLUCENT,
            ISO_A2X4_WHITE,
            ISO_A2X4_TRANSPARENT,
            ISO_A2X4_TRANSLUCENT,
            ISO_A2X5_WHITE,
            ISO_A2X5_TRANSPARENT,
            ISO_A2X5_TRANSLUCENT,
            ISO_A3X3_WHITE,
            ISO_A3X3_TRANSPARENT,
            ISO_A3X3_TRANSLUCENT,
            ISO_A3X4_WHITE,
            ISO_A3X4_TRANSPARENT,
            ISO_A3X4_TRANSLUCENT,
            ISO_A3X5_WHITE,
            ISO_A3X5_TRANSPARENT,
            ISO_A3X5_TRANSLUCENT,
            ISO_A3X6_WHITE,
            ISO_A3X6_TRANSPARENT,
            ISO_A3X6_TRANSLUCENT,
            ISO_A3X7_WHITE,
            ISO_A3X7_TRANSPARENT,
            ISO_A3X7_TRANSLUCENT,
            ISO_A4X3_WHITE,
            ISO_A4X3_TRANSPARENT,
            ISO_A4X3_TRANSLUCENT,
            ISO_A4X4_WHITE,
            ISO_A4X4_TRANSPARENT,
            ISO_A4X4_TRANSLUCENT,
            ISO_A4X5_WHITE,
            ISO_A4X5_TRANSPARENT,
            ISO_A4X5_TRANSLUCENT,
            ISO_A4X6_WHITE,
            ISO_A4X6_TRANSPARENT,
            ISO_A4X6_TRANSLUCENT,
            ISO_A4X7_WHITE,
            ISO_A4X7_TRANSPARENT,
            ISO_A4X7_TRANSLUCENT,
            ISO_A4X8_WHITE,
            ISO_A4X8_TRANSPARENT,
            ISO_A4X8_TRANSLUCENT,
            ISO_A4X9_WHITE,
            ISO_A4X9_TRANSPARENT,
            ISO_A4X9_TRANSLUCENT,
            ISO_A0XSYNCHRO_WHITE,
            ISO_A0XSYNCHRO_TRANSPARENT,
            ISO_A0XSYNCHRO_TRANSLUCENT,
            ISO_A1XSYNCHRO_WHITE,
            ISO_A1XSYNCHRO_TRANSPARENT,
            ISO_A1XSYNCHRO_TRANSLUCENT,
            ISO_A2XSYNCHRO_WHITE,
            ISO_A2XSYNCHRO_TRANSPARENT,
            ISO_A2XSYNCHRO_TRANSLUCENT,
            ISO_A3XSYNCHRO_WHITE,
            ISO_A3XSYNCHRO_TRANSPARENT,
            ISO_A3XSYNCHRO_TRANSLUCENT,
            ISO_A4XSYNCHRO_WHITE,
            ISO_A4XSYNCHRO_TRANSPARENT,
            ISO_A4XSYNCHRO_TRANSLUCENT,
            AUTO_WHITE,
            AUTO_TRANSPARENT,
            AUTO_TRANSLUCENT,
            AUTO_FIXED_SIZE_WHITE,
            AUTO_FIXED_SIZE_TRANSPARENT,
            AUTO_FIXED_SIZE_TRANSLUCENT,
            AUTO_SYNCHRO_WHITE,
            AUTO_SYNCHRO_TRANSPARENT,
            AUTO_SYNCHRO_TRANSLUCENT,
            CUSTOM1,
            CUSTOM2,
            CUSTOM3,
            CUSTOM4,
            CUSTOM5,
            CUSTOM6,
            CUSTOM7,
            CUSTOM8,
            CUSTOM9,
            CUSTOM10,
            BOND,
            HEAVYWEIGHT,
            LABELS,
            LETTERHEAD,
            PLAIN,
            PRE_PRINTED,
            PRE_PUNCHED,
            RECYCLED,
            TRANSPARENCY,};
    }
}
| |
/**
* MIT License
*
* Copyright (c) 2017 Justin Kunimune
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package maps;
import maps.Projection.Property;
import maps.Projection.Type;
import utils.Dixon;
import utils.Math2;
import utils.NumericalAnalysis;
/**
* Projections created by projecting onto and then unfolding some kind of polyhedron
*
* @author jkunimune
*/
public class Polyhedral {
private static final double ASIN_ONE_THD = Math.asin(1/3.); //the complement of the angular radius of a tetrahedron face
private static final double ATAN_ONE_HLF = Math.atan(1/2.); //the complement of the angular length of an icosahedron edge
/** Lee's conformal map of the sphere onto a tetrahedron, unfolded into a rectangle. */
public static final PolyhedralProjection LEE_TETRAHEDRAL_RECTANGULAR = new PolyhedralProjection(
"Lee Tetrahedral", 0b1001, Configuration.TETRAHEDRON_WIDE_FACE, Property.CONFORMAL,
4, null, "that really deserves more attention") {
public double[] faceProject(double lat, double lon) {
//stereographic image of the point, scaled and fed through the inverse Dixon elliptic function
final de.jtem.mfc.field.Complex z = de.jtem.mfc.field.Complex.fromPolar(
Math.pow(2, 5/6.)*Math.tan(Math.PI/4-lat/2), lon);
final de.jtem.mfc.field.Complex w = Dixon.invFunc(z);
return new double[] { w.abs()*2/Dixon.PERIOD_THIRD, w.arg() }; //I don't understand Dixon functions well enough to say whence the 1.132 comes
}
public double[] faceInverse(double r, double tht) {
//exact inverse of faceProject: Dixon's function, then an inverse stereographic step
final de.jtem.mfc.field.Complex w = de.jtem.mfc.field.Complex.fromPolar(
r*Dixon.PERIOD_THIRD/2, tht);
final de.jtem.mfc.field.Complex ans = Dixon.leeFunc(w).times(Math.pow(2, -5/6.));
return new double[] {
Math.PI/2 - 2*Math.atan(ans.abs()),
ans.arg() };
}
};
/** The same conformal tetrahedral map, unfolded into its classic triangular layout. */
public static final PolyhedralProjection LEE_TETRAHEDRAL_TRIANGULAR = new PolyhedralProjection(
"Lee Tetrahedral (triangular)", 0b1001, Configuration.TRIANGLE_FACE, Property.CONFORMAL,
2, null,
"in a triangle, because this is the form in which it was published, even though the rectangle is clearly better") {
public double[] faceProject(double lat, double lon) {
return LEE_TETRAHEDRAL_RECTANGULAR.faceProject(lat, lon); //same face math; only the Configuration (layout) differs
}
public double[] faceInverse(double r, double tht) {
return LEE_TETRAHEDRAL_RECTANGULAR.faceInverse(r, tht);
}
};
/** An equidistant tetrahedral projection of the author's own invention. */
public static final PolyhedralProjection TETRAGRAPH = new PolyhedralProjection(
"TetraGraph", 0b1111, Configuration.TETRAHEDRON_WIDE_FACE, Property.EQUIDISTANT,
2, null, "that I invented") {
public double[] faceProject(double lat, double lon) {
return new double[] {
Math.atan(1/Math.tan(lat)*Math.cos(lon))/Math.cos(lon) /Math.atan(Math.sqrt(2)),
lon };
}
public double[] faceInverse(double r, double tht) {
return new double[] {
Math.PI/2 - Math.atan(Math.tan(r*Math.cos(tht)*Math.atan(Math.sqrt(2)))/Math.cos(tht)),
tht };
}
};
/** An approximation of Narukawa's AuthaGraph projection, whose exact equations are unpublished. */
public static final PolyhedralProjection AUTHAGRAPH = new PolyhedralProjection(
"IMAGO (AuthaGraph)", "Authagraph is a hip new Japanese map that would be super great if "
+ "they actually published their equations. This is technically just an approximation, also known as the Infinitessimal "
+ "Mutated AuthaGraph Offspring.",
0b1011, Configuration.AUTHAGRAPH, Property.COMPROMISE, 3,
new String[] {"Power"}, new double[][] {{.5,1,.68}}) {
private final double[] POLE = {Math.toRadians(77), Math.toRadians(143), Math.toRadians(17)}; //oblique aspect that recreates AuthaGraph's layout
private double k; //the "Power" parameter, assigned in setParameters()
public void setParameters(double... params) {
this.k = params[0];
}
@Override
public double[] project(double lat, double lon) { //apply a pole shift to AuthaGraph
double[] relCoords = obliquifySphc(lat, lon, POLE);
return super.project(relCoords[0], relCoords[1]);
}
@Override
public double[] inverse(double x, double y) { //because AuthaGraph needs its obliquity, and I didn't want to program that into the Configuration
return obliquifyPlnr(super.inverse(x, y), POLE);
}
public double[] faceProject(double lat, double lon) {
final double tht = Math.atan((lon - Math.asin(Math.sin(lon)/Math.sqrt(3)))/Math.PI*Math.sqrt(12));
final double p = (Math.PI/2 - lat) / Math.atan(Math.sqrt(2)/Math.cos(lon));
return new double[] { Math.pow(p,k)*Math.sqrt(3)/Math.cos(tht), tht };
}
protected double[] faceInverse(double r, double th) {
//invert the tht(lon) relation numerically, since it has no closed form
final double lon = NumericalAnalysis.newtonRaphsonApproximation(th, th*2,
(l) -> Math.atan((l - Math.asin(Math.sin(l)/Math.sqrt(3)))/Math.PI*Math.sqrt(12)),
(l) -> (1-1/Math.sqrt(1+2*Math.pow(Math.cos(l),-2)))/Math.sqrt(Math.pow(Math.PI,2)/12+Math.pow(l-Math.asin(Math.sin(l)/Math.sqrt(3)),2)),
.001);
final double R = r / (Math.sqrt(3)/Math.cos(th));
return new double[] {
Math.PI/2 - Math.pow(R,1/k)*Math.atan(Math.sqrt(2)/Math.cos(lon)), lon };
}
};
/** A simplified, parametrised tetrahedral version of the AuthaGraph approximation above. */
public static final PolyhedralProjection AUTHAPOWER = new PolyhedralProjection(
"TetraPower", "A parametrised, simplified version of my AuthaGraph approximation.",
0b1011, Configuration.TETRAHEDRON_WIDE_VERTEX, Property.COMPROMISE, 4,
new String[] {"Power"}, new double[][] {{.5,1,.6}}) {
private double k; //the "Power" parameter, assigned in setParameters()
public void setParameters(double... params) {
this.k = params[0];
}
public double[] faceProject(double lat, double lon) {
final double tht = Math.atan((lon - Math.asin(Math.sin(lon)/Math.sqrt(3)))/Math.PI*Math.sqrt(12));
final double p = (Math.PI/2 - lat) / Math.atan(Math.sqrt(2)/Math.cos(lon));
return new double[] { Math.pow(p,k)*Math.sqrt(3)/Math.cos(tht), tht };
}
protected double[] faceInverse(double r, double th) {
final double lon = NumericalAnalysis.newtonRaphsonApproximation(th, th*2,
(l) -> Math.atan((l - Math.asin(Math.sin(l)/Math.sqrt(3)))/Math.PI*Math.sqrt(12)),
(l) -> (1-1/Math.sqrt(1+2*Math.pow(Math.cos(l),-2)))/Math.sqrt(Math.pow(Math.PI,2)/12+Math.pow(l-Math.asin(Math.sin(l)/Math.sqrt(3)),2)),
.001);
final double R = r / (Math.sqrt(3)/Math.cos(th));
return new double[] {
Math.PI/2 - Math.pow(R,1/k)*Math.atan(Math.sqrt(2)/Math.cos(lon)), lon };
}
};
/** An equal-area tetrahedral projection with an adjustable sinusoidal interruption. */
public static final PolyhedralProjection ACTUAUTHAGRAPH = new PolyhedralProjection(
"EquaHedral", "An interrupted authalic tetrahedral projection.",
0b1010, Configuration.TETRAHEDRON_WIDE_VERTEX, Property.EQUAL_AREA, 3,
new String[] {"Sinus length"}, new double[][] {{0, 60, 20}}) {
private double sig, a0, scale; //derived quantities, recomputed in setParameters()
public void setParameters(double... params) {
this.sig = params[0]/60;
this.a0 = 3 - 1.5*sig*sig;
this.scale = Math.sqrt(3)*a0/Math.PI;
}
public double[] faceProject(double lat, double lon) {
double bet = Math.atan((lon-Math.asin(Math.sin(lon)/Math.sqrt(3)))/(a0/2)*scale);
double f = (1-Math.sin(lat))/(1-1/Math.sqrt(1+2/Math2.cos2(lon)));
if (f < (3*sig*sig)/(2*a0)) { //sinus zone
double alf = Math.atan(2*Math.tan(Math.abs(bet)) - 1/Math.sqrt(3));
double rA = Math.sqrt(2*f*a0/Math2.cos2(alf))/2;
return toPolar(rA, alf, Math.signum(lon));
}
else { //primary zone
double rB = Math.sqrt(a0*f - a0 + 3)/Math.cos(bet);
return new double[] {rB, bet};
}
}
public double[] faceInverse(double r, double th) {
double bet;
double f;
if (r < sig*Math.sqrt(3)/2/Math.cos(Math.abs(th)-Math.PI/3)) //empty
return null;
else if (r*Math.cos(th) < sig*Math.sqrt(3)) { //sinus zone
double[] relCoords = fromPolar(r, th);
double rA = relCoords[0];
double alf = relCoords[1];
bet = Math.signum(th)*Math.atan(Math.tan(alf)/2+1/Math.sqrt(12));
f = Math.pow(rA, 2) * 2*Math2.cos2(alf)/a0;
}
else { //primary zone
bet = th;
f = (r*r*Math2.cos2(bet) - 1.5*sig*sig)/a0;
}
//recover the longitude numerically, then the latitude in closed form
double lon = NumericalAnalysis.newtonRaphsonApproximation(
a0/2*Math.tan(bet)/scale, bet*2,
(l) -> l - Math.asin(Math.sin(l)/Math.sqrt(3)),
(l) -> 1 - 1/Math.sqrt(1 + 2/Math2.cos2(l)), 1e-4);
double lat = Math.asin(1 - f*(1 - 1/Math.sqrt(1+2/Math2.cos2(lon))));
return new double[] {lat, lon};
}
//shift sinus-zone coordinates from the sinus vertex back to the face centrum
private double[] toPolar(double rA, double alf, double s) {
double x = rA*Math.cos(alf) + sig*Math.sqrt(3)/2;
double y = rA*Math.sin(alf) + sig/2;
return new double[] {Math.hypot(x, y), s*Math.atan2(y, x)};
}
//shift face-centrum coordinates to be relative to the sinus vertex
private double[] fromPolar(double rB, double bet) {
double x = rB*Math.cos(bet) - sig*Math.sqrt(3)/2;
double y = Math.abs(rB*Math.sin(bet)) - sig/2;
return new double[] {Math.hypot(x, y), Math.atan2(y, x)};
}
};
/** The limiting case of EquaHedral with the sinus length forced to zero (no interruption). */
public static final Projection VAN_LEEUWEN = new PolyhedralProjection(
"Van Leeuwen", "An uninterrupted equal-area tetrahedral projection. It's more accurately known as \"the Vertex-oriented great circle projection applied to a tetrahedron\", but the guy who copublished it with Leeuwen calls it \"the van Leeuwen projection\" on his website, so I think this is fine.",
0b1011, Configuration.TETRAHEDRON_WIDE_VERTEX, Property.EQUAL_AREA, 3) {
public double[] faceProject(double lat, double lon) {
ACTUAUTHAGRAPH.setParameters(0); //zero sinus length reduces EquaHedral to van Leeuwen
return ACTUAUTHAGRAPH.faceProject(lat, lon);
}
public double[] faceInverse(double r, double th) {
ACTUAUTHAGRAPH.setParameters(0);
return ACTUAUTHAGRAPH.faceInverse(r, th);
}
};
/** Fuller's icosahedral Dymaxion layout; pole orientation derived from Robert Gray's notes. */
public static final Projection DYMAXION = new PolyhedralProjection(
"Dymaxion", "A polyhedral projection that slices up the oceans as much as possible without slicing up any landmasses.",
0b1110, Configuration.DYMAXION, Property.COMPROMISE, 3) {
private final double[] POLE = {0.040158, -0.091549,-2.015269}; //I derived these numbers from [Robert Gray](http://www.rwgrayprojects.com/rbfnotes/maps/graymap4.html)
private final double X_0 = 0.75;
private final double Y_0 = -Math.sqrt(3)/4;
private final double sin36 = Math.sqrt(10-2*Math.sqrt(5))/4;
private final double cos36 = (1+Math.sqrt(5))/4;
@Override
public double[] project(double lat, double lon) { //apply a pole shift and Cartesian shift to Dymaxion
double[] coords = obliquifySphc(lat, lon, POLE);
coords = super.project(coords[0], coords[1]);
return new double[] {coords[0] + X_0, coords[1] + Y_0};
}
@Override
public double[] inverse(double x, double y) { //because Dymaxion needs its obliquity, and I didn't want to program that into the Configuration
double[] coords = super.inverse(x - X_0, y - Y_0);
if (coords == null) return null;
return obliquifyPlnr(coords, POLE);
}
public double[] faceProject(double lat, double lon) {
double xG = Math.cos(lon)/Math.tan(lat)/cos36; //normalised gnomonic coordinates
double yG = Math.sin(lon)/Math.tan(lat)/sin36;
double a = Math.asin((xG+yG)/(2*Math.sqrt(1+xG*xG))) + Math.atan(xG); //angular distance up each side of the triangle
double b = Math.asin((xG-yG)/(2*Math.sqrt(1+xG*xG))) + Math.atan(xG);
double x = (a + b)/(2*Math.sqrt(3)); //final cartesian coordinates in radians
double y = (a - b)/2;
return new double[] {Math.hypot(x,y)/Math.atan(2), Math.atan2(y,x)}; //scale to fit to layout, where side length is 1
}
public double[] faceInverse(double r, double th) {
if (Math.abs(th) > Math.PI/6+1e-15) throw new IllegalArgumentException("Wait, what?"+th);
double x = r*Math.cos(th)*Math.atan(2); //cartesian coordinates in radians
double y = r*Math.sin(th)*Math.atan(2);
double a = Math.sqrt(3)*x + y; //angular distance up each side of the triangle
double b = Math.sqrt(3)*x - y;
double xG = cos36*(Math.sin(a) + Math.sin(b))/(1 + Math.cos(a) + Math.cos(b)); //unnormalised gnomonic coordinates
double yG = sin36*
(Math.sin(a) - Math.sin(b) + 2*Math.sin(a-b))/(1 + Math.cos(a) + Math.cos(b));
return new double[] {Math.atan(1/Math.hypot(xG, yG)), Math.atan2(yG, xG)}; //inverse gnomonic projection
}
};
/**
* A base for polyhedral Projections
*
* @author jkunimune
*/
private static abstract class PolyhedralProjection extends Projection {
private final Configuration configuration; //the face layout shared by project() and inverse()
public PolyhedralProjection(
String name, int fisc, Configuration config, Property property, int rating,
String adjective, String addendum) {
super(name, config.width, config.height, fisc, config.type, property, rating,
adjective, addendum);
this.configuration = config;
}
public PolyhedralProjection(
String name, String description, int fisc, Configuration config, Property property,
int rating) {
super(name, description, config.width, config.height, fisc, config.type, property,
rating);
this.configuration = config;
}
public PolyhedralProjection(
String name, String description, int fisc, Configuration config, Property property,
int rating, String[] paramNames, double[][] paramValues) {
super(name, description, config.width, config.height, fisc, config.type, property,
rating, paramNames, paramValues);
this.configuration = config;
}
protected abstract double[] faceProject(double lat, double lon); //the projection from spherical to polar within a face
protected abstract double[] faceInverse(double x, double y); //I think you can guess
/**
* Projects a point on the globe by finding the face it belongs to, applying the
* face projection in polar coordinates, and placing the result on the map plane.
*/
public double[] project(double lat, double lon) {
final int numSym = configuration.sphereSym; //we're about to be using this variable a lot
double latR = Double.NEGATIVE_INFINITY;
double lonR = Double.NEGATIVE_INFINITY;
double[] centrum = null;
for (double[] testCentrum: configuration.centrumSet) { //iterate through the centrums to see which goes here
final double[] relCoords = obliquifySphc(lat, lon, testCentrum);
if (testCentrum.length > 6) { //if the centrum is long, then it contains longitude bounds
double minL = testCentrum[6]*Math.PI/numSym;
double maxL = testCentrum[7]*Math.PI/numSym;
relCoords[1] = Math2.floorMod(relCoords[1]-minL, 2*Math.PI) + minL;
if (relCoords[1] < minL || relCoords[1] > maxL)
continue; //ignore any longitudes not in the bounds described in [6:7]
}
if (relCoords[0] > latR) { //pick the centrum that maxes out latitude
latR = relCoords[0];
lonR = relCoords[1];
centrum = testCentrum;
}
}
final double lonR0 = Math.floor((lonR+Math.PI/numSym)/(2*Math.PI/numSym))
*(2*Math.PI/numSym); //because most face projections are periodic
final double[] rth = faceProject(latR, lonR - lonR0); //apply the projection to the relative coordinates
final double r = rth[0];
final double th = rth[1] + centrum[3] + lonR0*numSym/configuration.planarSym; //centrum[3] is this face's planar rotation
final double x0 = centrum[4];
final double y0 = centrum[5];
double[] output = { r*Math.cos(th) + x0, r*Math.sin(th) + y0 };
if (Math.abs(output[0]) > width/2 || Math.abs(output[1]) > height/2) { //rotate OOB bits around nearest singularity
output = configuration.rotateOOB(output[0], output[1], x0, y0);
}
return output;
}
/**
* Inverts a point on the map by finding its nearest centrum, inverting the face
* projection, and undoing that centrum's obliquity.
*/
public double[] inverse(double x, double y) {
if (!configuration.inBounds(x, y)) return null;
final int numSym = configuration.planarSym; //we'll be using this variable a lot soon
double rM = Double.POSITIVE_INFINITY;
double[] centrum = null; //iterate to see which centrum we get
for (double[] testCentrum: configuration.centrumSet) {
final double rR = Math.hypot(x-testCentrum[4], y-testCentrum[5]);
if (rR < rM) { //pick the centrum that minimises r
rM = rR;
centrum = testCentrum;
}
}
final double th0 = centrum[3];
final double x0 = centrum[4];
final double y0 = centrum[5];
final double r = Math.hypot(x - x0, y - y0);
final double th = Math2.coerceAngle(Math.atan2(y - y0, x - x0) - th0);
if (centrum.length > 6) { //if the centrum has extra values, they are angle bounds
if (th < centrum[6]*Math.PI/numSym || th > centrum[7]*Math.PI/numSym)
return null; //ignore any angles not in the bounds described in [6:7]
}
final double thBase = Math.floor((th+Math.PI/numSym)/(2*Math.PI/numSym))
*(2*Math.PI/numSym); //because most face projections are periodic
double[] relCoords = faceInverse(r, th - thBase);
if (relCoords == null)
return null;
relCoords[1] = thBase*numSym/configuration.sphereSym + relCoords[1];
double[] absCoords = obliquifyPlnr(relCoords, centrum);
if (Math.abs(absCoords[1]) > Math.PI)
absCoords[1] = Math2.coerceAngle(absCoords[1]);
return absCoords;
}
}
/**
* A set of objects that determine the layouts of tetrahedral projections
*
* @author jkunimune
*/
private static enum Configuration {
/* LATITUDE, LONGITUDE, CTR_MERID, PLANE_ROT, X, Y */
TETRAHEDRON_WIDE_FACE(3, 3, 6., 2*Math.sqrt(3), new double[][] { // [<|>] arrangement, face-centred
{ ASIN_ONE_THD, Math.PI, -2*Math.PI/3, -2*Math.PI/3, 2, Math.sqrt(3),-1,2 },
{ ASIN_ONE_THD, Math.PI, 2*Math.PI/3, -Math.PI/3, -2, Math.sqrt(3) },
{-Math.PI/2, 0, 2*Math.PI/3, 2*Math.PI/3, 2, -Math.sqrt(3),-2,1 },
{-Math.PI/2, 0, -2*Math.PI/3, Math.PI/3, -2, -Math.sqrt(3) },
{ ASIN_ONE_THD, Math.PI/3, -2*Math.PI/3, Math.PI, 1, 0 },
{ ASIN_ONE_THD, -Math.PI/3, 2*Math.PI/3, 0, -1, 0 }}),
TRIANGLE_FACE(3, 3, 4*Math.sqrt(3), 6., new double[][] { // \delta arrangement, like they are often published
{ ASIN_ONE_THD, Math.PI/3, 0, -5*Math.PI/6, Math.sqrt(3), 2 },
{ ASIN_ONE_THD, -Math.PI/3, 0, -Math.PI/6, -Math.sqrt(3), 2 },
{ ASIN_ONE_THD, Math.PI, 0, Math.PI/2, 0, -1 },
{-Math.PI/2, 0, 0, -Math.PI/2, 0, 1 }}) {
@Override public boolean inBounds(double x, double y) {
return y > Math.sqrt(3)*Math.abs(x) - 3; //clip to the triangular outline
}
},
TETRAHEDRON_WIDE_VERTEX(3, 6, 6., 2*Math.sqrt(3), new double[][] { // [<|>] arrangement, vertex-centred
{ Math.PI/2, 0, 0, -Math.PI/2, 0, Math.sqrt(3) },
{-ASIN_ONE_THD, 0, Math.PI, Math.PI/2, 0, -Math.sqrt(3) },
{-ASIN_ONE_THD, 2*Math.PI/3, Math.PI, 5*Math.PI/6, 3, 0 },
{-ASIN_ONE_THD, -2*Math.PI/3, Math.PI, Math.PI/6, -3, 0 }}) {
@Override public double[] rotateOOB(double x, double y, double xCen, double yCen) {
if (Math.abs(x) > width/2)
return new double[] {2*xCen - x, -y};
else
return new double[] {-x, height*Math.signum(y) - y};
}
},
AUTHAGRAPH(3, 6, 4*Math.sqrt(3), 3, new double[][] { // |\/\/`| arrangement, vertex-centred
{-ASIN_ONE_THD, Math.PI, Math.PI, 0, -2*Math.sqrt(3)-.6096, 1.5 },
{-ASIN_ONE_THD, -Math.PI/3, Math.PI/3, 0, -Math.sqrt(3)-.6096, -1.5 },
{ Math.PI/2, 0, Math.PI, 0, 0-.6096, 1.5 },
{-ASIN_ONE_THD, Math.PI/3, -Math.PI/3, 0, Math.sqrt(3)-.6096, -1.5 },
{-ASIN_ONE_THD, Math.PI, Math.PI, 0, 2*Math.sqrt(3)-.6096, 1.5 },
{-ASIN_ONE_THD, -Math.PI/3, Math.PI/3, 0, 3*Math.sqrt(3)-.6096, -1.5 }}) {
@Override public double[] rotateOOB(double x, double y, double xCen, double yCen) {
if (Math.abs(y) > height/2) { //reflect through the centrum first
x = 2*xCen - x;
y = 2*yCen - y;
}
if (Math.abs(x) > width/2) //then wrap horizontally
x = Math2.floorMod(x+width/2,width)-width/2;
return new double[] {x, y};
}
},
DYMAXION(5, 6, 5.5, 1.5*Math.sqrt(3), new double[][] { // I can't draw this in ASCII. You know what "Dymaxion" means
{ Math.PI/2, 0.0, -3*Math.PI/5,-Math.PI/2, -1.5, Math.sqrt(3), -3,3 }, //West Africa
{ Math.PI/2, 0.0, Math.PI/5, -Math.PI/2, 0.5, Math.sqrt(3), -1,1 }, //Brazil
{ Math.PI/2, 0.0, 3*Math.PI/5,-Math.PI/2, 1.5, Math.sqrt(3), -1,1 }, //South Atlantic O.
{ ATAN_ONE_HLF,-4*Math.PI/5, 2*Math.PI/5,-Math.PI/6, -2.0, Math.sqrt(3)/2,-5,5 }, //Arabia
{ ATAN_ONE_HLF,-2*Math.PI/5,-2*Math.PI/5,-5*Math.PI/6,-1.0, Math.sqrt(3)/2,-5,5 }, //Scandinavia
{ ATAN_ONE_HLF, 0.0, 0.0, -Math.PI/2, 0.0, Math.sqrt(3)/2,-3,5 }, //Caribbean
{ ATAN_ONE_HLF, 0.0, -4*Math.PI/5,-5*Math.PI/6,-0.5, Math.sqrt(3), -1,1 }, //North Atlantic O.
{ ATAN_ONE_HLF, 2*Math.PI/5, 0.0, -Math.PI/2, 1.0, Math.sqrt(3)/2,-5,5 }, //Patagonia
{ ATAN_ONE_HLF, 4*Math.PI/5,-2*Math.PI/5,-5*Math.PI/6, 2.0, Math.sqrt(3)/2,-3,2 }, //East Antarctica
{ ATAN_ONE_HLF, 4*Math.PI/5, 0.0, -Math.PI/6, -3.5, 0.0, 0,1 }, //South Indian O.
{ ATAN_ONE_HLF, 4*Math.PI/5, 2*Math.PI/5,-Math.PI/6, -3.0, Math.sqrt(3)/2,-1,1 }, //North Indian O.
{ ATAN_ONE_HLF, 4*Math.PI/5, 4*Math.PI/5,-Math.PI/6, -2.5, Math.sqrt(3), -1,1 }, //South Africa
{-ATAN_ONE_HLF,-Math.PI, Math.PI/5, -Math.PI/6, -2.5, 0.0, -5,5 }, //Australia
{-ATAN_ONE_HLF,-3*Math.PI/5, Math.PI, Math.PI/2, -1.5, 0.0, -6,4 }, //China
{-ATAN_ONE_HLF,-Math.PI/5, Math.PI, Math.PI/2, -0.5, 0.0, -5,5 }, //North America
{-ATAN_ONE_HLF, Math.PI/5, -3*Math.PI/5, 5*Math.PI/6, 0.5, 0.0, -5,5 }, //East Pacific O.
{-ATAN_ONE_HLF, 3*Math.PI/5, Math.PI, Math.PI/2, 1.5, 0.0, -3,3 }, //West Antarctica
{-ATAN_ONE_HLF, 3*Math.PI/5,-Math.PI/5, 5*Math.PI/6, 1.0,-Math.sqrt(3)/2,-1,1 }, //South Pacific O.
{-ATAN_ONE_HLF, 3*Math.PI/5, Math.PI/5, Math.PI/6, -3.0,-Math.sqrt(3)/2,-1,1 }, //New Zealand
{-Math.PI/2, 0.0, -Math.PI, Math.PI/2, 0.0,-Math.sqrt(3)/2,-3,1 }, //Hawai`i
{-Math.PI/2, 0.0, -3*Math.PI/5, Math.PI/2, -1.0,-Math.sqrt(3)/2,-1,2 }, //West Pacific O.
{-Math.PI/2, 0.0, -Math.PI/5, Math.PI/2, -2.0,-Math.sqrt(3)/2, 0,3 }}); //Melanesia
/* LATITUDE, LONGITUDE, CTR_MERID, PLANE_ROT, X, Y RANGE */
public final int sphereSym, planarSym; //the numbers of symmetries in the two coordinate systems
public final double width, height; //the width and height of a map with this configuration
public final double[][] centrumSet; //the mathematical information about this configuration
public final Type type; //holds the number of faces
private Configuration(int sphereSym, int planarSym, double width, double height, double[][] centrumSet) {
this.width = width;
this.height = height;
this.sphereSym = sphereSym;
this.planarSym = planarSym;
this.centrumSet = centrumSet;
if (sphereSym == 3)
this.type = Type.TETRAHEDRAL;
else
this.type = Type.ICOSOHEDRAL;
}
public double[] rotateOOB(double x, double y, double xCen, double yCen) { //move points that are out of bounds for project()
return new double[] {x, y}; //this method should be overridden by projections with weird geometry
}
public boolean inBounds(double x, double y) {return true;} //determine whether a point is in bounds for inverse()
}
}
| |
package org.wtrader.loader.crawler;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.log4j.Logger;
import org.wtrader.cep.utils.data.entities.BenefitStockEntity;
import org.wtrader.cep.utils.data.entities.BenefitTypeEntity;
import org.wtrader.cep.utils.data.entities.CompanyEntity;
import org.wtrader.cep.utils.data.entities.StockEntity;
import org.wtrader.cep.utils.data.enums.BenefitType;
import org.wtrader.cep.utils.data.interfaces.IBenefitStockData;
import org.wtrader.cep.utils.data.interfaces.IBenefitTypeData;
import org.wtrader.cep.utils.data.interfaces.ICompanyData;
import org.wtrader.cep.utils.data.interfaces.IDataStorage;
import org.wtrader.cep.utils.data.interfaces.IStockData;
import org.wtrader.loader.crawler.interfaces.ICrawlerAllCompanies;
import org.wtrader.loader.crawler.interfaces.ICrawlerEventosCorporativos;
import org.wtrader.loader.utils.beans.BenefitStockBean;
import org.wtrader.loader.utils.beans.CompanyBean;
import org.wtrader.loader.utils.interfaces.ICrawlerLoader;
/**
 * Loads companies, their stocks, and their corporate-action ("benefit") events by
 * crawling the configured external sources and persisting results through
 * {@link IDataStorage}. A failure on one company is logged and does not abort the rest.
 */
@Named
public class CrawlerLoader implements ICrawlerLoader {

    private static final Logger LOGGER = Logger.getLogger(CrawlerLoader.class);

    @Inject
    private IBenefitStockData benefitStockData;
    @Inject
    private IBenefitTypeData benefitTypeData;
    @Inject
    private ICompanyData companyData;
    @Inject
    private IStockData stockData;
    @Inject
    private IDataStorage dataStorage;
    @Inject
    private ICrawlerAllCompanies crawlerAllCompanies;
    @Inject
    private ICrawlerEventosCorporativos crawlerEventosCorporativos;

    // Lazily-built cache of persisted benefit types, keyed by their enum value.
    private Map<BenefitType, BenefitTypeEntity> benefitTypes;

    public CrawlerLoader() {
    }

    /**
     * Entry point: retrieves the full company list, then crawls and saves each
     * company with its stocks and benefits.
     */
    @Override
    public void load() {
        this.loadBenefitsTypes();
        List<String[]> companies = null;
        CompanyBean companyBean;
        try {
            companies = this.crawlerAllCompanies.retrieveAllCompanies();
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        if (companies == null) {
            LOGGER.error("Problem to retrieve all companies.");
            return;
        }
        for (String[] company : companies) {
            try {
                // company[0] = codigoCvm, company[1] = company name (per the log message below)
                companyBean = this.crawlerEventosCorporativos.retrieveCompany(company[0], company[1]);
                if (companyBean != null) {
                    this.saveCompany(companyBean);
                } else {
                    if (LOGGER.isInfoEnabled()) {
                        // BUGFIX: message previously read "not founded"
                        LOGGER.info(String.format("Company not found to codigoCvm [%s] and companyName [%s].",
                                company[0], company[1]));
                    }
                }
            } catch (Exception e) {
                LOGGER.error(e.getMessage(), e); // keep going with the next company
            }
        }
    }

    /** Populates the benefit-type cache on first use; subsequent calls are no-ops. */
    private void loadBenefitsTypes() {
        if (this.benefitTypes == null) {
            this.benefitTypes = new HashMap<BenefitType, BenefitTypeEntity>();
            for (BenefitTypeEntity benefit : this.benefitTypeData.findAll()) {
                this.benefitTypes.put(benefit.getType(), benefit);
            }
        }
    }

    /** Persists one crawled company plus its stocks and benefits; skips companies with no stocks. */
    private void saveCompany(CompanyBean company) {
        if ((company.getStocks() == null) || company.getStocks().isEmpty()) {
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info(String.format("Company without shares on market [%s].", company));
            }
            return;
        }
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info(String.format("Saving the company [%s].", company));
        }
        CompanyEntity companyEntity = this.saveAndGetCompanyEntity(company);
        this.saveStocks(companyEntity, company);
        this.saveBenefits(companyEntity, company);
    }

    /** Saves each stock of the company that is not yet persisted; flags cross-company conflicts. */
    private void saveStocks(CompanyEntity companyEntity, CompanyBean companyBean) {
        for (String stockName : companyBean.getStocks()) {
            if ((stockName == null) || stockName.isEmpty()) {
                LOGGER.warn(String.format("Stock name is empty [%s].", companyBean));
                continue;
            }
            StockEntity stockEntity = this.stockData.findByName(stockName);
            if (stockEntity == null) {
                stockEntity = new StockEntity();
                stockEntity.setCompany(companyEntity);
                stockEntity.setName(stockName);
                stockEntity.setWasNormalized(false);
                stockEntity = this.dataStorage.saveStock(stockEntity);
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info(String.format("Stock saved [%s].", stockEntity));
                }
            } else {
                // BUGFIX: was `!=`, which is a reference comparison if getId() returns a boxed Long.
                if (!Objects.equals(stockEntity.getCompany().getId(), companyEntity.getId())) {
                    LOGGER.error(String.format("Stock [%s] associated with different companies [%s][%s].",
                            stockName, stockEntity.getCompany().getId(), companyEntity.getId()));
                }
            }
        }
    }

    /** Saves any benefit events of the company that are not already persisted. */
    private void saveBenefits(CompanyEntity companyEntity, CompanyBean company) {
        if (company.getBenefitsInStock() == null) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(String.format("Company [%s] without benefits in stock.", company));
            }
            return;
        }
        for (BenefitStockBean benefit : company.getBenefitsInStock()) {
            BenefitStockEntity benefitEntity = this.benefitStockData.findByDeliberateAndCompany(benefit.getDeliberate(), companyEntity);
            if (benefitEntity != null) {
                continue; // already persisted; the deliberate date + company identifies it
            }
            BenefitTypeEntity benefitType = this.getBenefitType(benefit.getName());
            if (benefitType == null) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info(String.format("Benefit in Stock [%s] not recognized.", benefit));
                }
                continue;
            }
            benefitEntity = new BenefitStockEntity();
            benefitEntity.setBusinessesWithUp(benefit.getBusinessesWithUp());
            benefitEntity.setCompany(companyEntity);
            benefitEntity.setDeliberate(benefit.getDeliberate());
            benefitEntity.setFactor(benefit.getFactor());
            benefitEntity.setSharesInCredit(benefit.getSharesInCredit());
            benefitEntity.setBenefitType(benefitType);
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info(String.format("Saving a new benefit [%s].", benefitEntity));
            }
            benefitEntity = this.dataStorage.saveBenefitStock(benefitEntity);
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info(String.format("Benefit in stock saved [%s].", benefitEntity));
            }
        }
    }

    /**
     * Maps a crawled benefit name to its persisted type entity, falling back to
     * prefix matching when {@code BenefitType.parser} does not recognize the name.
     *
     * @return the matching entity, or {@code null} if the name is unrecognized.
     */
    private BenefitTypeEntity getBenefitType(String name) {
        BenefitType benefitType = BenefitType.parser(name);
        if (benefitType == null) {
            if (name.startsWith("b")) {
                benefitType = BenefitType.BONIFICATION;
            } else if (name.startsWith("c")) { // BUGFIX: was a bare `if`, breaking the else-if chain
                benefitType = BenefitType.FISSION;
            } else if (name.startsWith("d")) {
                benefitType = BenefitType.SPLIT;
            } else if (name.startsWith("g")) {
                benefitType = BenefitType.INPLIT;
            } else if (name.startsWith("r")) {
                benefitType = BenefitType.RETURN_CAPITAL;
            }
        }
        if (benefitType == null) {
            return null;
        }
        return this.benefitTypes.get(benefitType);
    }

    /**
     * Finds the company by its CVM code, creating it if absent, and updates the
     * "unitary quotation since" date when it has changed.
     */
    private CompanyEntity saveAndGetCompanyEntity(CompanyBean company) {
        CompanyEntity companyEntity = this.companyData.findByCodeCvm(company.getCodeCvm());
        if (companyEntity == null) {
            companyEntity = new CompanyEntity();
            companyEntity.setName(company.getTradeName().toLowerCase());
            companyEntity.setCodeCvm(company.getCodeCvm());
            companyEntity.setUnitaryQuotationSince(company.getUnitaryQuotationSince());
            companyEntity = this.dataStorage.saveCompany(companyEntity);
        } else {
            boolean update = false;
            // BUGFIX: names are stored lowercased above, so the old check
            // `getName().toLowerCase().equals(getTradeName())` reported a spurious
            // mismatch whenever the trade name contained uppercase characters.
            if (!companyEntity.getName().equalsIgnoreCase(company.getTradeName())) {
                LOGGER.error(String.format("Code cvm associated with different companies [%s][%s].",
                        companyEntity.getName(), company.getTradeName()));
                return companyEntity;
            }
            if (companyEntity.getUnitaryQuotationSince().compareTo(company.getUnitaryQuotationSince()) != 0) {
                if (LOGGER.isInfoEnabled()) {
                    LOGGER.info(String.format("Updating the record Unitary Quotation Since [%s] of company [%s].",
                            company.getUnitaryQuotationSince(), companyEntity.getName()));
                }
                update = true;
                companyEntity.setUnitaryQuotationSince(company.getUnitaryQuotationSince());
            }
            if (update) {
                companyEntity = this.dataStorage.saveCompany(companyEntity);
            }
        }
        return companyEntity;
    }
}
| |
/*
* Copyright 2011 - 2013 NTB University of Applied Sciences in Technology
* Buchs, Switzerland, http://www.ntb.ch/inf
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.ntb.inf.deep.runtime.mpc555.driver;
/* Changes:
* 14.09.2009 NTB/SP creation
*/
/**
* Driver for the DS1302Z real time clock.<br>
* Pin 13, 14 and 15 of the TPUB are used.<br>
* Before the first write disable the write protection with {@link #setWriteProtection(boolean)}.
*
*/
public class DS1302Z {
private static final boolean tpuB = false;
private static final int RST = 13;
private static final int SCKL = 14;
private static final int IO = 15;
private static final byte sec = (byte) 0x80;
private static final byte min = (byte) 0x82;
private static final byte hour = (byte) 0x84;
private static final byte date = (byte)0x86;
private static final byte month = (byte) 0x88;
private static final byte day = (byte)0x8A;
private static final byte year = (byte)0x8C;
private static final byte ctrl = (byte) 0x8E;
private static TPU_DIO rst, sckl, io;
/**
* Write a value to the RTC.
*
* @param type The type (sec, min, ...) to write.
* @param val The value to write.
*/
private static void write(byte type, byte val){
int w = ((val << 8) | (0xFF & type));
rst.set(true);
io.dir(true);
for(int i = 0x1; i < 0x10000; i<<=1){
sckl.set(false);
if((w & i) != 0) {
io.set(true);
}
else {
io.set(false);
}
sckl.set(true);
}
sckl.set(false);
rst.set(false);
}
/**
* Read a value from the RTC.
* @param type The type to read (sec, min, ...).
* @return The read value.
*/
private static int read(byte type){
type |= 0x01;
rst.set(true);
io.dir(true);
for(int i = 0x1; i < 0x100; i<<=1){
sckl.set(false);
if((type & i) != 0) io.set(true);
else io.set(false);
sckl.set(true);
}
int val = 0;
io.dir(false);
for(int i = 0x1; i < 0x100; i<<=1){
sckl.set(true);
sckl.set(false);
if(io.get()) val |= i;
}
io.dir(false);
rst.set(false);
return val;
}
/**
* Read the actual second value from the RTC.
* @return The actual seconds.
*/
public static int getSec(){
int val = read(sec);
return (val & 0xF) + ((val >> 4) & 0x7) * 10;
}
/**
* Read the actual minute value from the RTC.
* @return The actual minutes.
*/
public static int getMin(){
int val = read(min);
return (val & 0xF) + (val >> 4) * 10;
}
/**
* Read the actual hour value from the RTC.
* @return The actual hours.
*/
public static int getHour(){
int val = read(hour);
return (val & 0xF) + ((val >> 4) & 0x3) * 10;
}
/**
* Reads the actual date value from the RTC.
* @return The actual date.
*/
public static int getDate(){
int val = read(date);
return (val & 0xF) + (val >> 4) * 10;
}
/**
* Reads the actual month value from the RTC.
* @return The actual month.
*/
public static int getMonth(){
int val = read(month);
return (val & 0xF) + (val >> 4) * 10;
}
/**
* Reads the actual day value from the RTC.
* @return The actual day of the week.
*/
public static int getDay(){
return read(day);
}
/**
* Reads the actual year value from the RTC.
* @return The actual year.
*/
public static int getYear(){
int val = read(year);
return (val & 0xF) + ((val >> 4) & 0xF)* 10 + 2000;
}
/**
* Set the actual second value to the RTC.
* @param val The actual seconds.
*/
public static void setSec(int val){
int v = val % 10;
v |= (val / 10) << 4;
write(sec,(byte) v);
}
/**
* Set the actual minute value to the RTC.
* @param val The actual minutes.
*/
public static void setMin(int val){
int v = val % 10;
v |= (val / 10) << 4;
write(min,(byte) v);
}
/**
* Set the actual hour value to the RTC.
* @param val The actual hour.
*/
public static void setHour(int val){
int v = val % 10;
v |= ((val / 10) << 4);
write(hour,(byte) v);
}
/**
 * Writes the date (day-of-month) register of the RTC.
 * Packs the decimal value into BCD before sending it to the chip.
 * @param val The day of the month to store (expected 1-31).
 */
public static void setDate(int val){
    final int bcd = ((val / 10) << 4) | (val % 10);
    write(date, (byte) bcd);
}
/**
 * Writes the month register of the RTC.
 * Packs the decimal value into BCD before sending it to the chip.
 * @param val The month to store (expected 1-12).
 */
public static void setMonth(int val){
    final int bcd = ((val / 10) << 4) | (val % 10);
    write(month, (byte) bcd);
}
/**
 * Writes the day-of-week register of the RTC.
 * The value is written as-is; a single digit needs no BCD packing.
 * @param val The day of the week (typically 1-7 -- confirm against the RTC datasheet).
 */
public static void setDay(int val){
write(day,(byte)val);
}
/**
 * Writes the year register of the RTC.
 * Accepts either a four-digit year (values above 2000 are reduced to
 * their two-digit offset) or the two-digit offset itself, and packs the
 * result into BCD before sending it to the chip.
 * @param val The year to store (e.g. 2015, or 15).
 */
public static void setYear(int val){
    int twoDigit = val;
    if (twoDigit > 2000) {
        twoDigit -= 2000;
    }
    final int bcd = ((twoDigit / 10) << 4) | (twoDigit % 10);
    write(year, (byte) bcd);
}
/**
 * Reports whether the RTC's write protection is currently enabled.
 * The write-protect flag is bit 7 of the control register.
 * @return <code>true</code> if write protection is enabled, <code>false</code> otherwise.
 */
public static boolean writeProtected(){
    final int ctrlValue = read(ctrl);
    return (ctrlValue & 0x80) == 0x80;
}
/**
 * Enables or disables the RTC's write protection.
 * The write-protect flag is bit 7 of the control register (this is the
 * bit {@link #writeProtected()} tests); the remaining control bits are
 * required to be written as zero on DS1302-style chips, so 0x80 is
 * written instead of the previous 0xFF, which also set reserved bits.
 * @param enable <code>true</code> to enable, <code>false</code> to disable.
 */
public static void setWriteProtection(boolean enable){
    if (enable) {
        write(ctrl, (byte) 0x80);
    } else {
        write(ctrl, (byte) 0);
    }
}
// One-time setup of the three lines of the RTC's serial interface.
// RST and SCKL are created as outputs (third ctor argument true) and
// driven low so the bus starts idle with the chip deselected; IO is
// created as an input (false) and is redirected per transfer by the
// read/write helpers via io.dir(...).
// NOTE(review): assumes TPU_DIO(tpu, pin, isOutput) semantics -- confirm.
static{
rst = new TPU_DIO(tpuB, RST , true);
sckl = new TPU_DIO(tpuB, SCKL, true);
io = new TPU_DIO(tpuB, IO, false);
rst.set(false);
sckl.set(false);
}
}
| |
package org.hl7.fhir.instance.model.valuesets;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Jul 21, 2015 10:37-0400 for FHIR v0.5.0
/**
 * HL7 v3 Ethnicity value set (FHIR v0.5.0).
 *
 * Each constant carries its concept code, display text and definition as
 * fields, so the lookup and rendering methods are simple field reads rather
 * than four parallel 43-way branch tables. The external contract -- constant
 * names, method signatures, return values (including "?" for {@link #NULL})
 * and the exception thrown by {@link #fromCode(String)} -- is identical to
 * the generated original.
 */
public enum V3Ethnicity {
    _21352("2135-2", "Hispanic or Latino"),
    _21378("2137-8", "Spaniard"),
    _21386("2138-6", "Andalusian"),
    _21394("2139-4", "Asturian"),
    _21402("2140-2", "Castillian"),
    _21410("2141-0", "Catalonian"),
    _21428("2142-8", "Belearic Islander"),
    _21436("2143-6", "Gallego"),
    _21444("2144-4", "Valencian"),
    _21451("2145-1", "Canarian"),
    _21469("2146-9", "Spanish Basque"),
    _21485("2148-5", "Mexican"),
    _21493("2149-3", "Mexican American"),
    _21501("2150-1", "Mexicano"),
    _21519("2151-9", "Chicano"),
    _21527("2152-7", "La Raza"),
    _21535("2153-5", "Mexican American Indian"),
    _21550("2155-0", "Central American"),
    _21568("2156-8", "Costa Rican"),
    _21576("2157-6", "Guatemalan"),
    _21584("2158-4", "Honduran"),
    _21592("2159-2", "Nicaraguan"),
    _21600("2160-0", "Panamanian"),
    _21618("2161-8", "Salvadoran"),
    _21626("2162-6", "Central American Indian"),
    _21634("2163-4", "Canal Zone"),
    _21659("2165-9", "South American"),
    _21667("2166-7", "Argentinean"),
    _21675("2167-5", "Bolivian"),
    _21683("2168-3", "Chilean"),
    _21691("2169-1", "Colombian"),
    _21709("2170-9", "Ecuadorian"),
    _21717("2171-7", "Paraguayan"),
    _21725("2172-5", "Peruvian"),
    _21733("2173-3", "Uruguayan"),
    _21741("2174-1", "Venezuelan"),
    _21758("2175-8", "South American Indian"),
    _21766("2176-6", "Criollo"),
    _21782("2178-2", "Latin American"),
    _21808("2180-8", "Puerto Rican"),
    _21824("2182-4", "Cuban"),
    _21840("2184-0", "Dominican"),
    /** Not Hispanic or Latino; its formal definition differs from the display text. */
    _21865("2186-5", "Not Hispanic or Latino",
        "Note that this term remains in the table for completeness, even though within HL7, the notion of 'not otherwise coded' term is deprecated."),
    /** added to help the parsers */
    NULL(null, null);

    // Concept code (e.g. "2135-2"); null only for NULL.
    private final String code;
    // Human-readable display text; null only for NULL.
    private final String display;
    // Formal definition; equals display for every concept except _21865.
    private final String definition;

    V3Ethnicity(String code, String display) {
        this(code, display, display);
    }

    V3Ethnicity(String code, String display, String definition) {
        this.code = code;
        this.display = display;
        this.definition = definition;
    }

    /**
     * Resolves a concept code (e.g. "2135-2") to its enum constant.
     * @param codeString the code to look up; null or empty yields null.
     * @return the matching constant, or null for a null/empty input.
     * @throws Exception if the code is not part of this value set.
     */
    public static V3Ethnicity fromCode(String codeString) throws Exception {
        if (codeString == null || "".equals(codeString))
            return null;
        for (V3Ethnicity candidate : values()) {
            if (candidate.code != null && candidate.code.equals(codeString))
                return candidate;
        }
        throw new Exception("Unknown V3Ethnicity code '" + codeString + "'");
    }

    /** @return the concept code for this constant, or "?" for {@link #NULL}. */
    public String toCode() {
        return code == null ? "?" : code;
    }

    /** @return the canonical URI of this code system. */
    public String getSystem() {
        return "http://hl7.org/fhir/v3/Ethnicity";
    }

    /** @return the formal definition text, or "?" for {@link #NULL}. */
    public String getDefinition() {
        return definition == null ? "?" : definition;
    }

    /** @return the human-readable display text, or "?" for {@link #NULL}. */
    public String getDisplay() {
        return display == null ? "?" : display;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.management.internal.configuration.utils;
import static com.gemstone.gemfire.management.internal.configuration.utils.XmlConstants.W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION;
import static com.gemstone.gemfire.management.internal.configuration.utils.XmlConstants.W3C_XML_SCHEMA_INSTANCE_PREFIX;
import static javax.xml.XMLConstants.NULL_NS_URI;
import static javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import com.gemstone.gemfire.internal.cache.xmlcache.CacheXml;
import com.gemstone.gemfire.internal.cache.xmlcache.CacheXmlParser;
import com.gemstone.gemfire.internal.lang.StringUtils;
import com.gemstone.gemfire.management.internal.configuration.domain.CacheElement;
import com.gemstone.gemfire.management.internal.configuration.domain.XmlEntity;
public class XmlUtils {

  /**
   * Create an XML {@link Document} from the given {@link Reader}.
   *
   * @param reader
   *          source to parse the document from.
   * @return the parsed {@link Document}.
   * @throws ParserConfigurationException if a parser cannot be configured.
   * @throws SAXException if the content is not well-formed XML.
   * @throws IOException if reading from the reader fails.
   * @since 8.1
   */
  public static Document createDocumentFromReader(final Reader reader) throws SAXException, ParserConfigurationException, IOException {
    InputSource inputSource = new InputSource(reader);
    return getDocumentBuilder().parse(inputSource);
  }

  /**
   * Evaluate an XPath expression against a node with no namespace context.
   *
   * @param node context node to evaluate against.
   * @param searchString XPath expression.
   * @return all matching nodes.
   * @throws XPathExpressionException if the expression cannot be evaluated.
   */
  public static NodeList query(Node node, String searchString) throws XPathExpressionException {
    XPath xpath = XPathFactory.newInstance().newXPath();
    return (NodeList) xpath.evaluate(searchString, node, XPathConstants.NODESET);
  }

  /**
   * Evaluate an XPath expression against a node using the supplied namespace context.
   *
   * @param node context node to evaluate against.
   * @param searchString XPath expression.
   * @param xpathcontext prefix-to-URI mapping used by the expression.
   * @return all matching nodes.
   * @throws XPathExpressionException if the expression cannot be evaluated.
   */
  public static NodeList query(Node node, String searchString, XPathContext xpathcontext) throws XPathExpressionException {
    XPath xpath = XPathFactory.newInstance().newXPath();
    xpath.setNamespaceContext(xpathcontext);
    return (NodeList) xpath.evaluate(searchString, node, XPathConstants.NODESET);
  }

  /**
   * Evaluate an XPath expression expected to select a single {@link Element}.
   *
   * @param node context node to evaluate against.
   * @param searchString XPath expression.
   * @param xPathContext prefix-to-URI mapping used by the expression.
   * @return the matching {@link Element}, or null if nothing matched.
   * @throws XPathExpressionException if the expression cannot be evaluated or
   *           the result is a node that is not an Element.
   */
  public static Element querySingleElement(Node node, String searchString, final XPathContext xPathContext) throws XPathExpressionException {
    XPath xpath = XPathFactory.newInstance().newXPath();
    xpath.setNamespaceContext(xPathContext);
    Object result = xpath.evaluate(searchString, node, XPathConstants.NODE);
    // Surface a non-Element result as an XPath problem instead of leaking a
    // ClassCastException to the caller; a null result simply means "no match".
    if (result == null || result instanceof Element) {
      return (Element) result;
    }
    throw new XPathExpressionException("Not an org.w3c.dom.Element: " + result);
  }

  /**
   * Create a namespace-aware {@link DocumentBuilder} whose entity resolver
   * resolves GemFire DTD/XSD references locally via {@link CacheXmlParser}.
   *
   * NOTE(review): external entities are not explicitly disabled here, so
   * parsing fully untrusted XML could be XXE-prone; confirm inputs are
   * trusted before hardening, since disabling entity resolution outright
   * may break the CacheXmlParser-based resolution this relies on.
   *
   * @return a new namespace-aware {@link DocumentBuilder}.
   * @throws ParserConfigurationException if the builder cannot be created.
   */
  public static DocumentBuilder getDocumentBuilder() throws ParserConfigurationException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    DocumentBuilder builder = factory.newDocumentBuilder();
    builder.setEntityResolver(new CacheXmlParser());
    return builder;
  }

  /*****
   * Adds a new node or replaces an existing node in the Document.
   * An existing child is replaced when its namespace/type match the entity
   * and either its "name"/"id" attribute matches, or the element type does
   * not allow multiples. Otherwise the new node is inserted at the position
   * dictated by the cache-XML element ordering, falling back to appending.
   * @param doc Target document where the node will be added
   * @param xmlEntity contains definition of the xml entity
   * @throws IOException
   * @throws ParserConfigurationException
   * @throws SAXException
   * @throws XPathExpressionException
   */
  public static void addNewNode(final Document doc, final XmlEntity xmlEntity) throws IOException, XPathExpressionException, SAXException, ParserConfigurationException {
    // Build up map per call to avoid issues with caching wrong version of the map.
    final LinkedHashMap<String, CacheElement> elementOrderMap = CacheElement.buildElementMap(doc);
    final Node newNode = createNode(doc, xmlEntity.getXmlDefinition());
    final Node root = doc.getDocumentElement();
    final int incomingElementOrder = getElementOrder(elementOrderMap, xmlEntity.getNamespace(), xmlEntity.getType());
    boolean nodeAdded = false;
    NodeList nodes = root.getChildNodes();
    final int length = nodes.getLength();
    for (int i = 0; i < length; i++) {
      final Node node = nodes.item(i);
      if (node instanceof Element) {
        final Element childElement = (Element) node;
        final String type = childElement.getLocalName();
        final String namespace = childElement.getNamespaceURI();
        // Null-safe namespace comparison: a child element without a namespace
        // URI previously caused a NullPointerException here.
        if (namespace != null && namespace.equals(xmlEntity.getNamespace())
            && type.equals(xmlEntity.getType())) {
          // TODO this should really be checking all attributes in xmlEntity.getAttributes
          // First check if the element has a name
          String nameOrId = getAttribute(childElement, "name");
          // If not then check if the element has an Id
          if (nameOrId == null) {
            nameOrId = getAttribute(childElement, "id");
          }
          if (nameOrId != null) {
            // If there is a match, then replace the existing node with the incoming node
            if (nameOrId.equals(xmlEntity.getNameOrId())) {
              root.replaceChild(newNode, node);
              nodeAdded = true;
              break;
            }
          } else {
            // This element does not have any name or id identifier, e.g. PDX and gateway-receiver.
            // If there is only one element of that type then replace it with the incoming node.
            if (!isMultiple(elementOrderMap, namespace, type)) {
              root.replaceChild(newNode, node);
              nodeAdded = true;
              break;
            }
          }
        } else {
          // Different element type: insert before the first element that is
          // ordered after the incoming one.
          if (incomingElementOrder < getElementOrder(elementOrderMap, namespace, type)) {
            root.insertBefore(newNode, node);
            nodeAdded = true;
            break;
          }
        }
      }
    }
    if (!nodeAdded) {
      root.appendChild(newNode);
    }
  }

  /**
   * @param elementOrderMap map of cache element metadata keyed by type.
   * @param namespace namespace URI of the element.
   * @param type local name of the element.
   * @return <code>true</code> if element allows multiple, otherwise
   *         <code>false</code>.
   * @since 8.1
   */
  private static boolean isMultiple(final LinkedHashMap<String, CacheElement> elementOrderMap, final String namespace, final String type) {
    if (CacheXml.NAMESPACE.equals(namespace)) {
      // We only keep the cache elements in elementOrderMap
      final CacheElement cacheElement = elementOrderMap.get(type);
      if (null != cacheElement) {
        return cacheElement.isMultiple();
      }
    }
    // Assume all extensions are not multiples.
    // To support multiple on extensions our map needs to include other namespaces.
    return false;
  }

  /**
   * @param elementOrderMap map of cache element metadata keyed by type.
   * @param namespace namespace URI of the element.
   * @param type local name of the element.
   * @return position of the element if in map, otherwise
   *         {@link Integer#MAX_VALUE}.
   * @since 8.1
   */
  private static int getElementOrder(final LinkedHashMap<String, CacheElement> elementOrderMap, final String namespace, final String type) {
    if (CacheXml.NAMESPACE.equals(namespace)) {
      // We only keep the cache elements in elementOrderMap
      final CacheElement cacheElement = elementOrderMap.get(type);
      if (null != cacheElement) {
        return cacheElement.getOrder();
      }
    }
    // Assume all extensions are order independent.
    return Integer.MAX_VALUE;
  }

  /****
   * Creates a node from the String xml definition, imported into the owner
   * document so it can be attached there.
   * @param owner document the new node will belong to.
   * @param xmlDefintion XML fragment to parse.
   * @return Node representing the xml definition.
   * @throws ParserConfigurationException
   * @throws IOException
   * @throws SAXException
   */
  private static Node createNode(Document owner, String xmlDefintion) throws SAXException, IOException, ParserConfigurationException {
    InputSource inputSource = new InputSource(new StringReader(xmlDefintion));
    Document document = getDocumentBuilder().parse(inputSource);
    Node newNode = document.getDocumentElement();
    // importNode makes a deep copy owned by 'owner'; the node is not attached yet.
    return owner.importNode(newNode, true);
  }

  /**
   * Get the value of the named attribute on a node.
   * @param node node to read the attribute from.
   * @param name attribute name.
   * @return the attribute value, or null if the node has no attributes or
   *         no attribute by that name.
   */
  public static String getAttribute(Node node, String name) {
    NamedNodeMap attributes = node.getAttributes();
    if (attributes == null) {
      return null;
    }
    Node attributeNode = attributes.getNamedItem(name);
    if (attributeNode == null) {
      return null;
    }
    return attributeNode.getTextContent();
  }

  /**
   * Get the value of a namespace-qualified attribute on a node.
   * @param node node to read the attribute from.
   * @param localName attribute local name.
   * @param namespaceURI attribute namespace URI.
   * @return the attribute value, or null if the node has no attributes or
   *         no matching attribute (null-guarded consistently with the
   *         single-name variant above).
   */
  public static String getAttribute(Node node, String localName, String namespaceURI) {
    NamedNodeMap attributes = node.getAttributes();
    if (attributes == null) {
      return null;
    }
    Node attributeNode = attributes.getNamedItemNS(namespaceURI, localName);
    if (attributeNode == null) {
      return null;
    }
    return attributeNode.getTextContent();
  }

  /**
   * Build schema location map of schemas used in given
   * <code>schemaLocationAttribute</code>.
   *
   * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema
   *      Part 0: Primer Second Edition | 5.6 schemaLocation</a>
   *
   * @param schemaLocation
   *          attribute value to build schema location map from.
   * @return {@link Map} of schema namespace URIs to location URLs.
   * @since 8.1
   */
  public static final Map<String, List<String>> buildSchemaLocationMap(final String schemaLocation) {
    return buildSchemaLocationMap(new HashMap<String, List<String>>(), schemaLocation);
  }

  /**
   * Build schema location map of schemas used in given
   * <code>schemaLocationAttribute</code> and adds them to the given
   * <code>schemaLocationMap</code>.
   *
   * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema
   *      Part 0: Primer Second Edition | 5.6 schemaLocation</a>
   *
   * @param schemaLocationMap
   *          {@link Map} to add schema locations to.
   * @param schemaLocation
   *          attribute value to build schema location map from; the value is
   *          whitespace-separated namespace/location pairs.
   * @return {@link Map} of schema namespace URIs to location URLs.
   * @since 8.1
   */
  static final Map<String, List<String>> buildSchemaLocationMap(Map<String, List<String>> schemaLocationMap, final String schemaLocation) {
    if (null == schemaLocation || schemaLocation.isEmpty()) {
      return schemaLocationMap;
    }
    final StringTokenizer st = new StringTokenizer(schemaLocation, " \n\t\r");
    while (st.hasMoreTokens()) {
      final String ns = st.nextToken();
      if (!st.hasMoreTokens()) {
        // Malformed schemaLocation with an odd number of tokens: ignore the
        // trailing namespace instead of throwing NoSuchElementException.
        break;
      }
      final String loc = st.nextToken();
      List<String> locs = schemaLocationMap.get(ns);
      if (null == locs) {
        locs = new ArrayList<>();
        schemaLocationMap.put(ns, locs);
      }
      if (!locs.contains(loc)) {
        locs.add(loc);
      }
    }
    return schemaLocationMap;
  }

  /*****
   * Deletes all the nodes from the document which match the definition
   * provided by xmlEntity.
   * @param doc document to delete from.
   * @param xmlEntity entity whose search string selects the nodes to delete.
   * @throws Exception
   */
  public static void deleteNode(Document doc, XmlEntity xmlEntity) throws Exception {
    NodeList nodes = getNodes(doc, xmlEntity);
    if (nodes != null) {
      int length = nodes.getLength();
      for (int i = 0; i < length; i++) {
        Node node = nodes.item(i);
        node.getParentNode().removeChild(node);
      }
    }
  }

  /****
   * Gets all the nodes matching the definition given by the xml entity.
   * @param doc document to search.
   * @param xmlEntity entity providing the search string and namespace prefix.
   * @return Nodes matching the entity's search string.
   * @throws XPathExpressionException
   */
  public static NodeList getNodes(Document doc, XmlEntity xmlEntity) throws XPathExpressionException {
    return query(doc, xmlEntity.getSearchString(), new XPathContext(xmlEntity.getPrefix(), xmlEntity.getNamespace()));
  }

  /**
   * An object used by an XPath query that maps namespaces to uris.
   *
   * @see NamespaceContext
   * @author dsmith
   */
  public static class XPathContext implements NamespaceContext {
    private HashMap<String, String> prefixToUri = new HashMap<String, String>();
    private HashMap<String, String> uriToPrefix = new HashMap<String, String>();

    public XPathContext() {
    }

    public XPathContext(String prefix, String uri) {
      addNamespace(prefix, uri);
    }

    /** Register a bidirectional prefix/URI mapping. */
    public void addNamespace(String prefix, String uri) {
      this.prefixToUri.put(prefix, uri);
      this.uriToPrefix.put(uri, prefix);
    }

    @Override
    public String getNamespaceURI(String prefix) {
      return prefixToUri.get(prefix);
    }

    @Override
    public String getPrefix(String namespaceURI) {
      return uriToPrefix.get(namespaceURI);
    }

    @Override
    public Iterator<String> getPrefixes(String namespaceURI) {
      return Collections.singleton(getPrefix(namespaceURI)).iterator();
    }
  }

  /****
   * Converts the document to a well formatted Xml string.
   * @param doc node to serialize.
   * @return pretty xml string
   * @throws IOException
   * @throws TransformerException
   * @throws TransformerFactoryConfigurationError
   */
  public static String prettyXml(Node doc) throws IOException, TransformerFactoryConfigurationError, TransformerException {
    Transformer transformer = TransformerFactory.newInstance().newTransformer();
    transformer.setOutputProperty(OutputKeys.INDENT, "yes");
    return transform(transformer, doc);
  }

  /**
   * Serialize a node to an XML string without extra indentation.
   * @param element node to serialize.
   * @return the XML string.
   */
  public static final String elementToString(Node element) throws TransformerFactoryConfigurationError, TransformerException {
    Transformer transformer = TransformerFactory.newInstance().newTransformer();
    return transform(transformer, element);
  }

  // Shared serialization helper for prettyXml/elementToString.
  private static final String transform(Transformer transformer, Node element) throws TransformerException {
    StreamResult result = new StreamResult(new StringWriter());
    DOMSource source = new DOMSource(element);
    transformer.transform(source, result);
    return result.getWriter().toString();
  }

  /****
   * Convert the xmlString to a pretty, well formatted xmlString.
   * @param xmlContent XML text to reformat.
   * @return pretty xml string
   * @throws IOException
   * @throws TransformerException
   * @throws TransformerFactoryConfigurationError
   * @throws ParserConfigurationException
   * @throws SAXException
   */
  public static String prettyXml(String xmlContent) throws IOException, TransformerFactoryConfigurationError, TransformerException, SAXException, ParserConfigurationException {
    Document doc = createDocumentFromXml(xmlContent);
    return prettyXml(doc);
  }

  /***
   * Create a document from the xml text.
   * @param xmlContent XML text to parse.
   * @return Document
   * @throws IOException
   * @throws ParserConfigurationException
   * @throws SAXException
   */
  public static Document createDocumentFromXml(String xmlContent) throws SAXException, ParserConfigurationException, IOException {
    return createDocumentFromReader(new StringReader(xmlContent));
  }

  /**
   * Upgrade the schema of a given Config XMl <code>document</code> to the given
   * <code>namespace</code>, <code>schemaLocation</code> and
   * <code>version</code>.
   *
   * @param document
   *          Config XML {@link Document} to upgrade.
   * @param namespaceUri
   *          Namespace URI to upgrade to.
   * @param schemaLocation
   *          Schema location to upgrade to.
   * @param schemaVersion
   *          Schema version to stamp on the root element.
   * @return the upgraded document (a copy if the original had a DOCTYPE).
   * @throws XPathExpressionException
   * @throws ParserConfigurationException
   * @since 8.1
   */
  // UnitTest SharedConfigurationTest.testCreateAndUpgradeDocumentFromXml()
  public static Document upgradeSchema(Document document, final String namespaceUri, final String schemaLocation, String schemaVersion) throws XPathExpressionException, ParserConfigurationException {
    if (StringUtils.isBlank(namespaceUri)) {
      throw new IllegalArgumentException("namespaceUri");
    }
    if (StringUtils.isBlank(schemaLocation)) {
      throw new IllegalArgumentException("schemaLocation");
    }
    if (StringUtils.isBlank(schemaVersion)) {
      throw new IllegalArgumentException("schemaVersion");
    }
    if (null != document.getDoctype()) {
      // DOM provides no way to remove a doctype in place, so copy the root
      // element into a fresh document that has none.
      Node root = document.getDocumentElement();
      Document copiedDocument = getDocumentBuilder().newDocument();
      Node copiedRoot = copiedDocument.importNode(root, true);
      copiedDocument.appendChild(copiedRoot);
      document = copiedDocument;
    }
    final Element root = document.getDocumentElement();
    final Map<String, String> namespacePrefixMap = buildNamespacePrefixMap(root);
    // Add CacheXml namespace if missing.
    String cachePrefix = namespacePrefixMap.get(namespaceUri);
    if (null == cachePrefix) {
      // Default to null prefix.
      cachePrefix = NULL_NS_URI;
      // Move all into new namespace
      changeNamespace(root, NULL_NS_URI, namespaceUri);
      namespacePrefixMap.put(namespaceUri, cachePrefix);
    }
    // Add schema instance namespace if missing.
    String xsiPrefix = namespacePrefixMap.get(W3C_XML_SCHEMA_INSTANCE_NS_URI);
    if (null == xsiPrefix) {
      xsiPrefix = W3C_XML_SCHEMA_INSTANCE_PREFIX;
      root.setAttribute("xmlns:" + xsiPrefix, W3C_XML_SCHEMA_INSTANCE_NS_URI);
      namespacePrefixMap.put(W3C_XML_SCHEMA_INSTANCE_NS_URI, xsiPrefix);
    }
    // Create schemaLocation attribute if missing.
    final String schemaLocationAttribute = getAttribute(root, W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION, W3C_XML_SCHEMA_INSTANCE_NS_URI);
    // Update schemaLocation for namespace.
    final Map<String, List<String>> schemaLocationMap = buildSchemaLocationMap(schemaLocationAttribute);
    List<String> schemaLocations = schemaLocationMap.get(namespaceUri);
    if (null == schemaLocations) {
      schemaLocations = new ArrayList<String>();
      schemaLocationMap.put(namespaceUri, schemaLocations);
    }
    schemaLocations.clear();
    schemaLocations.add(schemaLocation);
    String schemaLocationValue = getSchemaLocationValue(schemaLocationMap);
    root.setAttributeNS(W3C_XML_SCHEMA_INSTANCE_NS_URI, xsiPrefix + ":" + W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION, schemaLocationValue);
    // Set schema version
    if (cachePrefix == null || cachePrefix.isEmpty()) {
      root.setAttribute("version", schemaVersion);
    } else {
      root.setAttributeNS(namespaceUri, cachePrefix + ":version", schemaVersion);
    }
    return document;
  }

  /**
   * Render the <code>schemaLocationMap</code> into the space-separated
   * namespace/location pair format of the xsi:schemaLocation attribute.
   *
   * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema
   *      Part 0: Primer Second Edition | 5.6 schemaLocation</a>
   *
   * @param schemaLocationMap
   *          {@link Map} to get schema locations from.
   * @return attribute value string.
   * @since 8.1
   */
  private static final String getSchemaLocationValue(final Map<String, List<String>> schemaLocationMap) {
    final StringBuilder sb = new StringBuilder();
    for (final Map.Entry<String, List<String>> entry : schemaLocationMap.entrySet()) {
      for (final String schemaLocation : entry.getValue()) {
        if (sb.length() > 0) {
          sb.append(' ');
        }
        sb.append(entry.getKey()).append(' ').append(schemaLocation);
      }
    }
    return sb.toString();
  }

  /**
   * Build {@link Map} of namespace URIs to prefixes.
   *
   * @param root
   *          {@link Element} to get namespaces and prefixes from.
   * @return {@link Map} of namespace URIs to prefixes.
   * @since 8.1
   */
  private static final Map<String, String> buildNamespacePrefixMap(final Element root) {
    final HashMap<String, String> namespacePrefixMap = new HashMap<>();
    // Look for all of the attributes of the root element that start with xmlns.
    NamedNodeMap attributes = root.getAttributes();
    for (int i = 0; i < attributes.getLength(); i++) {
      Node item = attributes.item(i);
      if (item.getNodeName().startsWith("xmlns")) {
        // Anything after the colon is the prefix,
        // e.g. xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" has prefix xsi;
        // a bare xmlns= declaration maps to the empty prefix.
        String[] splitName = item.getNodeName().split(":");
        String prefix;
        if (splitName.length > 1) {
          prefix = splitName[1];
        } else {
          prefix = "";
        }
        String uri = item.getTextContent();
        namespacePrefixMap.put(uri, prefix);
      }
    }
    return namespacePrefixMap;
  }

  /**
   * Change the namespace URI of a <code>node</code> and its children to
   * <code>newNamespaceUri</code> if that node is in the given
   * <code>oldNamespaceUri</code> namespace URI. Elements with no namespace
   * are also moved into the new namespace.
   *
   * @param node
   *          {@link Node} to change namespace URI on.
   * @param oldNamespaceUri
   *          old namespace URI to change from.
   * @param newNamespaceUri
   *          new Namespace URI to change to.
   * @throws XPathExpressionException
   * @return the renamed version of the passed-in node itself, or null if
   *         the passed-in node was not in the old namespace.
   * @since 8.1
   */
  static final Node changeNamespace(final Node node, final String oldNamespaceUri, final String newNamespaceUri) throws XPathExpressionException {
    Node result = null;
    final NodeList nodes = query(node, "//*");
    for (int i = 0; i < nodes.getLength(); i++) {
      final Node element = nodes.item(i);
      if (element.getNamespaceURI() == null || element.getNamespaceURI().equals(oldNamespaceUri)) {
        Node renamed = node.getOwnerDocument().renameNode(element, newNamespaceUri, element.getNodeName());
        if (element == node) {
          result = renamed;
        }
      }
    }
    return result;
  }

  /****
   * Method to modify the root attribute (cache) of the XML.
   * @param doc Target document whose root attributes must be modified
   * @param xmlEntity xml entity for the root, it also contains the attributes
   * @throws IOException
   */
  public static void modifyRootAttributes(Document doc, XmlEntity xmlEntity) throws IOException {
    if (xmlEntity == null || xmlEntity.getAttributes() == null) {
      return;
    }
    String type = xmlEntity.getType();
    Map<String, String> attributes = xmlEntity.getAttributes();
    Element root = doc.getDocumentElement();
    if (root.getLocalName().equals(type)) {
      for (Entry<String, String> entry : attributes.entrySet()) {
        String attributeName = entry.getKey();
        String attributeValue = entry.getValue();
        // Remove any existing attribute by NAME before re-adding it.
        // (Previously the attribute's VALUE was passed to removeAttribute,
        // which expects a name, so the removal was effectively a no-op.)
        if (null != getAttribute(root, attributeName)) {
          root.removeAttribute(attributeName);
        }
        // Add the attribute with its new value.
        root.setAttribute(attributeName, attributeValue);
      }
    }
  }

  /***
   * Reads the xml file as a String.
   * @param xmlFilePath path of the file to read.
   * @return String containing xml read from the file.
   * @throws IOException
   * @throws ParserConfigurationException
   * @throws SAXException
   * @throws TransformerException
   * @throws TransformerFactoryConfigurationError
   */
  public static String readXmlAsStringFromFile(String xmlFilePath) throws IOException, SAXException, ParserConfigurationException, TransformerFactoryConfigurationError, TransformerException {
    File file = new File(xmlFilePath);
    // The file can be empty if the only command we have issued for this group is deployJar.
    if (file.length() == 0) {
      return "";
    }
    Document doc = getDocumentBuilder().parse(file);
    return elementToString(doc);
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.